libv8-sgonyea 3.3.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (500) hide show
  1. data/.gitignore +8 -0
  2. data/.gitmodules +3 -0
  3. data/Gemfile +4 -0
  4. data/README.md +76 -0
  5. data/Rakefile +113 -0
  6. data/ext/libv8/extconf.rb +28 -0
  7. data/lib/libv8.rb +15 -0
  8. data/lib/libv8/Makefile +30 -0
  9. data/lib/libv8/detect_cpu.rb +27 -0
  10. data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
  11. data/lib/libv8/v8/.gitignore +35 -0
  12. data/lib/libv8/v8/AUTHORS +44 -0
  13. data/lib/libv8/v8/ChangeLog +2839 -0
  14. data/lib/libv8/v8/LICENSE +52 -0
  15. data/lib/libv8/v8/LICENSE.strongtalk +29 -0
  16. data/lib/libv8/v8/LICENSE.v8 +26 -0
  17. data/lib/libv8/v8/LICENSE.valgrind +45 -0
  18. data/lib/libv8/v8/SConstruct +1478 -0
  19. data/lib/libv8/v8/build/README.txt +49 -0
  20. data/lib/libv8/v8/build/all.gyp +18 -0
  21. data/lib/libv8/v8/build/armu.gypi +32 -0
  22. data/lib/libv8/v8/build/common.gypi +144 -0
  23. data/lib/libv8/v8/build/gyp_v8 +145 -0
  24. data/lib/libv8/v8/include/v8-debug.h +395 -0
  25. data/lib/libv8/v8/include/v8-preparser.h +117 -0
  26. data/lib/libv8/v8/include/v8-profiler.h +505 -0
  27. data/lib/libv8/v8/include/v8-testing.h +104 -0
  28. data/lib/libv8/v8/include/v8.h +4124 -0
  29. data/lib/libv8/v8/include/v8stdint.h +53 -0
  30. data/lib/libv8/v8/preparser/SConscript +38 -0
  31. data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
  32. data/lib/libv8/v8/src/SConscript +368 -0
  33. data/lib/libv8/v8/src/accessors.cc +767 -0
  34. data/lib/libv8/v8/src/accessors.h +123 -0
  35. data/lib/libv8/v8/src/allocation-inl.h +49 -0
  36. data/lib/libv8/v8/src/allocation.cc +122 -0
  37. data/lib/libv8/v8/src/allocation.h +143 -0
  38. data/lib/libv8/v8/src/api.cc +5845 -0
  39. data/lib/libv8/v8/src/api.h +574 -0
  40. data/lib/libv8/v8/src/apinatives.js +110 -0
  41. data/lib/libv8/v8/src/apiutils.h +73 -0
  42. data/lib/libv8/v8/src/arguments.h +118 -0
  43. data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
  44. data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
  45. data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
  46. data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
  47. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
  48. data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
  49. data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
  50. data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
  51. data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
  52. data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
  53. data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
  54. data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
  55. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
  56. data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
  57. data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
  58. data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
  59. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
  60. data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
  61. data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
  62. data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
  63. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
  64. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
  65. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  66. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
  67. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
  68. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
  69. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  70. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  71. data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
  72. data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
  73. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
  74. data/lib/libv8/v8/src/array.js +1366 -0
  75. data/lib/libv8/v8/src/assembler.cc +1207 -0
  76. data/lib/libv8/v8/src/assembler.h +858 -0
  77. data/lib/libv8/v8/src/ast-inl.h +112 -0
  78. data/lib/libv8/v8/src/ast.cc +1146 -0
  79. data/lib/libv8/v8/src/ast.h +2188 -0
  80. data/lib/libv8/v8/src/atomicops.h +167 -0
  81. data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
  82. data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
  84. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
  85. data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
  86. data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
  87. data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
  88. data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
  89. data/lib/libv8/v8/src/bignum.cc +768 -0
  90. data/lib/libv8/v8/src/bignum.h +140 -0
  91. data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
  92. data/lib/libv8/v8/src/bootstrapper.h +188 -0
  93. data/lib/libv8/v8/src/builtins.cc +1707 -0
  94. data/lib/libv8/v8/src/builtins.h +371 -0
  95. data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
  96. data/lib/libv8/v8/src/cached-powers.cc +177 -0
  97. data/lib/libv8/v8/src/cached-powers.h +65 -0
  98. data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
  99. data/lib/libv8/v8/src/char-predicates.h +67 -0
  100. data/lib/libv8/v8/src/checks.cc +110 -0
  101. data/lib/libv8/v8/src/checks.h +296 -0
  102. data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
  103. data/lib/libv8/v8/src/circular-queue.cc +122 -0
  104. data/lib/libv8/v8/src/circular-queue.h +103 -0
  105. data/lib/libv8/v8/src/code-stubs.cc +267 -0
  106. data/lib/libv8/v8/src/code-stubs.h +1011 -0
  107. data/lib/libv8/v8/src/code.h +70 -0
  108. data/lib/libv8/v8/src/codegen.cc +231 -0
  109. data/lib/libv8/v8/src/codegen.h +84 -0
  110. data/lib/libv8/v8/src/compilation-cache.cc +540 -0
  111. data/lib/libv8/v8/src/compilation-cache.h +287 -0
  112. data/lib/libv8/v8/src/compiler.cc +786 -0
  113. data/lib/libv8/v8/src/compiler.h +312 -0
  114. data/lib/libv8/v8/src/contexts.cc +347 -0
  115. data/lib/libv8/v8/src/contexts.h +391 -0
  116. data/lib/libv8/v8/src/conversions-inl.h +106 -0
  117. data/lib/libv8/v8/src/conversions.cc +1131 -0
  118. data/lib/libv8/v8/src/conversions.h +135 -0
  119. data/lib/libv8/v8/src/counters.cc +93 -0
  120. data/lib/libv8/v8/src/counters.h +254 -0
  121. data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
  122. data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
  123. data/lib/libv8/v8/src/cpu-profiler.h +302 -0
  124. data/lib/libv8/v8/src/cpu.h +69 -0
  125. data/lib/libv8/v8/src/d8-debug.cc +367 -0
  126. data/lib/libv8/v8/src/d8-debug.h +158 -0
  127. data/lib/libv8/v8/src/d8-posix.cc +695 -0
  128. data/lib/libv8/v8/src/d8-readline.cc +130 -0
  129. data/lib/libv8/v8/src/d8-windows.cc +42 -0
  130. data/lib/libv8/v8/src/d8.cc +803 -0
  131. data/lib/libv8/v8/src/d8.gyp +91 -0
  132. data/lib/libv8/v8/src/d8.h +235 -0
  133. data/lib/libv8/v8/src/d8.js +2798 -0
  134. data/lib/libv8/v8/src/data-flow.cc +66 -0
  135. data/lib/libv8/v8/src/data-flow.h +205 -0
  136. data/lib/libv8/v8/src/date.js +1103 -0
  137. data/lib/libv8/v8/src/dateparser-inl.h +127 -0
  138. data/lib/libv8/v8/src/dateparser.cc +178 -0
  139. data/lib/libv8/v8/src/dateparser.h +266 -0
  140. data/lib/libv8/v8/src/debug-agent.cc +447 -0
  141. data/lib/libv8/v8/src/debug-agent.h +129 -0
  142. data/lib/libv8/v8/src/debug-debugger.js +2569 -0
  143. data/lib/libv8/v8/src/debug.cc +3165 -0
  144. data/lib/libv8/v8/src/debug.h +1057 -0
  145. data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
  146. data/lib/libv8/v8/src/deoptimizer.h +602 -0
  147. data/lib/libv8/v8/src/disasm.h +80 -0
  148. data/lib/libv8/v8/src/disassembler.cc +343 -0
  149. data/lib/libv8/v8/src/disassembler.h +58 -0
  150. data/lib/libv8/v8/src/diy-fp.cc +58 -0
  151. data/lib/libv8/v8/src/diy-fp.h +117 -0
  152. data/lib/libv8/v8/src/double.h +238 -0
  153. data/lib/libv8/v8/src/dtoa.cc +103 -0
  154. data/lib/libv8/v8/src/dtoa.h +85 -0
  155. data/lib/libv8/v8/src/execution.cc +849 -0
  156. data/lib/libv8/v8/src/execution.h +297 -0
  157. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
  158. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
  159. data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
  160. data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
  161. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
  162. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
  163. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
  164. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
  165. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
  166. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
  167. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
  168. data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
  169. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
  170. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
  171. data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
  172. data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
  173. data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
  174. data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
  175. data/lib/libv8/v8/src/factory.cc +1222 -0
  176. data/lib/libv8/v8/src/factory.h +442 -0
  177. data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
  178. data/lib/libv8/v8/src/fast-dtoa.h +83 -0
  179. data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
  180. data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
  181. data/lib/libv8/v8/src/flag-definitions.h +560 -0
  182. data/lib/libv8/v8/src/flags.cc +551 -0
  183. data/lib/libv8/v8/src/flags.h +79 -0
  184. data/lib/libv8/v8/src/frames-inl.h +247 -0
  185. data/lib/libv8/v8/src/frames.cc +1243 -0
  186. data/lib/libv8/v8/src/frames.h +870 -0
  187. data/lib/libv8/v8/src/full-codegen.cc +1374 -0
  188. data/lib/libv8/v8/src/full-codegen.h +771 -0
  189. data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
  190. data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
  191. data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
  192. data/lib/libv8/v8/src/gdb-jit.h +143 -0
  193. data/lib/libv8/v8/src/global-handles.cc +665 -0
  194. data/lib/libv8/v8/src/global-handles.h +284 -0
  195. data/lib/libv8/v8/src/globals.h +325 -0
  196. data/lib/libv8/v8/src/handles-inl.h +177 -0
  197. data/lib/libv8/v8/src/handles.cc +987 -0
  198. data/lib/libv8/v8/src/handles.h +382 -0
  199. data/lib/libv8/v8/src/hashmap.cc +230 -0
  200. data/lib/libv8/v8/src/hashmap.h +123 -0
  201. data/lib/libv8/v8/src/heap-inl.h +704 -0
  202. data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
  203. data/lib/libv8/v8/src/heap-profiler.h +397 -0
  204. data/lib/libv8/v8/src/heap.cc +5930 -0
  205. data/lib/libv8/v8/src/heap.h +2268 -0
  206. data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
  207. data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
  208. data/lib/libv8/v8/src/hydrogen.cc +6239 -0
  209. data/lib/libv8/v8/src/hydrogen.h +1202 -0
  210. data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
  211. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
  212. data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
  213. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
  214. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
  215. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
  216. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
  217. data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
  218. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
  219. data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
  220. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  221. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
  222. data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
  223. data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
  224. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
  225. data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
  226. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
  227. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
  228. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
  229. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  230. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
  231. data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
  232. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
  233. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
  234. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
  235. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  236. data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
  237. data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
  238. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
  239. data/lib/libv8/v8/src/ic-inl.h +130 -0
  240. data/lib/libv8/v8/src/ic.cc +2577 -0
  241. data/lib/libv8/v8/src/ic.h +736 -0
  242. data/lib/libv8/v8/src/inspector.cc +63 -0
  243. data/lib/libv8/v8/src/inspector.h +62 -0
  244. data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
  245. data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
  246. data/lib/libv8/v8/src/isolate-inl.h +50 -0
  247. data/lib/libv8/v8/src/isolate.cc +1869 -0
  248. data/lib/libv8/v8/src/isolate.h +1382 -0
  249. data/lib/libv8/v8/src/json-parser.cc +504 -0
  250. data/lib/libv8/v8/src/json-parser.h +161 -0
  251. data/lib/libv8/v8/src/json.js +342 -0
  252. data/lib/libv8/v8/src/jsregexp.cc +5385 -0
  253. data/lib/libv8/v8/src/jsregexp.h +1492 -0
  254. data/lib/libv8/v8/src/list-inl.h +212 -0
  255. data/lib/libv8/v8/src/list.h +174 -0
  256. data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
  257. data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
  258. data/lib/libv8/v8/src/lithium-allocator.h +630 -0
  259. data/lib/libv8/v8/src/lithium.cc +190 -0
  260. data/lib/libv8/v8/src/lithium.h +597 -0
  261. data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
  262. data/lib/libv8/v8/src/liveedit.cc +1691 -0
  263. data/lib/libv8/v8/src/liveedit.h +180 -0
  264. data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
  265. data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
  266. data/lib/libv8/v8/src/liveobjectlist.h +322 -0
  267. data/lib/libv8/v8/src/log-inl.h +59 -0
  268. data/lib/libv8/v8/src/log-utils.cc +428 -0
  269. data/lib/libv8/v8/src/log-utils.h +231 -0
  270. data/lib/libv8/v8/src/log.cc +1993 -0
  271. data/lib/libv8/v8/src/log.h +476 -0
  272. data/lib/libv8/v8/src/macro-assembler.h +120 -0
  273. data/lib/libv8/v8/src/macros.py +178 -0
  274. data/lib/libv8/v8/src/mark-compact.cc +3143 -0
  275. data/lib/libv8/v8/src/mark-compact.h +506 -0
  276. data/lib/libv8/v8/src/math.js +264 -0
  277. data/lib/libv8/v8/src/messages.cc +179 -0
  278. data/lib/libv8/v8/src/messages.h +113 -0
  279. data/lib/libv8/v8/src/messages.js +1096 -0
  280. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
  281. data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
  282. data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
  283. data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
  284. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
  285. data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
  286. data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
  287. data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
  288. data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
  289. data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
  290. data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
  291. data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
  292. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
  293. data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
  294. data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
  295. data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
  296. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
  297. data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
  298. data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
  299. data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
  300. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
  301. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
  302. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
  303. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
  304. data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
  305. data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
  306. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
  307. data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
  308. data/lib/libv8/v8/src/mksnapshot.cc +328 -0
  309. data/lib/libv8/v8/src/natives.h +64 -0
  310. data/lib/libv8/v8/src/objects-debug.cc +738 -0
  311. data/lib/libv8/v8/src/objects-inl.h +4323 -0
  312. data/lib/libv8/v8/src/objects-printer.cc +829 -0
  313. data/lib/libv8/v8/src/objects-visiting.cc +148 -0
  314. data/lib/libv8/v8/src/objects-visiting.h +424 -0
  315. data/lib/libv8/v8/src/objects.cc +10585 -0
  316. data/lib/libv8/v8/src/objects.h +6838 -0
  317. data/lib/libv8/v8/src/parser.cc +4997 -0
  318. data/lib/libv8/v8/src/parser.h +765 -0
  319. data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
  320. data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
  321. data/lib/libv8/v8/src/platform-linux.cc +1149 -0
  322. data/lib/libv8/v8/src/platform-macos.cc +830 -0
  323. data/lib/libv8/v8/src/platform-nullos.cc +479 -0
  324. data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
  325. data/lib/libv8/v8/src/platform-posix.cc +424 -0
  326. data/lib/libv8/v8/src/platform-solaris.cc +762 -0
  327. data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
  328. data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
  329. data/lib/libv8/v8/src/platform-tls.h +50 -0
  330. data/lib/libv8/v8/src/platform-win32.cc +2021 -0
  331. data/lib/libv8/v8/src/platform.h +667 -0
  332. data/lib/libv8/v8/src/preparse-data-format.h +62 -0
  333. data/lib/libv8/v8/src/preparse-data.cc +183 -0
  334. data/lib/libv8/v8/src/preparse-data.h +225 -0
  335. data/lib/libv8/v8/src/preparser-api.cc +220 -0
  336. data/lib/libv8/v8/src/preparser.cc +1450 -0
  337. data/lib/libv8/v8/src/preparser.h +493 -0
  338. data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
  339. data/lib/libv8/v8/src/prettyprinter.h +223 -0
  340. data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
  341. data/lib/libv8/v8/src/profile-generator.cc +3098 -0
  342. data/lib/libv8/v8/src/profile-generator.h +1126 -0
  343. data/lib/libv8/v8/src/property.cc +105 -0
  344. data/lib/libv8/v8/src/property.h +365 -0
  345. data/lib/libv8/v8/src/proxy.js +83 -0
  346. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  347. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
  348. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
  349. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
  350. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
  351. data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
  352. data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
  353. data/lib/libv8/v8/src/regexp-stack.cc +111 -0
  354. data/lib/libv8/v8/src/regexp-stack.h +147 -0
  355. data/lib/libv8/v8/src/regexp.js +483 -0
  356. data/lib/libv8/v8/src/rewriter.cc +360 -0
  357. data/lib/libv8/v8/src/rewriter.h +50 -0
  358. data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
  359. data/lib/libv8/v8/src/runtime-profiler.h +201 -0
  360. data/lib/libv8/v8/src/runtime.cc +12227 -0
  361. data/lib/libv8/v8/src/runtime.h +652 -0
  362. data/lib/libv8/v8/src/runtime.js +649 -0
  363. data/lib/libv8/v8/src/safepoint-table.cc +256 -0
  364. data/lib/libv8/v8/src/safepoint-table.h +270 -0
  365. data/lib/libv8/v8/src/scanner-base.cc +952 -0
  366. data/lib/libv8/v8/src/scanner-base.h +670 -0
  367. data/lib/libv8/v8/src/scanner.cc +345 -0
  368. data/lib/libv8/v8/src/scanner.h +146 -0
  369. data/lib/libv8/v8/src/scopeinfo.cc +646 -0
  370. data/lib/libv8/v8/src/scopeinfo.h +254 -0
  371. data/lib/libv8/v8/src/scopes.cc +1150 -0
  372. data/lib/libv8/v8/src/scopes.h +507 -0
  373. data/lib/libv8/v8/src/serialize.cc +1574 -0
  374. data/lib/libv8/v8/src/serialize.h +589 -0
  375. data/lib/libv8/v8/src/shell.h +55 -0
  376. data/lib/libv8/v8/src/simulator.h +43 -0
  377. data/lib/libv8/v8/src/small-pointer-list.h +163 -0
  378. data/lib/libv8/v8/src/smart-pointer.h +109 -0
  379. data/lib/libv8/v8/src/snapshot-common.cc +83 -0
  380. data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
  381. data/lib/libv8/v8/src/snapshot.h +91 -0
  382. data/lib/libv8/v8/src/spaces-inl.h +529 -0
  383. data/lib/libv8/v8/src/spaces.cc +3145 -0
  384. data/lib/libv8/v8/src/spaces.h +2369 -0
  385. data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
  386. data/lib/libv8/v8/src/splay-tree.h +205 -0
  387. data/lib/libv8/v8/src/string-search.cc +41 -0
  388. data/lib/libv8/v8/src/string-search.h +568 -0
  389. data/lib/libv8/v8/src/string-stream.cc +592 -0
  390. data/lib/libv8/v8/src/string-stream.h +191 -0
  391. data/lib/libv8/v8/src/string.js +994 -0
  392. data/lib/libv8/v8/src/strtod.cc +440 -0
  393. data/lib/libv8/v8/src/strtod.h +40 -0
  394. data/lib/libv8/v8/src/stub-cache.cc +1965 -0
  395. data/lib/libv8/v8/src/stub-cache.h +924 -0
  396. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
  397. data/lib/libv8/v8/src/token.cc +63 -0
  398. data/lib/libv8/v8/src/token.h +288 -0
  399. data/lib/libv8/v8/src/type-info.cc +507 -0
  400. data/lib/libv8/v8/src/type-info.h +272 -0
  401. data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
  402. data/lib/libv8/v8/src/unbound-queue.h +69 -0
  403. data/lib/libv8/v8/src/unicode-inl.h +238 -0
  404. data/lib/libv8/v8/src/unicode.cc +1624 -0
  405. data/lib/libv8/v8/src/unicode.h +280 -0
  406. data/lib/libv8/v8/src/uri.js +408 -0
  407. data/lib/libv8/v8/src/utils-inl.h +48 -0
  408. data/lib/libv8/v8/src/utils.cc +371 -0
  409. data/lib/libv8/v8/src/utils.h +800 -0
  410. data/lib/libv8/v8/src/v8-counters.cc +62 -0
  411. data/lib/libv8/v8/src/v8-counters.h +314 -0
  412. data/lib/libv8/v8/src/v8.cc +213 -0
  413. data/lib/libv8/v8/src/v8.h +131 -0
  414. data/lib/libv8/v8/src/v8checks.h +64 -0
  415. data/lib/libv8/v8/src/v8dll-main.cc +44 -0
  416. data/lib/libv8/v8/src/v8globals.h +512 -0
  417. data/lib/libv8/v8/src/v8memory.h +82 -0
  418. data/lib/libv8/v8/src/v8natives.js +1310 -0
  419. data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
  420. data/lib/libv8/v8/src/v8threads.cc +464 -0
  421. data/lib/libv8/v8/src/v8threads.h +165 -0
  422. data/lib/libv8/v8/src/v8utils.h +319 -0
  423. data/lib/libv8/v8/src/variables.cc +114 -0
  424. data/lib/libv8/v8/src/variables.h +167 -0
  425. data/lib/libv8/v8/src/version.cc +116 -0
  426. data/lib/libv8/v8/src/version.h +68 -0
  427. data/lib/libv8/v8/src/vm-state-inl.h +138 -0
  428. data/lib/libv8/v8/src/vm-state.h +71 -0
  429. data/lib/libv8/v8/src/win32-headers.h +96 -0
  430. data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
  431. data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
  432. data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
  433. data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
  434. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
  435. data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
  436. data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
  437. data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
  438. data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
  439. data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
  440. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
  441. data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
  442. data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
  443. data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
  444. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
  445. data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
  446. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
  447. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
  448. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  449. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
  450. data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
  451. data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
  452. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
  453. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
  454. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  455. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  456. data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
  457. data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
  458. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
  459. data/lib/libv8/v8/src/zone-inl.h +140 -0
  460. data/lib/libv8/v8/src/zone.cc +196 -0
  461. data/lib/libv8/v8/src/zone.h +240 -0
  462. data/lib/libv8/v8/tools/codemap.js +265 -0
  463. data/lib/libv8/v8/tools/consarray.js +93 -0
  464. data/lib/libv8/v8/tools/csvparser.js +78 -0
  465. data/lib/libv8/v8/tools/disasm.py +92 -0
  466. data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
  467. data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
  468. data/lib/libv8/v8/tools/gcmole/README +62 -0
  469. data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
  470. data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
  471. data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
  472. data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
  473. data/lib/libv8/v8/tools/grokdump.py +841 -0
  474. data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
  475. data/lib/libv8/v8/tools/js2c.py +364 -0
  476. data/lib/libv8/v8/tools/jsmin.py +280 -0
  477. data/lib/libv8/v8/tools/linux-tick-processor +35 -0
  478. data/lib/libv8/v8/tools/ll_prof.py +942 -0
  479. data/lib/libv8/v8/tools/logreader.js +185 -0
  480. data/lib/libv8/v8/tools/mac-nm +18 -0
  481. data/lib/libv8/v8/tools/mac-tick-processor +6 -0
  482. data/lib/libv8/v8/tools/oom_dump/README +31 -0
  483. data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
  484. data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
  485. data/lib/libv8/v8/tools/presubmit.py +305 -0
  486. data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
  487. data/lib/libv8/v8/tools/profile.js +751 -0
  488. data/lib/libv8/v8/tools/profile_view.js +219 -0
  489. data/lib/libv8/v8/tools/run-valgrind.py +77 -0
  490. data/lib/libv8/v8/tools/splaytree.js +316 -0
  491. data/lib/libv8/v8/tools/stats-viewer.py +468 -0
  492. data/lib/libv8/v8/tools/test.py +1510 -0
  493. data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
  494. data/lib/libv8/v8/tools/tickprocessor.js +877 -0
  495. data/lib/libv8/v8/tools/utils.py +96 -0
  496. data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
  497. data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
  498. data/lib/libv8/version.rb +5 -0
  499. data/libv8.gemspec +36 -0
  500. metadata +578 -0
@@ -0,0 +1,216 @@
1
+ // Copyright 2008-2009 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_
29
+ #define V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_
30
+
31
+ namespace v8 {
32
+ namespace internal {
33
+
34
+ #ifdef V8_INTERPRETED_REGEXP
35
+ class RegExpMacroAssemblerIA32: public RegExpMacroAssembler {
36
+ public:
37
+ RegExpMacroAssemblerIA32() { }
38
+ virtual ~RegExpMacroAssemblerIA32() { }
39
+ };
40
+
41
+ #else // V8_INTERPRETED_REGEXP
42
+ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
43
+ public:
44
+ RegExpMacroAssemblerIA32(Mode mode, int registers_to_save);
45
+ virtual ~RegExpMacroAssemblerIA32();
46
+ virtual int stack_limit_slack();
47
+ virtual void AdvanceCurrentPosition(int by);
48
+ virtual void AdvanceRegister(int reg, int by);
49
+ virtual void Backtrack();
50
+ virtual void Bind(Label* label);
51
+ virtual void CheckAtStart(Label* on_at_start);
52
+ virtual void CheckCharacter(uint32_t c, Label* on_equal);
53
+ virtual void CheckCharacterAfterAnd(uint32_t c,
54
+ uint32_t mask,
55
+ Label* on_equal);
56
+ virtual void CheckCharacterGT(uc16 limit, Label* on_greater);
57
+ virtual void CheckCharacterLT(uc16 limit, Label* on_less);
58
+ virtual void CheckCharacters(Vector<const uc16> str,
59
+ int cp_offset,
60
+ Label* on_failure,
61
+ bool check_end_of_string);
62
+ // A "greedy loop" is a loop that is both greedy and with a simple
63
+ // body. It has a particularly simple implementation.
64
+ virtual void CheckGreedyLoop(Label* on_tos_equals_current_position);
65
+ virtual void CheckNotAtStart(Label* on_not_at_start);
66
+ virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
67
+ virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
68
+ Label* on_no_match);
69
+ virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
70
+ virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
71
+ virtual void CheckNotCharacterAfterAnd(uint32_t c,
72
+ uint32_t mask,
73
+ Label* on_not_equal);
74
+ virtual void CheckNotCharacterAfterMinusAnd(uc16 c,
75
+ uc16 minus,
76
+ uc16 mask,
77
+ Label* on_not_equal);
78
+ // Checks whether the given offset from the current position is before
79
+ // the end of the string.
80
+ virtual void CheckPosition(int cp_offset, Label* on_outside_input);
81
+ virtual bool CheckSpecialCharacterClass(uc16 type, Label* on_no_match);
82
+ virtual void Fail();
83
+ virtual Handle<HeapObject> GetCode(Handle<String> source);
84
+ virtual void GoTo(Label* label);
85
+ virtual void IfRegisterGE(int reg, int comparand, Label* if_ge);
86
+ virtual void IfRegisterLT(int reg, int comparand, Label* if_lt);
87
+ virtual void IfRegisterEqPos(int reg, Label* if_eq);
88
+ virtual IrregexpImplementation Implementation();
89
+ virtual void LoadCurrentCharacter(int cp_offset,
90
+ Label* on_end_of_input,
91
+ bool check_bounds = true,
92
+ int characters = 1);
93
+ virtual void PopCurrentPosition();
94
+ virtual void PopRegister(int register_index);
95
+ virtual void PushBacktrack(Label* label);
96
+ virtual void PushCurrentPosition();
97
+ virtual void PushRegister(int register_index,
98
+ StackCheckFlag check_stack_limit);
99
+ virtual void ReadCurrentPositionFromRegister(int reg);
100
+ virtual void ReadStackPointerFromRegister(int reg);
101
+ virtual void SetCurrentPositionFromEnd(int by);
102
+ virtual void SetRegister(int register_index, int to);
103
+ virtual void Succeed();
104
+ virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
105
+ virtual void ClearRegisters(int reg_from, int reg_to);
106
+ virtual void WriteStackPointerToRegister(int reg);
107
+
108
+ // Called from RegExp if the stack-guard is triggered.
109
+ // If the code object is relocated, the return address is fixed before
110
+ // returning.
111
+ static int CheckStackGuardState(Address* return_address,
112
+ Code* re_code,
113
+ Address re_frame);
114
+
115
+ private:
116
+ // Offsets from ebp of function parameters and stored registers.
117
+ static const int kFramePointer = 0;
118
+ // Above the frame pointer - function parameters and return address.
119
+ static const int kReturn_eip = kFramePointer + kPointerSize;
120
+ static const int kFrameAlign = kReturn_eip + kPointerSize;
121
+ // Parameters.
122
+ static const int kInputString = kFrameAlign;
123
+ static const int kStartIndex = kInputString + kPointerSize;
124
+ static const int kInputStart = kStartIndex + kPointerSize;
125
+ static const int kInputEnd = kInputStart + kPointerSize;
126
+ static const int kRegisterOutput = kInputEnd + kPointerSize;
127
+ static const int kStackHighEnd = kRegisterOutput + kPointerSize;
128
+ static const int kDirectCall = kStackHighEnd + kPointerSize;
129
+ static const int kIsolate = kDirectCall + kPointerSize;
130
+ // Below the frame pointer - local stack variables.
131
+ // When adding local variables remember to push space for them in
132
+ // the frame in GetCode.
133
+ static const int kBackup_esi = kFramePointer - kPointerSize;
134
+ static const int kBackup_edi = kBackup_esi - kPointerSize;
135
+ static const int kBackup_ebx = kBackup_edi - kPointerSize;
136
+ static const int kInputStartMinusOne = kBackup_ebx - kPointerSize;
137
+ // First register address. Following registers are below it on the stack.
138
+ static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
139
+
140
+ // Initial size of code buffer.
141
+ static const size_t kRegExpCodeSize = 1024;
142
+
143
+ // Load a number of characters at the given offset from the
144
+ // current position, into the current-character register.
145
+ void LoadCurrentCharacterUnchecked(int cp_offset, int character_count);
146
+
147
+ // Check whether preemption has been requested.
148
+ void CheckPreemption();
149
+
150
+ // Check whether we are exceeding the stack limit on the backtrack stack.
151
+ void CheckStackLimit();
152
+
153
+ // Generate a call to CheckStackGuardState.
154
+ void CallCheckStackGuardState(Register scratch);
155
+
156
+ // The ebp-relative location of a regexp register.
157
+ Operand register_location(int register_index);
158
+
159
+ // The register containing the current character after LoadCurrentCharacter.
160
+ inline Register current_character() { return edx; }
161
+
162
+ // The register containing the backtrack stack top. Provides a meaningful
163
+ // name to the register.
164
+ inline Register backtrack_stackpointer() { return ecx; }
165
+
166
+ // Byte size of chars in the string to match (decided by the Mode argument)
167
+ inline int char_size() { return static_cast<int>(mode_); }
168
+
169
+ // Equivalent to a conditional branch to the label, unless the label
170
+ // is NULL, in which case it is a conditional Backtrack.
171
+ void BranchOrBacktrack(Condition condition, Label* to);
172
+
173
+ // Call and return internally in the generated code in a way that
174
+ // is GC-safe (i.e., doesn't leave absolute code addresses on the stack)
175
+ inline void SafeCall(Label* to);
176
+ inline void SafeReturn();
177
+ inline void SafeCallTarget(Label* name);
178
+
179
+ // Pushes the value of a register on the backtrack stack. Decrements the
180
+ // stack pointer (ecx) by a word size and stores the register's value there.
181
+ inline void Push(Register source);
182
+
183
+ // Pushes a value on the backtrack stack. Decrements the stack pointer (ecx)
184
+ // by a word size and stores the value there.
185
+ inline void Push(Immediate value);
186
+
187
+ // Pops a value from the backtrack stack. Reads the word at the stack pointer
188
+ // (ecx) and increments it by a word size.
189
+ inline void Pop(Register target);
190
+
191
+ MacroAssembler* masm_;
192
+
193
+ // Which mode to generate code for (ASCII or UC16).
194
+ Mode mode_;
195
+
196
+ // One greater than maximal register index actually used.
197
+ int num_registers_;
198
+
199
+ // Number of registers to output at the end (the saved registers
200
+ // are always 0..num_saved_registers_-1)
201
+ int num_saved_registers_;
202
+
203
+ // Labels used internally.
204
+ Label entry_label_;
205
+ Label start_label_;
206
+ Label success_label_;
207
+ Label backtrack_label_;
208
+ Label exit_label_;
209
+ Label check_preempt_label_;
210
+ Label stack_overflow_label_;
211
+ };
212
+ #endif // V8_INTERPRETED_REGEXP
213
+
214
+ }} // namespace v8::internal
215
+
216
+ #endif // V8_IA32_REGEXP_MACRO_ASSEMBLER_IA32_H_
@@ -0,0 +1,30 @@
1
+ // Copyright 2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+
29
+ // Since there is no simulator for the ia32 architecture this file is empty.
30
+
@@ -0,0 +1,74 @@
1
+ // Copyright 2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_IA32_SIMULATOR_IA32_H_
29
+ #define V8_IA32_SIMULATOR_IA32_H_
30
+
31
+ #include "allocation.h"
32
+
33
+ namespace v8 {
34
+ namespace internal {
35
+
36
+ // Since there is no simulator for the ia32 architecture the only thing we can
37
+ // do is to call the entry directly.
38
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
39
+ (entry(p0, p1, p2, p3, p4))
40
+
41
+
42
+ typedef int (*regexp_matcher)(String*, int, const byte*,
43
+ const byte*, int*, Address, int, Isolate*);
44
+
45
+ // Call the generated regexp code directly. The code at the entry address should
46
+ // expect eight int/pointer sized arguments and return an int.
47
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
48
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
49
+
50
+
51
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
52
+ (reinterpret_cast<TryCatch*>(try_catch_address))
53
+
54
+ // The stack limit beyond which we will throw stack overflow errors in
55
+ // generated code. Because generated code on ia32 uses the C stack, we
56
+ // just use the C stack limit.
57
+ class SimulatorStack : public v8::internal::AllStatic {
58
+ public:
59
+ static inline uintptr_t JsLimitFromCLimit(Isolate* isolate,
60
+ uintptr_t c_limit) {
61
+ USE(isolate);
62
+ return c_limit;
63
+ }
64
+
65
+ static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
66
+ return try_catch_address;
67
+ }
68
+
69
+ static inline void UnregisterCTryCatch() { }
70
+ };
71
+
72
+ } } // namespace v8::internal
73
+
74
+ #endif // V8_IA32_SIMULATOR_IA32_H_
@@ -0,0 +1,3847 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_IA32)
31
+
32
+ #include "ic-inl.h"
33
+ #include "codegen.h"
34
+ #include "stub-cache.h"
35
+
36
+ namespace v8 {
37
+ namespace internal {
38
+
39
+ #define __ ACCESS_MASM(masm)
40
+
41
+
42
+ static void ProbeTable(Isolate* isolate,
43
+ MacroAssembler* masm,
44
+ Code::Flags flags,
45
+ StubCache::Table table,
46
+ Register name,
47
+ Register offset,
48
+ Register extra) {
49
+ ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
50
+ ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
51
+
52
+ Label miss;
53
+
54
+ if (extra.is_valid()) {
55
+ // Get the code entry from the cache.
56
+ __ mov(extra, Operand::StaticArray(offset, times_2, value_offset));
57
+
58
+ // Check that the key in the entry matches the name.
59
+ __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
60
+ __ j(not_equal, &miss);
61
+
62
+ // Check that the flags match what we're looking for.
63
+ __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
64
+ __ and_(offset, ~Code::kFlagsNotUsedInLookup);
65
+ __ cmp(offset, flags);
66
+ __ j(not_equal, &miss);
67
+
68
+ // Jump to the first instruction in the code stub.
69
+ __ add(Operand(extra), Immediate(Code::kHeaderSize - kHeapObjectTag));
70
+ __ jmp(Operand(extra));
71
+
72
+ __ bind(&miss);
73
+ } else {
74
+ // Save the offset on the stack.
75
+ __ push(offset);
76
+
77
+ // Check that the key in the entry matches the name.
78
+ __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
79
+ __ j(not_equal, &miss);
80
+
81
+ // Get the code entry from the cache.
82
+ __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
83
+
84
+ // Check that the flags match what we're looking for.
85
+ __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
86
+ __ and_(offset, ~Code::kFlagsNotUsedInLookup);
87
+ __ cmp(offset, flags);
88
+ __ j(not_equal, &miss);
89
+
90
+ // Restore offset and re-load code entry from cache.
91
+ __ pop(offset);
92
+ __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
93
+
94
+ // Jump to the first instruction in the code stub.
95
+ __ add(Operand(offset), Immediate(Code::kHeaderSize - kHeapObjectTag));
96
+ __ jmp(Operand(offset));
97
+
98
+ // Pop at miss.
99
+ __ bind(&miss);
100
+ __ pop(offset);
101
+ }
102
+ }
103
+
104
+
105
+ // Helper function used to check that the dictionary doesn't contain
106
+ // the property. This function may return false negatives, so miss_label
107
+ // must always call a backup property check that is complete.
108
+ // This function is safe to call if the receiver has fast properties.
109
+ // Name must be a symbol and receiver must be a heap object.
110
+ static MaybeObject* GenerateDictionaryNegativeLookup(MacroAssembler* masm,
111
+ Label* miss_label,
112
+ Register receiver,
113
+ String* name,
114
+ Register r0,
115
+ Register r1) {
116
+ ASSERT(name->IsSymbol());
117
+ Counters* counters = masm->isolate()->counters();
118
+ __ IncrementCounter(counters->negative_lookups(), 1);
119
+ __ IncrementCounter(counters->negative_lookups_miss(), 1);
120
+
121
+ __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
122
+
123
+ const int kInterceptorOrAccessCheckNeededMask =
124
+ (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
125
+
126
+ // Bail out if the receiver has a named interceptor or requires access checks.
127
+ __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
128
+ kInterceptorOrAccessCheckNeededMask);
129
+ __ j(not_zero, miss_label);
130
+
131
+ // Check that receiver is a JSObject.
132
+ __ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
133
+ __ j(below, miss_label);
134
+
135
+ // Load properties array.
136
+ Register properties = r0;
137
+ __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
138
+
139
+ // Check that the properties array is a dictionary.
140
+ __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
141
+ Immediate(masm->isolate()->factory()->hash_table_map()));
142
+ __ j(not_equal, miss_label);
143
+
144
+ Label done;
145
+ MaybeObject* result =
146
+ StringDictionaryLookupStub::GenerateNegativeLookup(masm,
147
+ miss_label,
148
+ &done,
149
+ properties,
150
+ name,
151
+ r1);
152
+ if (result->IsFailure()) return result;
153
+
154
+ __ bind(&done);
155
+ __ DecrementCounter(counters->negative_lookups_miss(), 1);
156
+
157
+ return result;
158
+ }
159
+
160
+
161
+ void StubCache::GenerateProbe(MacroAssembler* masm,
162
+ Code::Flags flags,
163
+ Register receiver,
164
+ Register name,
165
+ Register scratch,
166
+ Register extra,
167
+ Register extra2) {
168
+ Isolate* isolate = Isolate::Current();
169
+ Label miss;
170
+ USE(extra2); // The register extra2 is not used on the ia32 platform.
171
+
172
+ // Make sure that code is valid. The shifting code relies on the
173
+ // entry size being 8.
174
+ ASSERT(sizeof(Entry) == 8);
175
+
176
+ // Make sure the flags does not name a specific type.
177
+ ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
178
+
179
+ // Make sure that there are no register conflicts.
180
+ ASSERT(!scratch.is(receiver));
181
+ ASSERT(!scratch.is(name));
182
+ ASSERT(!extra.is(receiver));
183
+ ASSERT(!extra.is(name));
184
+ ASSERT(!extra.is(scratch));
185
+
186
+ // Check scratch and extra registers are valid, and extra2 is unused.
187
+ ASSERT(!scratch.is(no_reg));
188
+ ASSERT(extra2.is(no_reg));
189
+
190
+ // Check that the receiver isn't a smi.
191
+ __ test(receiver, Immediate(kSmiTagMask));
192
+ __ j(zero, &miss);
193
+
194
+ // Get the map of the receiver and compute the hash.
195
+ __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
196
+ __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
197
+ __ xor_(scratch, flags);
198
+ __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
199
+
200
+ // Probe the primary table.
201
+ ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra);
202
+
203
+ // Primary miss: Compute hash for secondary probe.
204
+ __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
205
+ __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
206
+ __ xor_(scratch, flags);
207
+ __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
208
+ __ sub(scratch, Operand(name));
209
+ __ add(Operand(scratch), Immediate(flags));
210
+ __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
211
+
212
+ // Probe the secondary table.
213
+ ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra);
214
+
215
+ // Cache miss: Fall-through and let caller handle the miss by
216
+ // entering the runtime system.
217
+ __ bind(&miss);
218
+ }
219
+
220
+
221
+ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
222
+ int index,
223
+ Register prototype) {
224
+ __ LoadGlobalFunction(index, prototype);
225
+ __ LoadGlobalFunctionInitialMap(prototype, prototype);
226
+ // Load the prototype from the initial map.
227
+ __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
228
+ }
229
+
230
+
231
+ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
232
+ MacroAssembler* masm, int index, Register prototype, Label* miss) {
233
+ // Check we're still in the same context.
234
+ __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)),
235
+ masm->isolate()->global());
236
+ __ j(not_equal, miss);
237
+ // Get the global function with the given index.
238
+ JSFunction* function =
239
+ JSFunction::cast(masm->isolate()->global_context()->get(index));
240
+ // Load its initial map. The global functions all have initial maps.
241
+ __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
242
+ // Load the prototype from the initial map.
243
+ __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
244
+ }
245
+
246
+
247
+ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
248
+ Register receiver,
249
+ Register scratch,
250
+ Label* miss_label) {
251
+ // Check that the receiver isn't a smi.
252
+ __ test(receiver, Immediate(kSmiTagMask));
253
+ __ j(zero, miss_label);
254
+
255
+ // Check that the object is a JS array.
256
+ __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
257
+ __ j(not_equal, miss_label);
258
+
259
+ // Load length directly from the JS array.
260
+ __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
261
+ __ ret(0);
262
+ }
263
+
264
+
265
+ // Generate code to check if an object is a string. If the object is
266
+ // a string, the map's instance type is left in the scratch register.
267
+ static void GenerateStringCheck(MacroAssembler* masm,
268
+ Register receiver,
269
+ Register scratch,
270
+ Label* smi,
271
+ Label* non_string_object) {
272
+ // Check that the object isn't a smi.
273
+ __ test(receiver, Immediate(kSmiTagMask));
274
+ __ j(zero, smi);
275
+
276
+ // Check that the object is a string.
277
+ __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
278
+ __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
279
+ ASSERT(kNotStringTag != 0);
280
+ __ test(scratch, Immediate(kNotStringTag));
281
+ __ j(not_zero, non_string_object);
282
+ }
283
+
284
+
285
+ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
286
+ Register receiver,
287
+ Register scratch1,
288
+ Register scratch2,
289
+ Label* miss,
290
+ bool support_wrappers) {
291
+ Label check_wrapper;
292
+
293
+ // Check if the object is a string leaving the instance type in the
294
+ // scratch register.
295
+ GenerateStringCheck(masm, receiver, scratch1, miss,
296
+ support_wrappers ? &check_wrapper : miss);
297
+
298
+ // Load length from the string and convert to a smi.
299
+ __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
300
+ __ ret(0);
301
+
302
+ if (support_wrappers) {
303
+ // Check if the object is a JSValue wrapper.
304
+ __ bind(&check_wrapper);
305
+ __ cmp(scratch1, JS_VALUE_TYPE);
306
+ __ j(not_equal, miss);
307
+
308
+ // Check if the wrapped value is a string and load the length
309
+ // directly if it is.
310
+ __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
311
+ GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
312
+ __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
313
+ __ ret(0);
314
+ }
315
+ }
316
+
317
+
318
+ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
319
+ Register receiver,
320
+ Register scratch1,
321
+ Register scratch2,
322
+ Label* miss_label) {
323
+ __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
324
+ __ mov(eax, Operand(scratch1));
325
+ __ ret(0);
326
+ }
327
+
328
+
329
+ // Load a fast property out of a holder object (src). In-object properties
330
+ // are loaded directly otherwise the property is loaded from the properties
331
+ // fixed array.
332
+ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
333
+ Register dst, Register src,
334
+ JSObject* holder, int index) {
335
+ // Adjust for the number of properties stored in the holder.
336
+ index -= holder->map()->inobject_properties();
337
+ if (index < 0) {
338
+ // Get the property straight out of the holder.
339
+ int offset = holder->map()->instance_size() + (index * kPointerSize);
340
+ __ mov(dst, FieldOperand(src, offset));
341
+ } else {
342
+ // Calculate the offset into the properties array.
343
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
344
+ __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
345
+ __ mov(dst, FieldOperand(dst, offset));
346
+ }
347
+ }
348
+
349
+
350
+ static void PushInterceptorArguments(MacroAssembler* masm,
351
+ Register receiver,
352
+ Register holder,
353
+ Register name,
354
+ JSObject* holder_obj) {
355
+ __ push(name);
356
+ InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
357
+ ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
358
+ Register scratch = name;
359
+ __ mov(scratch, Immediate(Handle<Object>(interceptor)));
360
+ __ push(scratch);
361
+ __ push(receiver);
362
+ __ push(holder);
363
+ __ push(FieldOperand(scratch, InterceptorInfo::kDataOffset));
364
+ }
365
+
366
+
367
+ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
368
+ Register receiver,
369
+ Register holder,
370
+ Register name,
371
+ JSObject* holder_obj) {
372
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
373
+ __ CallExternalReference(
374
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
375
+ masm->isolate()),
376
+ 5);
377
+ }
378
+
379
+
380
+ // Number of pointers to be reserved on stack for fast API call.
381
+ static const int kFastApiCallArguments = 3;
382
+
383
+
384
+ // Reserves space for the extra arguments to API function in the
385
+ // caller's frame.
386
+ //
387
+ // These arguments are set by CheckPrototypes and GenerateFastApiCall.
388
+ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
389
+ // ----------- S t a t e -------------
390
+ // -- esp[0] : return address
391
+ // -- esp[4] : last argument in the internal frame of the caller
392
+ // -----------------------------------
393
+ __ pop(scratch);
394
+ for (int i = 0; i < kFastApiCallArguments; i++) {
395
+ __ push(Immediate(Smi::FromInt(0)));
396
+ }
397
+ __ push(scratch);
398
+ }
399
+
400
+
401
+ // Undoes the effects of ReserveSpaceForFastApiCall.
402
+ static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
403
+ // ----------- S t a t e -------------
404
+ // -- esp[0] : return address.
405
+ // -- esp[4] : last fast api call extra argument.
406
+ // -- ...
407
+ // -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
408
+ // -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
409
+ // frame.
410
+ // -----------------------------------
411
+ __ pop(scratch);
412
+ __ add(Operand(esp), Immediate(kPointerSize * kFastApiCallArguments));
413
+ __ push(scratch);
414
+ }
415
+
416
+
417
// Generates call to API function. The expected stack layout (set up by
// CheckPrototypes and the reserved fast-api slots) is documented below.
// Returns the result of TryCallApiFunctionAndReturn, which may be an
// allocation failure rather than generated code.
static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
                                        const CallOptimization& optimization,
                                        int argc) {
  // ----------- S t a t e -------------
  //  -- esp[0]              : return address
  //  -- esp[4]              : object passing the type check
  //                           (last fast api call extra argument,
  //                            set by CheckPrototypes)
  //  -- esp[8]              : api function
  //                           (first fast api call extra argument)
  //  -- esp[12]             : api call data
  //  -- esp[16]             : last argument
  //  -- ...
  //  -- esp[(argc + 3) * 4] : first argument
  //  -- esp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ mov(edi, Immediate(Handle<JSFunction>(function)));
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Pass the additional arguments: store the function in the reserved
  // esp[8] slot and the call data in the esp[12] slot.
  __ mov(Operand(esp, 2 * kPointerSize), edi);
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (masm->isolate()->heap()->InNewSpace(call_data)) {
    // Call data lives in new space, so it cannot be embedded directly in the
    // code object; load it through the CallHandlerInfo instead.
    __ mov(ecx, api_call_info_handle);
    __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
    __ mov(Operand(esp, 3 * kPointerSize), ebx);
  } else {
    __ mov(Operand(esp, 3 * kPointerSize),
           Immediate(Handle<Object>(call_data)));
  }

  // Prepare arguments: eax points at the call data slot, which is the start
  // of the implicit v8::Arguments block.
  __ lea(eax, Operand(esp, 3 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  const int kApiArgc = 1;  // API function gets reference to the v8::Arguments.

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace, ebx);

  // Fill in the four fields of the v8::Arguments structure.
  __ mov(ApiParameterOperand(1), eax);  // v8::Arguments::implicit_args_.
  __ add(Operand(eax), Immediate(argc * kPointerSize));
  __ mov(ApiParameterOperand(2), eax);  // v8::Arguments::values_.
  __ Set(ApiParameterOperand(3), Immediate(argc));  // v8::Arguments::length_.
  // v8::Arguments::is_construct_call_.
  __ Set(ApiParameterOperand(4), Immediate(0));

  // v8::InvocationCallback's argument: a pointer to the v8::Arguments
  // structure built above.
  __ lea(eax, ApiParameterOperand(1));
  __ mov(ApiParameterOperand(0), eax);

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm->TryCallApiFunctionAndReturn(&fun,
                                           argc + kFastApiCallArguments + 1);
}
485
+
486
+
487
// Helper that compiles call stubs for properties guarded by a named
// interceptor. Depending on the lookup result it either emits a cacheable
// fast path calling a constant function (possibly via the fast API call
// machinery) or a regular runtime call through the interceptor.
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  // Entry point. |lookup| describes the property found behind the
  // interceptor; |miss| is jumped to when any guard fails. Returns a failure
  // object if code emission needed an allocation that failed.
  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ test(receiver, Immediate(kSmiTagMask));
    __ j(zero, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();  // Success.
    }
  }

 private:
  // Fast path: the lookup resolved to a constant function, so we can check
  // the prototype chains once and then call it directly (or through the fast
  // API call path when the function is a simple API call).
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    // Determine whether the fast API call path applies: the expected
    // receiver type must be found at a valid depth on one of the two
    // prototype-chain segments (receiver->interceptor holder, or
    // interceptor holder->function holder).
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    // When the fast API path reserved stack space, a miss must first free
    // that space, hence the separate miss_cleanup label.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      MaybeObject* result =
          GenerateFastApiCall(masm, optimization, arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function: the interceptor produced a value, so free
    // any reserved fast-api space before falling through to the caller.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }

    return masm->isolate()->heap()->undefined_value();  // Success.
  }

  // Slow path: call the interceptor through the runtime
  // (kLoadPropertyWithInterceptorForCall), which also performs the call.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  // Calls the interceptor getter. Falls through when it returned the
  // no-interceptor-result sentinel (i.e. no value); jumps to
  // |interceptor_succeeded| when the interceptor produced a value (in eax).
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ push(holder);  // Save the holder.
    __ push(name_);  // Save the name.

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    // Note: the saved holder value is popped into the |receiver| register.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};
700
+
701
+
702
// Tail-jumps to the miss builtin matching |kind| (LoadIC or KeyedLoadIC),
// which handles the cache miss in the runtime.
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
  } else {
    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ jmp(ic, RelocInfo::CODE_TARGET);
}
714
+
715
+
716
// Tail-jumps to the keyed-load miss builtin that forces the IC into its
// generic state (used when the specialized stub should not be retried).
void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
  Code* code = masm->isolate()->builtins()->builtin(
      Builtins::kKeyedLoadIC_MissForceGeneric);
  Handle<Code> ic(code);
  __ jmp(ic, RelocInfo::CODE_TARGET);
}
722
+
723
+
724
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
// Stores eax (the value) into field |index| of |object|, optionally applying
// the map |transition| first. Returns to the caller with the value in eax.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // Check that the object isn't a smi.
  __ test(receiver_reg, Immediate(kSmiTagMask));
  __ j(zero, miss_label);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    // Arguments pushed: receiver, transition map, value (3 args).
    __ pop(scratch);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(Handle<Map>(transition)));
    __ push(eax);
    __ push(scratch);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
           Immediate(Handle<Map>(transition)));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), eax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(eax));
    __ RecordWrite(receiver_reg, offset, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch, offset), eax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(eax));
    __ RecordWrite(scratch, offset, name_reg, receiver_reg);
  }

  // Return the value (register eax).
  __ ret(0);
}
806
+
807
+
808
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
// Returns the cell on success, or a failure if cell creation needed an
// allocation that failed.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  ASSERT(cell->value()->IsTheHole());
  if (Serializer::enabled()) {
    // When serializing we cannot embed the raw cell address, so load the
    // cell through a handle and read its value field.
    __ mov(scratch, Immediate(Handle<Object>(cell)));
    __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
           Immediate(masm->isolate()->factory()->the_hole_value()));
  } else {
    __ cmp(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
           Immediate(masm->isolate()->factory()->the_hole_value()));
  }
  __ j(not_equal, miss);
  return cell;
}
834
+
835
+
836
// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
// Returns NULL on success, or the first failure encountered.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}
862
+
863
+
864
+ #undef __
865
+ #define __ ACCESS_MASM(masm())
866
+
867
+
868
// Walks the prototype chain from |object| to |holder|, emitting map checks
// (or negative dictionary lookups for slow-mode objects) so the generated
// stub misses if any map changes. Optionally stores the object found at
// |save_at_depth| into esp[kPointerSize] (used by the fast API call path).
// Returns the register containing the holder; on allocation failure it
// records the failure via set_failure() and returns early.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  JSObject* current = object;
  int depth = 0;

  if (save_at_depth == depth) {
    __ mov(Operand(esp, kPointerSize), reg);
  }

  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode (dictionary) object: instead of a map check, prove that
      // the property is absent via a negative dictionary lookup. The name
      // must be a symbol for the dictionary probe.
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
                                                                      miss,
                                                                      reg,
                                                                      name,
                                                                      scratch1,
                                                                      scratch2);
      if (negative_lookup->IsFailure()) {
        set_failure(Failure::cast(negative_lookup));
        return reg;
      }

      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ cmp(Operand(scratch1), Immediate(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Immediate(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ mov(Operand(esp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }
  ASSERT(current == holder);

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Immediate(Handle<Map>(holder->map())));
  __ j(not_equal, miss);

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  };

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}
1005
+
1006
+
1007
// Loads field |index| of |holder| into eax after verifying the prototype
// chain from |object| to |holder|, then returns to the caller.
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss);

  // Check the prototype chain.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Get the value from the properties.
  GenerateFastPropertyLoad(masm(), eax, reg, holder, index);
  __ ret(0);
}
1029
+
1030
+
1031
// Loads a property via an AccessorInfo getter callback. Builds the
// AccessorInfo argument block on the stack and calls the C++ getter through
// the API-call machinery. Returns a failure if stub emission needed an
// allocation that failed.
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch3.is(reg));
  __ pop(scratch3);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ mov(scratch2, Operand(esp));
  ASSERT(!scratch2.is(reg));
  __ push(reg);  // holder
  // Push data from AccessorInfo.
  if (isolate()->heap()->InNewSpace(callback_handle->data())) {
    // Data lives in new space: load it via the AccessorInfo handle rather
    // than embedding the raw pointer in the code.
    __ mov(scratch1, Immediate(callback_handle));
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));
  } else {
    __ push(Immediate(Handle<Object>(callback_handle->data())));
  }

  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.
  __ push(scratch2);

  __ push(name_reg);  // name
  __ mov(ebx, esp);  // esp points to reference to name (handler).

  __ push(scratch3);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3 elements array for v8::Arguments::values_, handler for name and pointer
  // to the values (it considered as smi in GC).
  const int kStackSpace = 5;
  const int kApiArgc = 2;

  __ PrepareCallApiFunction(kApiArgc, eax);
  __ mov(ApiParameterOperand(0), ebx);  // name.
  __ add(Operand(ebx), Immediate(kPointerSize));
  __ mov(ApiParameterOperand(1), ebx);  // arguments pointer.

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
}
1097
+
1098
+
1099
// Loads a constant property value: checks the prototype chain from |object|
// to |holder| and returns the compile-time constant |value| in eax.
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(object, receiver, holder,
                  scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ mov(eax, Handle<Object>(value));
  __ ret(0);
}
1120
+
1121
+
1122
// Loads a property guarded by a named interceptor. When the follow-up
// property (per |lookup|) is a FIELD or an AccessorInfo CALLBACKS entry, the
// interceptor call and the fallback load are inlined; otherwise the whole
// load is delegated to the runtime.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ cmp(eax, factory()->no_interceptor_result_sentinel());
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    // Interceptor produced no value: restore the saved registers and fall
    // through to the inlined follow-up load.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into holder_reg.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), eax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ mov(holder_reg, Immediate(Handle<AccessorInfo>(callback)));
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            masm()->isolate());
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg =
        CheckPrototypes(object, receiver, interceptor_holder,
                        scratch1, scratch2, scratch3, name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          isolate());
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1261
+
1262
+
1263
// For keyed call ICs the property name arrives in ecx at runtime; verify it
// matches the name this stub was compiled for. Monomorphic (named) call ICs
// need no check, since the name is implied by the IC site.
void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
    __ j(not_equal, miss);
  }
}
1269
+
1270
+
1271
// Loads the receiver from the stack into edx and verifies the prototype
// chain from |object| to the global |holder|. Jumps to |miss| on failure.
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);
}
1294
+
1295
+
1296
// Loads the value of a global property cell into edi and verifies that it is
// still the expected |function| (by identity, or by shared function info when
// the function lives in new space). Jumps to |miss| otherwise.
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  if (Serializer::enabled()) {
    // When serializing, the raw cell address cannot be embedded; go through
    // a handle and the cell's value field instead.
    __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
    __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(edi, Operand::Cell(Handle<JSGlobalPropertyCell>(cell)));
  }

  // Check that the cell contains the same function.
  if (isolate()->heap()->InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ test(edi, Immediate(kSmiTagMask));
    __ j(zero, miss);
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
    __ j(not_equal, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
           Immediate(Handle<SharedFunctionInfo>(function->shared())));
    __ j(not_equal, miss);
  } else {
    __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
    __ j(not_equal, miss);
  }
}
1328
+
1329
+
1330
// Emits a jump to the appropriate CallIC miss stub for this stub's argument
// count, kind and extra IC state. Returns the miss stub code object, or a
// failure if computing it required an allocation that failed.
MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_ic_state_);
  Object* obj;
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  __ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}
1340
+
1341
+
1342
// Compiles a call stub for the case where the callee is stored in an
// in-object or properties-array field of |holder|: checks the prototype
// chain, loads the field, verifies it is a function, and invokes it.
// Returns the generated code object or a failure.
MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField(
    JSObject* object,
    JSObject* holder,
    int index,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
                                 name, &miss);

  // Load the callee (a function, we hope) from the field into edi.
  GenerateFastPropertyLoad(masm(), edi, reg, holder, index);

  // Check that the function really is a function.
  __ test(edi, Immediate(kSmiTagMask));
  __ j(zero, &miss);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(FIELD, name);
}
1396
+
1397
+
1398
// Custom call IC for Array.prototype.push on fast-elements JSArrays.
// Handles argc == 0 (return length) and argc == 1 (inline push, growing
// the elements store in new space when possible); everything else falls
// through to the C++ builtin. Returns undefined to signal "use the
// regular call path" when the receiver shape doesn't qualify.
MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss);

  CheckPrototypes(JSObject::cast(object), edx,
                  holder, ebx,
                  eax, edi, name, &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    // Get the elements array of the object.
    __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(factory()->fixed_array_map()));
    __ j(not_equal, &call_builtin);

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into eax and calculate new length.
      // Lengths are smis, so smi arithmetic works directly.
      __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ add(Operand(eax), Immediate(Smi::FromInt(argc)));

      // Get the element's length into ecx.
      __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(eax, Operand(ecx));
      __ j(greater, &attempt_to_grow_elements);

      // Save new length.
      __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

      // Push the element.
      // eax holds the new length (a smi), so scaling by half a pointer
      // yields the byte offset of the slot just past the old last element.
      __ lea(edx, FieldOperand(ebx,
                               eax, times_half_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ mov(ecx, Operand(esp, argc * kPointerSize));
      __ mov(Operand(edx, 0), ecx);

      // Check if value is a smi.
      __ test(ecx, Immediate(kSmiTagMask));
      __ j(not_zero, &with_write_barrier);

      __ bind(&exit);
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      // No barrier needed when the elements array is in new space.
      __ InNewSpace(ebx, ecx, equal, &exit);

      __ RecordWriteHelper(ebx, edx, ecx);
      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top.
      __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));

      // Check if it's the end of elements.
      // Growing in place only works when the elements array abuts the
      // current new-space allocation top.
      __ lea(edx, FieldOperand(ebx,
                               eax, times_half_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ cmp(edx, Operand(ecx));
      __ j(not_equal, &call_builtin);
      __ add(Operand(ecx), Immediate(kAllocationDelta * kPointerSize));
      __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
      __ j(above, &call_builtin);

      // We fit and could grow elements.
      __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
      __ mov(ecx, Operand(esp, argc * kPointerSize));

      // Push the argument...
      __ mov(Operand(edx, 0), ecx);
      // ... and fill the rest with holes.
      for (int i = 1; i < kAllocationDelta; i++) {
        __ mov(Operand(edx, i * kPointerSize),
               Immediate(factory()->the_hole_value()));
      }

      // Restore receiver to edx as finish sequence assumes it's here.
      __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

      // Increment element's and array's sizes.
      __ add(FieldOperand(ebx, FixedArray::kLengthOffset),
             Immediate(Smi::FromInt(kAllocationDelta)));
      __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

      // Elements are in new space, so write barrier is not required.
      __ ret((argc + 1) * kPointerSize);
    }

    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate()),
        argc + 1,
        1);
  }

  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1549
+
1550
+
1551
// Custom call IC for Array.prototype.pop on fast-elements JSArrays.
// Inlines the common case (non-empty array, non-hole last element);
// otherwise tail-calls the C++ builtin. Returns undefined to signal
// "use the regular call path" when the receiver shape doesn't qualify.
MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) {
    return heap()->undefined_value();
  }

  Label miss, return_undefined, call_builtin;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss);
  CheckPrototypes(JSObject::cast(object), edx,
                  holder, ebx,
                  eax, edi, name, &miss);

  // Get the elements array of the object.
  __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(factory()->fixed_array_map()));
  __ j(not_equal, &call_builtin);

  // Get the array's length into ecx and calculate new length.
  // Length is a smi; subtracting smi 1 keeps it a smi, and a negative
  // result means the array was empty.
  __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
  __ sub(Operand(ecx), Immediate(Smi::FromInt(1)));
  __ j(negative, &return_undefined);

  // Get the last element.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(eax, FieldOperand(ebx,
                           ecx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  // A hole means the element may live on the prototype chain; let the
  // builtin handle that case.
  __ cmp(Operand(eax), Immediate(factory()->the_hole_value()));
  __ j(equal, &call_builtin);

  // Set the array's length.
  __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);

  // Fill with the hole.
  __ mov(FieldOperand(ebx,
                      ecx, times_half_pointer_size,
                      FixedArray::kHeaderSize),
         Immediate(factory()->the_hole_value()));
  __ ret((argc + 1) * kPointerSize);

  __ bind(&return_undefined);
  __ mov(eax, Immediate(factory()->undefined_value()));
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()),
      argc + 1,
      1);

  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1633
+
1634
+
1635
// Custom call IC for String.prototype.charCodeAt on string receivers.
// Uses StringCharCodeAtGenerator for the fast path; out-of-range indices
// yield NaN (or miss, for the default string stub of a CALL_IC).
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) {
    return isolate()->heap()->undefined_value();
  }

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // For the default string stub of a CALL_IC, treat out-of-range as a
  // plain miss instead of producing NaN inline.
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_ic_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            eax,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                  ebx, edx, edi, name, &miss);

  Register receiver = ebx;
  Register index = edi;
  Register scratch = edx;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    // First (and only used) argument is the index.
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
  } else {
    // charCodeAt() with no argument behaves as charCodeAt(undefined).
    __ Set(index, Immediate(factory()->undefined_value()));
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->nan_value()));
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in ecx.
  __ Set(ecx, Immediate(Handle<String>(name)));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1719
+
1720
+
1721
// Custom call IC for String.prototype.charAt on string receivers.
// Mirrors CompileStringCharCodeAtCall, but returns a one-character string
// and the empty string (instead of NaN) for out-of-range indices.
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) {
    return heap()->undefined_value();
  }

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // For the default string stub of a CALL_IC, treat out-of-range as a
  // plain miss instead of producing "" inline.
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_ic_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            eax,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                  ebx, edx, edi, name, &miss);

  Register receiver = eax;
  Register index = edi;
  Register scratch1 = ebx;
  Register scratch2 = edx;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    // First (and only used) argument is the index.
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
  } else {
    // charAt() with no argument behaves as charAt(undefined).
    __ Set(index, Immediate(factory()->undefined_value()));
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->empty_string()));
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in ecx.
  __ Set(ecx, Immediate(Handle<String>(name)));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1807
+
1808
+
1809
// Custom call IC for String.fromCharCode with exactly one argument.
// Inlines the smi-argument case (code truncated to uint16); non-smi
// arguments fall through to a full call of the JS function.
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Monomorphic receiver check (argc == 1, so the receiver is at
    // esp[2 * kPointerSize]).
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global-call case: verify the global receiver and reload the
    // function from its property cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = ebx;
  __ mov(code, Operand(esp, 1 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(code, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);

  // Convert the smi code to uint16.
  __ and_(code, Immediate(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator char_from_code_generator(code, eax);
  char_from_code_generator.GenerateFast(masm());
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
1882
+
1883
+
1884
// Custom call IC for Math.floor with exactly one argument; requires SSE2.
// Smis are returned unchanged; strictly positive heap numbers are floored
// inline (add/subtract 2^52 trick plus a correction mask); everything
// else — including NaN, negatives and zero — falls back to the slow call.
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (!CpuFeatures::IsSupported(SSE2)) {
    return isolate()->heap()->undefined_value();
  }

  CpuFeatures::Scope use_sse2(SSE2);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Monomorphic receiver check.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global-call case: verify the global receiver and reload the
    // function from its property cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &smi);

  // Check if the argument is a heap number and load its value into xmm0.
  Label slow;
  __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
  __ movdbl(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));

  // Check if the argument is strictly positive. Note this also
  // discards NaN.
  __ xorpd(xmm1, xmm1);
  __ ucomisd(xmm0, xmm1);
  __ j(below_equal, &slow);

  // Do a truncating conversion.
  __ cvttsd2si(eax, Operand(xmm0));

  // Check if the result fits into a smi. Note this also checks for
  // 0x80000000 which signals a failed conversion.
  Label wont_fit_into_smi;
  __ test(eax, Immediate(0xc0000000));
  __ j(not_zero, &wont_fit_into_smi);

  // Smi tag and return.
  __ SmiTag(eax);
  __ bind(&smi);
  __ ret(2 * kPointerSize);

  // Check if the argument is < 2^kMantissaBits.
  // At or above that magnitude every double is already an integer.
  Label already_round;
  __ bind(&wont_fit_into_smi);
  __ LoadPowerOf2(xmm1, ebx, HeapNumber::kMantissaBits);
  __ ucomisd(xmm0, xmm1);
  __ j(above_equal, &already_round);

  // Save a copy of the argument.
  __ movaps(xmm2, xmm0);

  // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
  // This rounds to the nearest integer under the current rounding mode.
  __ addsd(xmm0, xmm1);
  __ subsd(xmm0, xmm1);

  // Compare the argument and the tentative result to get the right mask:
  //   if xmm2 < xmm0:
  //     xmm2 = 1...1
  //   else:
  //     xmm2 = 0...0
  __ cmpltsd(xmm2, xmm0);

  // Subtract 1 if the argument was less than the tentative result.
  __ LoadPowerOf2(xmm1, ebx, 0);
  __ andpd(xmm1, xmm2);
  __ subsd(xmm0, xmm1);

  // Return a new heap number.
  __ AllocateHeapNumber(eax, ebx, edx, &slow);
  __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  __ ret(2 * kPointerSize);

  // Return the argument (when it's an already round heap number).
  __ bind(&already_round);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2012
+
2013
+
2014
// Custom call IC for Math.abs with exactly one argument.
// Smis are negated branchlessly (xor/sub with the sign mask); heap
// numbers get their IEEE sign bit cleared into a fresh heap number.
// The most negative smi and non-number arguments go to the slow call.
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Monomorphic receiver check.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global-call case: verify the global receiver and reload the
    // function from its property cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smi);

  // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ mov(ebx, eax);
  __ sar(ebx, kBitsPerInt - 1);

  // Do bitwise not or do nothing depending on ebx.
  __ xor_(eax, Operand(ebx));

  // Add 1 or do nothing depending on ebx.
  __ sub(eax, Operand(ebx));

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // Smi case done.
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its exponent and
  // sign into ebx.
  __ bind(&not_smi);
  __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
  __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ test(ebx, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ and_(ebx, ~HeapNumber::kSignMask);
  __ mov(ecx, FieldOperand(eax, HeapNumber::kMantissaOffset));
  __ AllocateHeapNumber(eax, edi, edx, &slow);
  __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ebx);
  __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2117
+
2118
+
2119
// Compiles a fast (direct) call into a simple API callback, bypassing
// the general call path. Bails out (returns undefined) for global
// receivers, global-cell calls, non-JSObject receivers, and receivers
// whose prototype depth can't be validated.
MaybeObject* CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return heap()->undefined_value();
  if (cell != NULL) return heap()->undefined_value();
  if (!object->IsJSObject()) return heap()->undefined_value();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      JSObject::cast(object), holder);
  if (depth == kInvalidProtoDepth) return heap()->undefined_value();

  // Two miss labels: before the stack reservation no cleanup is needed;
  // after it the reserved slots must be popped first.
  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss_before_stack_reserved);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_const(), 1);
  __ IncrementCounter(counters->call_const_fast_api(), 1);

  // Allocate space for v8::Arguments implicit values. Must be initialized
  // before calling any runtime function.
  __ sub(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize));

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(JSObject::cast(object), edx, holder,
                  ebx, eax, edi, name, depth, &miss);

  // Move the return address on top of the stack.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), eax);

  // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
  // duplicate of return address and will be overwritten.
  MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
  if (result->IsFailure()) return result;

  __ bind(&miss);
  // Undo the stack reservation before falling into the generic miss path.
  __ add(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize));

  __ bind(&miss_before_stack_reserved);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2179
+
2180
+
2181
// Compiles a call stub for a constant (known) |function| reached through
// |holder|. Tries a custom call generator first, then validates the
// receiver according to |check| (map / string / number / boolean) before
// invoking the function directly.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  // (Skipped for NUMBER_CHECK, where a smi receiver is valid.)
  if (check != NUMBER_CHECK) {
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  SharedFunctionInfo* function_info = function->shared();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(isolate()->counters()->call_const(), 1);

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), edx, holder,
                      ebx, eax, edi, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
        __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a string or a symbol.
        __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
        __ j(above_equal, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ test(edx, Immediate(kSmiTagMask));
        __ j(zero, &fast);
        __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ cmp(edx, factory()->true_value());
        __ j(equal, &fast);
        __ cmp(edx, factory()->false_value());
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2313
+
2314
+
2315
+ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2316
+ JSObject* holder,
2317
+ String* name) {
2318
+ // ----------- S t a t e -------------
2319
+ // -- ecx : name
2320
+ // -- esp[0] : return address
2321
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
2322
+ // -- ...
2323
+ // -- esp[(argc + 1) * 4] : receiver
2324
+ // -----------------------------------
2325
+ Label miss;
2326
+
2327
+ GenerateNameCheck(name, &miss);
2328
+
2329
+ // Get the number of arguments.
2330
+ const int argc = arguments().immediate();
2331
+
2332
+ LookupResult lookup;
2333
+ LookupPostInterceptor(holder, name, &lookup);
2334
+
2335
+ // Get the receiver from the stack.
2336
+ __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
2337
+
2338
+ CallInterceptorCompiler compiler(this, arguments(), ecx);
2339
+ MaybeObject* result = compiler.Compile(masm(),
2340
+ object,
2341
+ holder,
2342
+ name,
2343
+ &lookup,
2344
+ edx,
2345
+ ebx,
2346
+ edi,
2347
+ eax,
2348
+ &miss);
2349
+ if (result->IsFailure()) return result;
2350
+
2351
+ // Restore receiver.
2352
+ __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
2353
+
2354
+ // Check that the function really is a function.
2355
+ __ test(eax, Immediate(kSmiTagMask));
2356
+ __ j(zero, &miss);
2357
+ __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2358
+ __ j(not_equal, &miss);
2359
+
2360
+ // Patch the receiver on the stack with the global proxy if
2361
+ // necessary.
2362
+ if (object->IsGlobalObject()) {
2363
+ __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
2364
+ __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
2365
+ }
2366
+
2367
+ // Invoke the function.
2368
+ __ mov(edi, eax);
2369
+ __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);
2370
+
2371
+ // Handle load cache miss.
2372
+ __ bind(&miss);
2373
+ MaybeObject* maybe_result = GenerateMissBranch();
2374
+ if (maybe_result->IsFailure()) return maybe_result;
2375
+
2376
+ // Return the generated code.
2377
+ return GetCode(INTERCEPTOR, name);
2378
+ }
2379
+
2380
+
2381
// Compiles a call IC stub for a function stored in a global property cell
// (e.g. calling a global function by name).  First tries a custom (inlined)
// call generator; otherwise checks the receiver/holder maps, loads the
// function from the cell, and tail-calls its code.
MaybeObject* CallStubCompiler::CompileCallGlobal(
    JSObject* object,
    GlobalObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name,
    Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  // Loads the function into edi and verifies it matches the expected one.
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy.
  // NOTE(review): edx is assumed to hold the receiver here, loaded by the
  // receiver check above — confirm against GenerateGlobalReceiverCheck.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Setup the context (function already in edi).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1);
  ASSERT(function->is_compiled());
  ParameterCount expected(function->shared()->formal_parameter_count());
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
                  expected, arguments(), JUMP_FUNCTION,
                  NullCallWrapper(), call_kind);
  } else {
    Handle<Code> code(function->code());
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION,
                  NullCallWrapper(), call_kind);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2456
+
2457
+
2458
// Compiles a store IC stub that writes a named property into an in-object
// or fast-properties field.  If |transition| is non-NULL the store also
// transitions the receiver to the new map (adding the property).
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code.  Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(ecx, Immediate(Handle<String>(name)));  // restore name
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  Stub kind records whether this was a plain
  // field store or a map transition.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2487
+
2488
+
2489
// Compiles a store IC stub for a property backed by an AccessorInfo
// callback.  After map/security checks it pushes the four runtime arguments
// (receiver, callback info, name, value) and tail-calls the
// kStoreCallbackProperty runtime entry, which invokes the C++ setter.
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(Immediate(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system; 4 arguments, result size 1.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2538
+
2539
+
2540
// Compiles a store IC stub for a receiver with a named-property setter
// interceptor.  After map/security checks it tail-calls the
// kStoreInterceptorProperty runtime entry with (receiver, name, value,
// strict_mode) so the interceptor runs with the correct language mode.
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(receiver->map())));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(Immediate(Smi::FromInt(strict_mode_)));  // language mode flag
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system; 4 arguments, result size 1.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2588
+
2589
+
2590
// Compiles a store IC stub that writes directly into a global property
// cell.  Misses to the runtime if the global's map changed or the cell
// holds the hole (property was deleted), since reintroducing the property
// must update the global object's property dictionary.
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss);


  // Compute the cell operand to use.  When the snapshot serializer is
  // enabled, the cell address cannot be embedded directly, so go through
  // a register instead.
  Operand cell_operand = Operand::Cell(Handle<JSGlobalPropertyCell>(cell));
  if (Serializer::enabled()) {
    __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
    cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset);
  }

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ cmp(cell_operand, factory()->the_hole_value());
  __ j(equal, &miss);

  // Store the value in the cell.
  __ mov(cell_operand, eax);

  // Return the value (register eax).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2638
+
2639
+
2640
// Compiles a keyed store IC stub specialized for one specific string key:
// it guards on the key in ecx equaling |name| and then performs the same
// field store (optionally with map transition) as the named store stub.
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  // Generate store field code.  Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.  Undo the counter bump so it only counts hits.
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_store_field(), 1);
  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2676
+
2677
+
2678
// Compiles a monomorphic keyed store stub for fast-elements receivers:
// dispatches on the receiver map to a shared KeyedStoreFastElementStub,
// and misses otherwise.
MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
    Map* receiver_map) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  // JSArray receivers need length updates, so the stub is specialized on it.
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
  MaybeObject* maybe_stub =
      KeyedStoreFastElementStub(is_js_array).TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  // Jump to the shared stub if the receiver's map matches; falls through on
  // mismatch (and on smi, per DO_SMI_CHECK).
  __ DispatchMap(edx,
                 Handle<Map>(receiver_map),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
2702
+
2703
+
2704
// Compiles a polymorphic keyed store stub: compares the receiver's map
// against each entry of |receiver_maps| and jumps to the matching handler
// IC from |handler_ics| (parallel lists); misses if none match.
MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
    MapList* receiver_maps,
    CodeList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;
  __ JumpIfSmi(edx, &miss);

  Register map_reg = ebx;
  __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  // Linear map dispatch: one compare-and-branch per known receiver map.
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map(receiver_maps->at(current));
    __ cmp(map_reg, map);
    __ j(equal, Handle<Code>(handler_ics->at(current)));
  }
  __ bind(&miss);
  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL, MEGAMORPHIC);
}
2731
+
2732
+
2733
// Compiles a load IC stub for a property known to be absent along the
// whole prototype chain from |object| up to |last|.  If the chain is
// unchanged (and, for a global |last|, the property cell stays empty),
// the stub returns undefined without entering the runtime.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &miss);

  ASSERT(last->IsGlobalObject() || last->HasFastProperties());

  // Check the maps of the full prototype chain. Also check that
  // global property cells up to (but not including) the last object
  // in the prototype chain are empty.
  CheckPrototypes(object, eax, last, ebx, edx, edi, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  edx,
                                                  &miss);
    if (cell->IsFailure()) {
      // Abandon the partially-bound label before bailing out.
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NONEXISTENT, isolate()->heap()->empty_string());
}
2779
+
2780
+
2781
// Compiles a load IC stub that reads a fast-properties field at |index|
// from |holder|, reached from |object| via a checked prototype chain.
MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, eax, ebx, edx, edi, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2799
+
2800
+
2801
// Compiles a load IC stub for a property backed by an AccessorInfo
// callback.  The callback invocation code can itself fail to allocate,
// in which case the failure is propagated to the caller.
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  MaybeObject* result = GenerateLoadCallback(object, holder, eax, ecx, ebx, edx,
                                             edi, callback, name, &miss);
  if (result->IsFailure()) {
    // Abandon the partially-bound label before bailing out.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2825
+
2826
+
2827
// Compiles a load IC stub that returns a constant |value| (typically a
// constant function) after verifying the prototype chain maps.
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, eax, ebx, edx, edi, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2845
+
2846
+
2847
// Compiles a load IC stub for a property provided by a named-property
// interceptor, specializing on whatever the post-interceptor lookup finds.
MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // TODO(368): Compile in the whole chain: all the interceptors in
  // prototypes and ultimate answer.
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          eax,
                          ecx,
                          edx,
                          ebx,
                          edi,
                          name,
                          &miss);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2879
+
2880
+
2881
// Compiles a load IC stub that reads a global property straight out of its
// property cell.  For deletable properties the hole value triggers a miss;
// for DontDelete properties the hole is impossible and only asserted in
// debug builds.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual loads. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, eax, holder, ebx, edx, edi, name, &miss);

  // Get the value from the cell.  Under the serializer the cell address
  // cannot be embedded as an absolute operand, so load it via a register.
  if (Serializer::enabled()) {
    __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
    __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(ebx, Operand::Cell(Handle<JSGlobalPropertyCell>(cell)));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(ebx, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(ebx, factory()->the_hole_value());
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ mov(eax, ebx);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2933
+
2934
+
2935
// Compiles a keyed load stub specialized for one specific string key:
// guards on the key in eax equaling |name|, then performs a field load.
MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                     JSObject* receiver,
                                                     JSObject* holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_field(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);

  // On miss, undo the counter bump so it only counts successful loads.
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_field(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2962
+
2963
+
2964
// Compiles a keyed load stub for a callback-backed property under one
// specific string key; propagates allocation failures from the callback
// code generator.
MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
    String* name,
    JSObject* receiver,
    JSObject* holder,
    AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_callback(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  MaybeObject* result = GenerateLoadCallback(receiver, holder, edx, eax, ebx,
                                             ecx, edi, callback, name, &miss);
  if (result->IsFailure()) {
    // Abandon the partially-bound label before bailing out.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);

  __ DecrementCounter(counters->keyed_load_callback(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2998
+
2999
+
3000
// Compiles a keyed load stub that returns a constant |value| for one
// specific string key after the usual map checks.
MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                        JSObject* receiver,
                                                        JSObject* holder,
                                                        Object* value) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_constant_function(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
                       value, name, &miss);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_constant_function(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
3027
+
3028
+
3029
// Compiles a keyed load stub for an interceptor-provided property under
// one specific string key.
MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                           JSObject* holder,
                                                           String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_interceptor(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          edx,
                          eax,
                          ecx,
                          ebx,
                          edi,
                          name,
                          &miss);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_interceptor(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
3065
+
3066
+
3067
// Compiles a keyed load stub for the "length" key on JSArray receivers.
MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_array_length(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  GenerateLoadArrayLength(masm(), edx, ecx, &miss);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_array_length(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
3090
+
3091
+
3092
// Compiles a keyed load stub for the "length" key on string receivers.
MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  // Last argument enables the string-wrapper (support_wrappers) path —
  // NOTE(review): confirm against GenerateLoadStringLength's signature.
  GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_string_length(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
3115
+
3116
+
3117
// Compiles a keyed load stub for the "prototype" key on function receivers.
MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss);

  GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
3140
+
3141
+
3142
// Compiles a monomorphic keyed load stub for fast-elements receivers:
// dispatches on the receiver map to the shared KeyedLoadFastElementStub,
// and misses otherwise.
MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  // Jump to the shared stub if the receiver's map matches; falls through on
  // mismatch (and on smi, per DO_SMI_CHECK).
  __ DispatchMap(edx,
                 Handle<Map>(receiver_map),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3161
+
3162
+
3163
// Compiles a polymorphic keyed load stub: compares the receiver's map
// against each entry of |receiver_maps| and jumps to the matching handler
// IC from |handler_ics| (parallel lists); misses if none match.
MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
    MapList* receiver_maps,
    CodeList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;
  __ JumpIfSmi(edx, &miss);

  Register map_reg = ebx;
  __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  // Linear map dispatch: one compare-and-branch per known receiver map.
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map(receiver_maps->at(current));
    __ cmp(map_reg, map);
    __ j(equal, Handle<Code>(handler_ics->at(current)));
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL, MEGAMORPHIC);
}
3189
+
3190
+
3191
// Specialized stub for constructing objects from functions which have only
// simple assignments of the form this.x = ...; in their body.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- edi    : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_stub_call;
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(ebx, factory()->undefined_value());
  __ j(not_equal, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
  // Will both indicate a NULL and a Smi.
  __ test(ebx, Immediate(kSmiTagMask));
  __ j(zero, &generic_stub_call);
  __ CmpObjectType(ebx, MAP_TYPE, ecx);
  __ j(not_equal, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // edi: constructor
  // ebx: initial map
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ Assert(not_equal, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject on the heap by moving the new space allocation
  // top forward.
  // edi: constructor
  // ebx: initial map
  // Instance size is stored in words; scale to bytes for the allocator.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
  __ shl(ecx, kPointerSizeLog2);
  // On allocation failure this jumps to the generic stub instead.
  __ AllocateInNewSpace(ecx,
                        edx,
                        ecx,
                        no_reg,
                        &generic_stub_call,
                        NO_ALLOCATION_FLAGS);

  // Allocated the JSObject, now initialize the fields and add the heap tag.
  // ebx: initial map
  // edx: JSObject (untagged)
  __ mov(Operand(edx, JSObject::kMapOffset), ebx);
  __ mov(ebx, factory()->empty_fixed_array());
  __ mov(Operand(edx, JSObject::kPropertiesOffset), ebx);
  __ mov(Operand(edx, JSObject::kElementsOffset), ebx);

  // Push the allocated object to the stack. This is the object that will be
  // returned (after it is tagged).
  __ push(edx);

  // eax: argc
  // edx: JSObject (untagged)
  // Load the address of the first in-object property into edx.
  __ lea(edx, Operand(edx, JSObject::kHeaderSize));
  // Calculate the location of the first argument. The stack contains the
  // allocated object and the return address on top of the argc arguments.
  __ lea(ecx, Operand(esp, eax, times_4, 1 * kPointerSize));

  // Use edi for holding undefined which is used in several places below.
  __ mov(edi, factory()->undefined_value());

  // eax: argc
  // ecx: first argument
  // edx: first in-object property of the JSObject
  // edi: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  SharedFunctionInfo* shared = function->shared();
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      // Check if the argument assigned to the property is actually passed.
      // If argument is not passed the property is set to undefined,
      // otherwise find it on the stack.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      // Default to undefined; overwrite below only if the arg was passed.
      __ mov(ebx, edi);
      __ cmp(eax, arg_number);
      if (CpuFeatures::IsSupported(CMOV)) {
        CpuFeatures::Scope use_cmov(CMOV);
        // Arguments grow toward lower addresses from ecx, hence the
        // negative per-argument offset.
        __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize));
      } else {
        // No CMOV available: emit an explicit conditional load instead.
        Label not_passed;
        __ j(below_equal, &not_passed);
        __ mov(ebx, Operand(ecx, arg_number * -kPointerSize));
        __ bind(&not_passed);
      }
      // Store value in the property.
      __ mov(Operand(edx, i * kPointerSize), ebx);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(Operand(edx, i * kPointerSize), Immediate(constant));
    }
  }

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
       i++) {
    __ mov(Operand(edx, i * kPointerSize), edi);
  }

  // Move argc to ebx and retrieve and tag the JSObject to return.
  __ mov(ebx, eax);
  __ pop(eax);
  // Or-ing in the tag turns the raw allocation address into a heap pointer.
  __ or_(Operand(eax), Immediate(kHeapObjectTag));

  // Remove caller arguments and receiver from the stack and return.
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
  __ push(ecx);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ IncrementCounter(counters->constructed_objects_stub(), 1);
  __ ret(0);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Handle<Code> generic_construct_stub =
      isolate()->builtins()->JSConstructStubGeneric();
  __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
3328
+
3329
+
3330
+ MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
3331
+ JSObject*receiver, ExternalArrayType array_type) {
3332
+ // ----------- S t a t e -------------
3333
+ // -- eax : key
3334
+ // -- edx : receiver
3335
+ // -- esp[0] : return address
3336
+ // -----------------------------------
3337
+ MaybeObject* maybe_stub =
3338
+ KeyedLoadExternalArrayStub(array_type).TryGetCode();
3339
+ Code* stub;
3340
+ if (!maybe_stub->To(&stub)) return maybe_stub;
3341
+ __ DispatchMap(edx,
3342
+ Handle<Map>(receiver->map()),
3343
+ Handle<Code>(stub),
3344
+ DO_SMI_CHECK);
3345
+
3346
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3347
+ __ jmp(ic, RelocInfo::CODE_TARGET);
3348
+
3349
+ // Return the generated code.
3350
+ return GetCode();
3351
+ }
3352
+
3353
+
3354
+ MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
3355
+ JSObject* receiver, ExternalArrayType array_type) {
3356
+ // ----------- S t a t e -------------
3357
+ // -- eax : value
3358
+ // -- ecx : key
3359
+ // -- edx : receiver
3360
+ // -- esp[0] : return address
3361
+ // -----------------------------------
3362
+ MaybeObject* maybe_stub =
3363
+ KeyedStoreExternalArrayStub(array_type).TryGetCode();
3364
+ Code* stub;
3365
+ if (!maybe_stub->To(&stub)) return maybe_stub;
3366
+ __ DispatchMap(edx,
3367
+ Handle<Map>(receiver->map()),
3368
+ Handle<Code>(stub),
3369
+ DO_SMI_CHECK);
3370
+
3371
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3372
+ __ jmp(ic, RelocInfo::CODE_TARGET);
3373
+
3374
+ return GetCode();
3375
+ }
3376
+
3377
+
3378
+ #undef __
3379
+ #define __ ACCESS_MASM(masm)
3380
+
3381
+
3382
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
    MacroAssembler* masm,
    ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss_force_generic, failed_allocation, slow;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &miss_force_generic);

  // Check that the index is in range.
  __ mov(ecx, eax);
  __ SmiUntag(ecx);  // Untag the index.
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &miss_force_generic);
  __ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset));
  // ebx: base pointer of external storage
  // Element loads: small integer types go straight into eax (sign- or
  // zero-extended as appropriate); 32-bit ints go to ecx (may need boxing);
  // floats/doubles are loaded onto the x87 FPU stack.
  switch (array_type) {
    case kExternalByteArray:
      __ movsx_b(eax, Operand(ebx, ecx, times_1, 0));
      break;
    case kExternalUnsignedByteArray:
    case kExternalPixelArray:
      __ movzx_b(eax, Operand(ebx, ecx, times_1, 0));
      break;
    case kExternalShortArray:
      __ movsx_w(eax, Operand(ebx, ecx, times_2, 0));
      break;
    case kExternalUnsignedShortArray:
      __ movzx_w(eax, Operand(ebx, ecx, times_2, 0));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ mov(ecx, Operand(ebx, ecx, times_4, 0));
      break;
    case kExternalFloatArray:
      __ fld_s(Operand(ebx, ecx, times_4, 0));
      break;
    case kExternalDoubleArray:
      __ fld_d(Operand(ebx, ecx, times_8, 0));
      break;
    default:
      UNREACHABLE();
      break;
  }

  // For integer array types:
  // ecx: value
  // For floating-point array type:
  // FP(0): value

  if (array_type == kExternalIntArray ||
      array_type == kExternalUnsignedIntArray) {
    // For the Int and UnsignedInt array types, we need to see whether
    // the value can be represented in a Smi. If not, we need to convert
    // it to a HeapNumber.
    Label box_int;
    if (array_type == kExternalIntArray) {
      // Values with the top two bits differing don't fit in a smi
      // (presumably smis carry 31 payload bits on ia32 — the 0xC0000000
      // threshold is consistent with that).
      __ cmp(ecx, 0xC0000000);
      __ j(sign, &box_int);
    } else {
      ASSERT_EQ(array_type, kExternalUnsignedIntArray);
      // The test is different for unsigned int values. Since we need
      // the value to be in the range of a positive smi, we can't
      // handle either of the top two bits being set in the value.
      __ test(ecx, Immediate(0xC0000000));
      __ j(not_zero, &box_int);
    }

    // Fits in a smi: tag and return.
    __ mov(eax, ecx);
    __ SmiTag(eax);
    __ ret(0);

    __ bind(&box_int);

    // Allocate a HeapNumber for the int and perform int-to-double
    // conversion.
    if (array_type == kExternalIntArray) {
      __ push(ecx);
      __ fild_s(Operand(esp, 0));
      __ pop(ecx);
    } else {
      ASSERT(array_type == kExternalUnsignedIntArray);
      // Need to zero-extend the value.
      // There's no fild variant for unsigned values, so zero-extend
      // to a 64-bit int manually.
      __ push(Immediate(0));
      __ push(ecx);
      __ fild_d(Operand(esp, 0));
      __ pop(ecx);
      __ pop(ecx);
    }
    // FP(0): value
    __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
    // Set the value.
    __ mov(eax, ecx);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ ret(0);
  } else if (array_type == kExternalFloatArray ||
             array_type == kExternalDoubleArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
    __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
    // Set the value.
    __ mov(eax, ecx);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ ret(0);
  } else {
    // Byte/short/pixel loads always fit in a smi; tag the loaded value.
    __ SmiTag(eax);
    __ ret(0);
  }

  // If we fail allocation of the HeapNumber, we still have a value on
  // top of the FPU stack. Remove it.
  __ bind(&failed_allocation);
  __ ffree();
  __ fincstp();
  // Fall through to slow case.

  // Slow case: Jump to runtime.
  __ bind(&slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);

  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  // Miss case: Jump to runtime.
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}
3536
+
3537
+
3538
void KeyedStoreStubCompiler::GenerateStoreExternalArray(
    MacroAssembler* masm,
    ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  // (NOTE(review): the original header comment listed eax as the key, but
  // the code below smi-checks ecx as the key and stores eax as the value,
  // matching the slow-path state comments.)
  Label miss_force_generic, slow, check_heap_number;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss_force_generic);

  // Check that the index is in range.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ cmp(ebx, FieldOperand(edi, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // eax: value
  // edx: receiver
  // ecx: key
  // edi: elements array
  // ebx: untagged index
  __ test(eax, Immediate(kSmiTagMask));
  // Pixel arrays have no heap-number fast path (see TODO below).
  if (array_type == kExternalPixelArray)
    __ j(not_equal, &slow);
  else
    __ j(not_equal, &check_heap_number);

  // smi case
  __ mov(ecx, eax);  // Preserve the value in eax. Key is no longer needed.
  __ SmiUntag(ecx);
  __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
  // ecx: untagged value
  // edi: base pointer of external storage
  switch (array_type) {
    case kExternalPixelArray:
      {  // Clamp the value to [0..255].
        Label done;
        __ test(ecx, Immediate(0xFFFFFF00));
        __ j(zero, &done, Label::kNear);
        __ setcc(negative, ecx);  // 1 if negative, 0 if positive.
        __ dec_b(ecx);  // 0 if negative, 255 if positive.
        __ bind(&done);
      }
      __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
      break;
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ mov_w(Operand(edi, ebx, times_2, 0), ecx);
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ mov(Operand(edi, ebx, times_4, 0), ecx);
      break;
    case kExternalFloatArray:
    case kExternalDoubleArray:
      // Need to perform int-to-float conversion.
      __ push(ecx);
      __ fild_s(Operand(esp, 0));
      __ pop(ecx);
      if (array_type == kExternalFloatArray) {
        __ fstp_s(Operand(edi, ebx, times_4, 0));
      } else {  // array_type == kExternalDoubleArray.
        __ fstp_d(Operand(edi, ebx, times_8, 0));
      }
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ ret(0);  // Return the original value.

  // TODO(danno): handle heap number -> pixel array conversion
  if (array_type != kExternalPixelArray) {
    __ bind(&check_heap_number);
    // eax: value
    // edx: receiver
    // ecx: key
    // edi: elements array
    // ebx: untagged index
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, &slow);

    // The WebGL specification leaves the behavior of storing NaN and
    // +/-Infinity into integer arrays basically undefined. For more
    // reproducible behavior, convert these to zero.
    __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
    // ebx: untagged index
    // edi: base pointer of external storage
    if (array_type == kExternalFloatArray) {
      __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
      __ fstp_s(Operand(edi, ebx, times_4, 0));
      __ ret(0);
    } else if (array_type == kExternalDoubleArray) {
      __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
      __ fstp_d(Operand(edi, ebx, times_8, 0));
      __ ret(0);
    } else {
      // Perform float-to-int conversion with truncation (round-to-zero)
      // behavior.

      // For the moment we make the slow call to the runtime on
      // processors that don't support SSE2. The code in IntegerConvert
      // (code-stubs-ia32.cc) is roughly what is needed here though the
      // conversion failure case does not need to be handled.
      if (CpuFeatures::IsSupported(SSE2)) {
        if (array_type != kExternalIntArray &&
            array_type != kExternalUnsignedIntArray) {
          // Narrow element types: cvttsd2si truncation suffices since the
          // stored value is taken mod 2^n by the narrow store below.
          ASSERT(CpuFeatures::IsSupported(SSE2));
          CpuFeatures::Scope scope(SSE2);
          __ cvttsd2si(ecx, FieldOperand(eax, HeapNumber::kValueOffset));
          // ecx: untagged integer value
          switch (array_type) {
            case kExternalPixelArray:
              {  // Clamp the value to [0..255].
                Label done;
                __ test(ecx, Immediate(0xFFFFFF00));
                __ j(zero, &done, Label::kNear);
                __ setcc(negative, ecx);  // 1 if negative, 0 if positive.
                __ dec_b(ecx);  // 0 if negative, 255 if positive.
                __ bind(&done);
              }
              __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
              break;
            case kExternalByteArray:
            case kExternalUnsignedByteArray:
              __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
              break;
            case kExternalShortArray:
            case kExternalUnsignedShortArray:
              __ mov_w(Operand(edi, ebx, times_2, 0), ecx);
              break;
            default:
              UNREACHABLE();
              break;
          }
        } else {
          if (CpuFeatures::IsSupported(SSE3)) {
            CpuFeatures::Scope scope(SSE3);
            // fisttp stores values as signed integers. To represent the
            // entire range of int and unsigned int arrays, store as a
            // 64-bit int and discard the high 32 bits.
            // If the value is NaN or +/-infinity, the result is 0x80000000,
            // which is automatically zero when taken mod 2^n, n < 32.
            __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
            __ sub(Operand(esp), Immediate(2 * kPointerSize));
            __ fisttp_d(Operand(esp, 0));
            // Low 32 bits are the stored value; drop the high 32 bits.
            __ pop(ecx);
            __ add(Operand(esp), Immediate(kPointerSize));
          } else {
            ASSERT(CpuFeatures::IsSupported(SSE2));
            CpuFeatures::Scope scope(SSE2);
            // We can easily implement the correct rounding behavior for the
            // range [0, 2^31-1]. For the time being, to keep this code simple,
            // make the slow runtime call for values outside this range.
            // Note: we could do better for signed int arrays.
            __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
            // We will need the key if we have to make the slow runtime call.
            __ push(ecx);
            __ LoadPowerOf2(xmm1, ecx, 31);
            __ pop(ecx);
            __ ucomisd(xmm1, xmm0);
            // Values >= 2^31 (or NaN) go to the runtime.
            __ j(above_equal, &slow);
            __ cvttsd2si(ecx, Operand(xmm0));
          }
          // ecx: untagged integer value
          __ mov(Operand(edi, ebx, times_4, 0), ecx);
        }
        __ ret(0);  // Return original value.
      }
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_external_array_slow(), 1);

  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}
3752
+
3753
+
3754
+
3755
+
3756
+ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
3757
+ // ----------- S t a t e -------------
3758
+ // -- eax : key
3759
+ // -- edx : receiver
3760
+ // -- esp[0] : return address
3761
+ // -----------------------------------
3762
+ Label miss_force_generic;
3763
+
3764
+ // This stub is meant to be tail-jumped to, the receiver must already
3765
+ // have been verified by the caller to not be a smi.
3766
+
3767
+ // Check that the key is a smi.
3768
+ __ test(eax, Immediate(kSmiTagMask));
3769
+ __ j(not_zero, &miss_force_generic);
3770
+
3771
+ // Get the elements array.
3772
+ __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
3773
+ __ AssertFastElements(ecx);
3774
+
3775
+ // Check that the key is within bounds.
3776
+ __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
3777
+ __ j(above_equal, &miss_force_generic);
3778
+
3779
+ // Load the result and make sure it's not the hole.
3780
+ __ mov(ebx, Operand(ecx, eax, times_2,
3781
+ FixedArray::kHeaderSize - kHeapObjectTag));
3782
+ __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
3783
+ __ j(equal, &miss_force_generic);
3784
+ __ mov(eax, ebx);
3785
+ __ ret(0);
3786
+
3787
+ __ bind(&miss_force_generic);
3788
+ Handle<Code> miss_ic =
3789
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3790
+ __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3791
+ }
3792
+
3793
+
3794
void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
                                                      bool is_js_array) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  // (NOTE(review): the original header comment listed eax as the key, but
  // the code below smi-checks ecx as the key and stores eax as the value.)
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss_force_generic);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  // (A copy-on-write array has a different map, so this compare rejects it.)
  __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ j(not_equal, &miss_force_generic);

  if (is_js_array) {
    // Check that the key is within bounds. JSArray length may be smaller
    // than the backing store's capacity.
    __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset));  // smis.
    __ j(above_equal, &miss_force_generic);
  } else {
    // Check that the key is within bounds.
    __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));  // smis.
    __ j(above_equal, &miss_force_generic);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register eax (it is the stub's return value), so copy
  // it to edx for the barrier, which may clobber its value register.
  __ mov(edx, Operand(eax));
  // times_2 because the key is a smi (tagged: value << 1).
  __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax);
  __ RecordWrite(edi, 0, edx, ecx);

  // Done.
  __ ret(0);

  // Handle store cache miss, replacing the ic with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
}
3841
+
3842
+
3843
+ #undef __
3844
+
3845
+ } } // namespace v8::internal
3846
+
3847
+ #endif // V8_TARGET_ARCH_IA32