libv8-sgonyea 3.3.10

Files changed (500)
  1. data/.gitignore +8 -0
  2. data/.gitmodules +3 -0
  3. data/Gemfile +4 -0
  4. data/README.md +76 -0
  5. data/Rakefile +113 -0
  6. data/ext/libv8/extconf.rb +28 -0
  7. data/lib/libv8.rb +15 -0
  8. data/lib/libv8/Makefile +30 -0
  9. data/lib/libv8/detect_cpu.rb +27 -0
  10. data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
  11. data/lib/libv8/v8/.gitignore +35 -0
  12. data/lib/libv8/v8/AUTHORS +44 -0
  13. data/lib/libv8/v8/ChangeLog +2839 -0
  14. data/lib/libv8/v8/LICENSE +52 -0
  15. data/lib/libv8/v8/LICENSE.strongtalk +29 -0
  16. data/lib/libv8/v8/LICENSE.v8 +26 -0
  17. data/lib/libv8/v8/LICENSE.valgrind +45 -0
  18. data/lib/libv8/v8/SConstruct +1478 -0
  19. data/lib/libv8/v8/build/README.txt +49 -0
  20. data/lib/libv8/v8/build/all.gyp +18 -0
  21. data/lib/libv8/v8/build/armu.gypi +32 -0
  22. data/lib/libv8/v8/build/common.gypi +144 -0
  23. data/lib/libv8/v8/build/gyp_v8 +145 -0
  24. data/lib/libv8/v8/include/v8-debug.h +395 -0
  25. data/lib/libv8/v8/include/v8-preparser.h +117 -0
  26. data/lib/libv8/v8/include/v8-profiler.h +505 -0
  27. data/lib/libv8/v8/include/v8-testing.h +104 -0
  28. data/lib/libv8/v8/include/v8.h +4124 -0
  29. data/lib/libv8/v8/include/v8stdint.h +53 -0
  30. data/lib/libv8/v8/preparser/SConscript +38 -0
  31. data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
  32. data/lib/libv8/v8/src/SConscript +368 -0
  33. data/lib/libv8/v8/src/accessors.cc +767 -0
  34. data/lib/libv8/v8/src/accessors.h +123 -0
  35. data/lib/libv8/v8/src/allocation-inl.h +49 -0
  36. data/lib/libv8/v8/src/allocation.cc +122 -0
  37. data/lib/libv8/v8/src/allocation.h +143 -0
  38. data/lib/libv8/v8/src/api.cc +5845 -0
  39. data/lib/libv8/v8/src/api.h +574 -0
  40. data/lib/libv8/v8/src/apinatives.js +110 -0
  41. data/lib/libv8/v8/src/apiutils.h +73 -0
  42. data/lib/libv8/v8/src/arguments.h +118 -0
  43. data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
  44. data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
  45. data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
  46. data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
  47. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
  48. data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
  49. data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
  50. data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
  51. data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
  52. data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
  53. data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
  54. data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
  55. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
  56. data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
  57. data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
  58. data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
  59. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
  60. data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
  61. data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
  62. data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
  63. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
  64. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
  65. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  66. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
  67. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
  68. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
  69. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  70. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  71. data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
  72. data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
  73. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
  74. data/lib/libv8/v8/src/array.js +1366 -0
  75. data/lib/libv8/v8/src/assembler.cc +1207 -0
  76. data/lib/libv8/v8/src/assembler.h +858 -0
  77. data/lib/libv8/v8/src/ast-inl.h +112 -0
  78. data/lib/libv8/v8/src/ast.cc +1146 -0
  79. data/lib/libv8/v8/src/ast.h +2188 -0
  80. data/lib/libv8/v8/src/atomicops.h +167 -0
  81. data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
  82. data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
  84. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
  85. data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
  86. data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
  87. data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
  88. data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
  89. data/lib/libv8/v8/src/bignum.cc +768 -0
  90. data/lib/libv8/v8/src/bignum.h +140 -0
  91. data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
  92. data/lib/libv8/v8/src/bootstrapper.h +188 -0
  93. data/lib/libv8/v8/src/builtins.cc +1707 -0
  94. data/lib/libv8/v8/src/builtins.h +371 -0
  95. data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
  96. data/lib/libv8/v8/src/cached-powers.cc +177 -0
  97. data/lib/libv8/v8/src/cached-powers.h +65 -0
  98. data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
  99. data/lib/libv8/v8/src/char-predicates.h +67 -0
  100. data/lib/libv8/v8/src/checks.cc +110 -0
  101. data/lib/libv8/v8/src/checks.h +296 -0
  102. data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
  103. data/lib/libv8/v8/src/circular-queue.cc +122 -0
  104. data/lib/libv8/v8/src/circular-queue.h +103 -0
  105. data/lib/libv8/v8/src/code-stubs.cc +267 -0
  106. data/lib/libv8/v8/src/code-stubs.h +1011 -0
  107. data/lib/libv8/v8/src/code.h +70 -0
  108. data/lib/libv8/v8/src/codegen.cc +231 -0
  109. data/lib/libv8/v8/src/codegen.h +84 -0
  110. data/lib/libv8/v8/src/compilation-cache.cc +540 -0
  111. data/lib/libv8/v8/src/compilation-cache.h +287 -0
  112. data/lib/libv8/v8/src/compiler.cc +786 -0
  113. data/lib/libv8/v8/src/compiler.h +312 -0
  114. data/lib/libv8/v8/src/contexts.cc +347 -0
  115. data/lib/libv8/v8/src/contexts.h +391 -0
  116. data/lib/libv8/v8/src/conversions-inl.h +106 -0
  117. data/lib/libv8/v8/src/conversions.cc +1131 -0
  118. data/lib/libv8/v8/src/conversions.h +135 -0
  119. data/lib/libv8/v8/src/counters.cc +93 -0
  120. data/lib/libv8/v8/src/counters.h +254 -0
  121. data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
  122. data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
  123. data/lib/libv8/v8/src/cpu-profiler.h +302 -0
  124. data/lib/libv8/v8/src/cpu.h +69 -0
  125. data/lib/libv8/v8/src/d8-debug.cc +367 -0
  126. data/lib/libv8/v8/src/d8-debug.h +158 -0
  127. data/lib/libv8/v8/src/d8-posix.cc +695 -0
  128. data/lib/libv8/v8/src/d8-readline.cc +130 -0
  129. data/lib/libv8/v8/src/d8-windows.cc +42 -0
  130. data/lib/libv8/v8/src/d8.cc +803 -0
  131. data/lib/libv8/v8/src/d8.gyp +91 -0
  132. data/lib/libv8/v8/src/d8.h +235 -0
  133. data/lib/libv8/v8/src/d8.js +2798 -0
  134. data/lib/libv8/v8/src/data-flow.cc +66 -0
  135. data/lib/libv8/v8/src/data-flow.h +205 -0
  136. data/lib/libv8/v8/src/date.js +1103 -0
  137. data/lib/libv8/v8/src/dateparser-inl.h +127 -0
  138. data/lib/libv8/v8/src/dateparser.cc +178 -0
  139. data/lib/libv8/v8/src/dateparser.h +266 -0
  140. data/lib/libv8/v8/src/debug-agent.cc +447 -0
  141. data/lib/libv8/v8/src/debug-agent.h +129 -0
  142. data/lib/libv8/v8/src/debug-debugger.js +2569 -0
  143. data/lib/libv8/v8/src/debug.cc +3165 -0
  144. data/lib/libv8/v8/src/debug.h +1057 -0
  145. data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
  146. data/lib/libv8/v8/src/deoptimizer.h +602 -0
  147. data/lib/libv8/v8/src/disasm.h +80 -0
  148. data/lib/libv8/v8/src/disassembler.cc +343 -0
  149. data/lib/libv8/v8/src/disassembler.h +58 -0
  150. data/lib/libv8/v8/src/diy-fp.cc +58 -0
  151. data/lib/libv8/v8/src/diy-fp.h +117 -0
  152. data/lib/libv8/v8/src/double.h +238 -0
  153. data/lib/libv8/v8/src/dtoa.cc +103 -0
  154. data/lib/libv8/v8/src/dtoa.h +85 -0
  155. data/lib/libv8/v8/src/execution.cc +849 -0
  156. data/lib/libv8/v8/src/execution.h +297 -0
  157. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
  158. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
  159. data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
  160. data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
  161. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
  162. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
  163. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
  164. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
  165. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
  166. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
  167. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
  168. data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
  169. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
  170. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
  171. data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
  172. data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
  173. data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
  174. data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
  175. data/lib/libv8/v8/src/factory.cc +1222 -0
  176. data/lib/libv8/v8/src/factory.h +442 -0
  177. data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
  178. data/lib/libv8/v8/src/fast-dtoa.h +83 -0
  179. data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
  180. data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
  181. data/lib/libv8/v8/src/flag-definitions.h +560 -0
  182. data/lib/libv8/v8/src/flags.cc +551 -0
  183. data/lib/libv8/v8/src/flags.h +79 -0
  184. data/lib/libv8/v8/src/frames-inl.h +247 -0
  185. data/lib/libv8/v8/src/frames.cc +1243 -0
  186. data/lib/libv8/v8/src/frames.h +870 -0
  187. data/lib/libv8/v8/src/full-codegen.cc +1374 -0
  188. data/lib/libv8/v8/src/full-codegen.h +771 -0
  189. data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
  190. data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
  191. data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
  192. data/lib/libv8/v8/src/gdb-jit.h +143 -0
  193. data/lib/libv8/v8/src/global-handles.cc +665 -0
  194. data/lib/libv8/v8/src/global-handles.h +284 -0
  195. data/lib/libv8/v8/src/globals.h +325 -0
  196. data/lib/libv8/v8/src/handles-inl.h +177 -0
  197. data/lib/libv8/v8/src/handles.cc +987 -0
  198. data/lib/libv8/v8/src/handles.h +382 -0
  199. data/lib/libv8/v8/src/hashmap.cc +230 -0
  200. data/lib/libv8/v8/src/hashmap.h +123 -0
  201. data/lib/libv8/v8/src/heap-inl.h +704 -0
  202. data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
  203. data/lib/libv8/v8/src/heap-profiler.h +397 -0
  204. data/lib/libv8/v8/src/heap.cc +5930 -0
  205. data/lib/libv8/v8/src/heap.h +2268 -0
  206. data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
  207. data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
  208. data/lib/libv8/v8/src/hydrogen.cc +6239 -0
  209. data/lib/libv8/v8/src/hydrogen.h +1202 -0
  210. data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
  211. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
  212. data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
  213. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
  214. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
  215. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
  216. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
  217. data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
  218. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
  219. data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
  220. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  221. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
  222. data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
  223. data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
  224. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
  225. data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
  226. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
  227. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
  228. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
  229. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  230. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
  231. data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
  232. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
  233. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
  234. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
  235. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  236. data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
  237. data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
  238. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
  239. data/lib/libv8/v8/src/ic-inl.h +130 -0
  240. data/lib/libv8/v8/src/ic.cc +2577 -0
  241. data/lib/libv8/v8/src/ic.h +736 -0
  242. data/lib/libv8/v8/src/inspector.cc +63 -0
  243. data/lib/libv8/v8/src/inspector.h +62 -0
  244. data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
  245. data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
  246. data/lib/libv8/v8/src/isolate-inl.h +50 -0
  247. data/lib/libv8/v8/src/isolate.cc +1869 -0
  248. data/lib/libv8/v8/src/isolate.h +1382 -0
  249. data/lib/libv8/v8/src/json-parser.cc +504 -0
  250. data/lib/libv8/v8/src/json-parser.h +161 -0
  251. data/lib/libv8/v8/src/json.js +342 -0
  252. data/lib/libv8/v8/src/jsregexp.cc +5385 -0
  253. data/lib/libv8/v8/src/jsregexp.h +1492 -0
  254. data/lib/libv8/v8/src/list-inl.h +212 -0
  255. data/lib/libv8/v8/src/list.h +174 -0
  256. data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
  257. data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
  258. data/lib/libv8/v8/src/lithium-allocator.h +630 -0
  259. data/lib/libv8/v8/src/lithium.cc +190 -0
  260. data/lib/libv8/v8/src/lithium.h +597 -0
  261. data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
  262. data/lib/libv8/v8/src/liveedit.cc +1691 -0
  263. data/lib/libv8/v8/src/liveedit.h +180 -0
  264. data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
  265. data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
  266. data/lib/libv8/v8/src/liveobjectlist.h +322 -0
  267. data/lib/libv8/v8/src/log-inl.h +59 -0
  268. data/lib/libv8/v8/src/log-utils.cc +428 -0
  269. data/lib/libv8/v8/src/log-utils.h +231 -0
  270. data/lib/libv8/v8/src/log.cc +1993 -0
  271. data/lib/libv8/v8/src/log.h +476 -0
  272. data/lib/libv8/v8/src/macro-assembler.h +120 -0
  273. data/lib/libv8/v8/src/macros.py +178 -0
  274. data/lib/libv8/v8/src/mark-compact.cc +3143 -0
  275. data/lib/libv8/v8/src/mark-compact.h +506 -0
  276. data/lib/libv8/v8/src/math.js +264 -0
  277. data/lib/libv8/v8/src/messages.cc +179 -0
  278. data/lib/libv8/v8/src/messages.h +113 -0
  279. data/lib/libv8/v8/src/messages.js +1096 -0
  280. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
  281. data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
  282. data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
  283. data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
  284. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
  285. data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
  286. data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
  287. data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
  288. data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
  289. data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
  290. data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
  291. data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
  292. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
  293. data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
  294. data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
  295. data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
  296. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
  297. data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
  298. data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
  299. data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
  300. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
  301. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
  302. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
  303. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
  304. data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
  305. data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
  306. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
  307. data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
  308. data/lib/libv8/v8/src/mksnapshot.cc +328 -0
  309. data/lib/libv8/v8/src/natives.h +64 -0
  310. data/lib/libv8/v8/src/objects-debug.cc +738 -0
  311. data/lib/libv8/v8/src/objects-inl.h +4323 -0
  312. data/lib/libv8/v8/src/objects-printer.cc +829 -0
  313. data/lib/libv8/v8/src/objects-visiting.cc +148 -0
  314. data/lib/libv8/v8/src/objects-visiting.h +424 -0
  315. data/lib/libv8/v8/src/objects.cc +10585 -0
  316. data/lib/libv8/v8/src/objects.h +6838 -0
  317. data/lib/libv8/v8/src/parser.cc +4997 -0
  318. data/lib/libv8/v8/src/parser.h +765 -0
  319. data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
  320. data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
  321. data/lib/libv8/v8/src/platform-linux.cc +1149 -0
  322. data/lib/libv8/v8/src/platform-macos.cc +830 -0
  323. data/lib/libv8/v8/src/platform-nullos.cc +479 -0
  324. data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
  325. data/lib/libv8/v8/src/platform-posix.cc +424 -0
  326. data/lib/libv8/v8/src/platform-solaris.cc +762 -0
  327. data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
  328. data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
  329. data/lib/libv8/v8/src/platform-tls.h +50 -0
  330. data/lib/libv8/v8/src/platform-win32.cc +2021 -0
  331. data/lib/libv8/v8/src/platform.h +667 -0
  332. data/lib/libv8/v8/src/preparse-data-format.h +62 -0
  333. data/lib/libv8/v8/src/preparse-data.cc +183 -0
  334. data/lib/libv8/v8/src/preparse-data.h +225 -0
  335. data/lib/libv8/v8/src/preparser-api.cc +220 -0
  336. data/lib/libv8/v8/src/preparser.cc +1450 -0
  337. data/lib/libv8/v8/src/preparser.h +493 -0
  338. data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
  339. data/lib/libv8/v8/src/prettyprinter.h +223 -0
  340. data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
  341. data/lib/libv8/v8/src/profile-generator.cc +3098 -0
  342. data/lib/libv8/v8/src/profile-generator.h +1126 -0
  343. data/lib/libv8/v8/src/property.cc +105 -0
  344. data/lib/libv8/v8/src/property.h +365 -0
  345. data/lib/libv8/v8/src/proxy.js +83 -0
  346. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  347. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
  348. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
  349. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
  350. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
  351. data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
  352. data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
  353. data/lib/libv8/v8/src/regexp-stack.cc +111 -0
  354. data/lib/libv8/v8/src/regexp-stack.h +147 -0
  355. data/lib/libv8/v8/src/regexp.js +483 -0
  356. data/lib/libv8/v8/src/rewriter.cc +360 -0
  357. data/lib/libv8/v8/src/rewriter.h +50 -0
  358. data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
  359. data/lib/libv8/v8/src/runtime-profiler.h +201 -0
  360. data/lib/libv8/v8/src/runtime.cc +12227 -0
  361. data/lib/libv8/v8/src/runtime.h +652 -0
  362. data/lib/libv8/v8/src/runtime.js +649 -0
  363. data/lib/libv8/v8/src/safepoint-table.cc +256 -0
  364. data/lib/libv8/v8/src/safepoint-table.h +270 -0
  365. data/lib/libv8/v8/src/scanner-base.cc +952 -0
  366. data/lib/libv8/v8/src/scanner-base.h +670 -0
  367. data/lib/libv8/v8/src/scanner.cc +345 -0
  368. data/lib/libv8/v8/src/scanner.h +146 -0
  369. data/lib/libv8/v8/src/scopeinfo.cc +646 -0
  370. data/lib/libv8/v8/src/scopeinfo.h +254 -0
  371. data/lib/libv8/v8/src/scopes.cc +1150 -0
  372. data/lib/libv8/v8/src/scopes.h +507 -0
  373. data/lib/libv8/v8/src/serialize.cc +1574 -0
  374. data/lib/libv8/v8/src/serialize.h +589 -0
  375. data/lib/libv8/v8/src/shell.h +55 -0
  376. data/lib/libv8/v8/src/simulator.h +43 -0
  377. data/lib/libv8/v8/src/small-pointer-list.h +163 -0
  378. data/lib/libv8/v8/src/smart-pointer.h +109 -0
  379. data/lib/libv8/v8/src/snapshot-common.cc +83 -0
  380. data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
  381. data/lib/libv8/v8/src/snapshot.h +91 -0
  382. data/lib/libv8/v8/src/spaces-inl.h +529 -0
  383. data/lib/libv8/v8/src/spaces.cc +3145 -0
  384. data/lib/libv8/v8/src/spaces.h +2369 -0
  385. data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
  386. data/lib/libv8/v8/src/splay-tree.h +205 -0
  387. data/lib/libv8/v8/src/string-search.cc +41 -0
  388. data/lib/libv8/v8/src/string-search.h +568 -0
  389. data/lib/libv8/v8/src/string-stream.cc +592 -0
  390. data/lib/libv8/v8/src/string-stream.h +191 -0
  391. data/lib/libv8/v8/src/string.js +994 -0
  392. data/lib/libv8/v8/src/strtod.cc +440 -0
  393. data/lib/libv8/v8/src/strtod.h +40 -0
  394. data/lib/libv8/v8/src/stub-cache.cc +1965 -0
  395. data/lib/libv8/v8/src/stub-cache.h +924 -0
  396. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
  397. data/lib/libv8/v8/src/token.cc +63 -0
  398. data/lib/libv8/v8/src/token.h +288 -0
  399. data/lib/libv8/v8/src/type-info.cc +507 -0
  400. data/lib/libv8/v8/src/type-info.h +272 -0
  401. data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
  402. data/lib/libv8/v8/src/unbound-queue.h +69 -0
  403. data/lib/libv8/v8/src/unicode-inl.h +238 -0
  404. data/lib/libv8/v8/src/unicode.cc +1624 -0
  405. data/lib/libv8/v8/src/unicode.h +280 -0
  406. data/lib/libv8/v8/src/uri.js +408 -0
  407. data/lib/libv8/v8/src/utils-inl.h +48 -0
  408. data/lib/libv8/v8/src/utils.cc +371 -0
  409. data/lib/libv8/v8/src/utils.h +800 -0
  410. data/lib/libv8/v8/src/v8-counters.cc +62 -0
  411. data/lib/libv8/v8/src/v8-counters.h +314 -0
  412. data/lib/libv8/v8/src/v8.cc +213 -0
  413. data/lib/libv8/v8/src/v8.h +131 -0
  414. data/lib/libv8/v8/src/v8checks.h +64 -0
  415. data/lib/libv8/v8/src/v8dll-main.cc +44 -0
  416. data/lib/libv8/v8/src/v8globals.h +512 -0
  417. data/lib/libv8/v8/src/v8memory.h +82 -0
  418. data/lib/libv8/v8/src/v8natives.js +1310 -0
  419. data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
  420. data/lib/libv8/v8/src/v8threads.cc +464 -0
  421. data/lib/libv8/v8/src/v8threads.h +165 -0
  422. data/lib/libv8/v8/src/v8utils.h +319 -0
  423. data/lib/libv8/v8/src/variables.cc +114 -0
  424. data/lib/libv8/v8/src/variables.h +167 -0
  425. data/lib/libv8/v8/src/version.cc +116 -0
  426. data/lib/libv8/v8/src/version.h +68 -0
  427. data/lib/libv8/v8/src/vm-state-inl.h +138 -0
  428. data/lib/libv8/v8/src/vm-state.h +71 -0
  429. data/lib/libv8/v8/src/win32-headers.h +96 -0
  430. data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
  431. data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
  432. data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
  433. data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
  434. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
  435. data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
  436. data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
  437. data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
  438. data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
  439. data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
  440. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
  441. data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
  442. data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
  443. data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
  444. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
  445. data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
  446. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
  447. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
  448. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  449. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
  450. data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
  451. data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
  452. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
  453. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
  454. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  455. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  456. data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
  457. data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
  458. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
  459. data/lib/libv8/v8/src/zone-inl.h +140 -0
  460. data/lib/libv8/v8/src/zone.cc +196 -0
  461. data/lib/libv8/v8/src/zone.h +240 -0
  462. data/lib/libv8/v8/tools/codemap.js +265 -0
  463. data/lib/libv8/v8/tools/consarray.js +93 -0
  464. data/lib/libv8/v8/tools/csvparser.js +78 -0
  465. data/lib/libv8/v8/tools/disasm.py +92 -0
  466. data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
  467. data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
  468. data/lib/libv8/v8/tools/gcmole/README +62 -0
  469. data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
  470. data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
  471. data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
  472. data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
  473. data/lib/libv8/v8/tools/grokdump.py +841 -0
  474. data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
  475. data/lib/libv8/v8/tools/js2c.py +364 -0
  476. data/lib/libv8/v8/tools/jsmin.py +280 -0
  477. data/lib/libv8/v8/tools/linux-tick-processor +35 -0
  478. data/lib/libv8/v8/tools/ll_prof.py +942 -0
  479. data/lib/libv8/v8/tools/logreader.js +185 -0
  480. data/lib/libv8/v8/tools/mac-nm +18 -0
  481. data/lib/libv8/v8/tools/mac-tick-processor +6 -0
  482. data/lib/libv8/v8/tools/oom_dump/README +31 -0
  483. data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
  484. data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
  485. data/lib/libv8/v8/tools/presubmit.py +305 -0
  486. data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
  487. data/lib/libv8/v8/tools/profile.js +751 -0
  488. data/lib/libv8/v8/tools/profile_view.js +219 -0
  489. data/lib/libv8/v8/tools/run-valgrind.py +77 -0
  490. data/lib/libv8/v8/tools/splaytree.js +316 -0
  491. data/lib/libv8/v8/tools/stats-viewer.py +468 -0
  492. data/lib/libv8/v8/tools/test.py +1510 -0
  493. data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
  494. data/lib/libv8/v8/tools/tickprocessor.js +877 -0
  495. data/lib/libv8/v8/tools/utils.py +96 -0
  496. data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
  497. data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
  498. data/lib/libv8/version.rb +5 -0
  499. data/libv8.gemspec +36 -0
  500. metadata +578 -0
data/lib/libv8/v8/src/arm/simulator-arm.h
@@ -0,0 +1,431 @@
+ // Copyright 2011 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ //     * Redistributions of source code must retain the above copyright
+ //       notice, this list of conditions and the following disclaimer.
+ //     * Redistributions in binary form must reproduce the above
+ //       copyright notice, this list of conditions and the following
+ //       disclaimer in the documentation and/or other materials provided
+ //       with the distribution.
+ //     * Neither the name of Google Inc. nor the names of its
+ //       contributors may be used to endorse or promote products derived
+ //       from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+ // Declares a Simulator for ARM instructions if we are not generating a native
+ // ARM binary. This Simulator allows us to run and debug ARM code generation on
+ // regular desktop machines.
+ // V8 calls into generated code by "calling" the CALL_GENERATED_CODE macro,
+ // which will start execution in the Simulator or forward to the real entry
+ // on an ARM HW platform.
+
+ #ifndef V8_ARM_SIMULATOR_ARM_H_
+ #define V8_ARM_SIMULATOR_ARM_H_
+
+ #include "allocation.h"
+
+ #if !defined(USE_SIMULATOR)
+ // Running without a simulator on a native arm platform.
+
+ namespace v8 {
+ namespace internal {
+
+ // When running without a simulator we call the entry directly.
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
+   (entry(p0, p1, p2, p3, p4))
+
+ typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
+                                   void*, int*, Address, int, Isolate*);
+
+
+ // Call the generated regexp code directly. The code at the entry address
+ // should act as a function matching the type arm_regexp_matcher.
+ // The fifth argument is a dummy that reserves the space used for
+ // the return address added by the ExitFrame in native calls.
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+   (FUNCTION_CAST<arm_regexp_matcher>(entry)( \
+       p0, p1, p2, p3, NULL, p4, p5, p6, p7))
+
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
+   reinterpret_cast<TryCatch*>(try_catch_address)
+
+ // The stack limit beyond which we will throw stack overflow errors in
+ // generated code. Because generated code on arm uses the C stack, we
+ // just use the C stack limit.
+ class SimulatorStack : public v8::internal::AllStatic {
+  public:
+   static inline uintptr_t JsLimitFromCLimit(v8::internal::Isolate* isolate,
+                                             uintptr_t c_limit) {
+     USE(isolate);
+     return c_limit;
+   }
+
+   static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
+     return try_catch_address;
+   }
+
+   static inline void UnregisterCTryCatch() { }
+ };
+
+ } }  // namespace v8::internal
+
+ #else  // !defined(USE_SIMULATOR)
+ // Running with a simulator.
+
+ #include "constants-arm.h"
+ #include "hashmap.h"
+ #include "assembler.h"
+
+ namespace v8 {
+ namespace internal {
+
+ class CachePage {
+  public:
+   static const int LINE_VALID = 0;
+   static const int LINE_INVALID = 1;
+
+   static const int kPageShift = 12;
+   static const int kPageSize = 1 << kPageShift;
+   static const int kPageMask = kPageSize - 1;
+   static const int kLineShift = 2;  // The cache line is only 4 bytes right now.
+   static const int kLineLength = 1 << kLineShift;
+   static const int kLineMask = kLineLength - 1;
+
+   CachePage() {
+     memset(&validity_map_, LINE_INVALID, sizeof(validity_map_));
+   }
+
+   char* ValidityByte(int offset) {
+     return &validity_map_[offset >> kLineShift];
+   }
+
+   char* CachedData(int offset) {
+     return &data_[offset];
+   }
+
+  private:
+   char data_[kPageSize];  // The cached data.
+   static const int kValidityMapSize = kPageSize >> kLineShift;
+   char validity_map_[kValidityMapSize];  // One byte per line.
+ };
+
+
+ class Simulator {
+  public:
+   friend class ArmDebugger;
+   enum Register {
+     no_reg = -1,
+     r0 = 0, r1, r2, r3, r4, r5, r6, r7,
+     r8, r9, r10, r11, r12, r13, r14, r15,
+     num_registers,
+     sp = 13,
+     lr = 14,
+     pc = 15,
+     s0 = 0, s1, s2, s3, s4, s5, s6, s7,
+     s8, s9, s10, s11, s12, s13, s14, s15,
+     s16, s17, s18, s19, s20, s21, s22, s23,
+     s24, s25, s26, s27, s28, s29, s30, s31,
+     num_s_registers = 32,
+     d0 = 0, d1, d2, d3, d4, d5, d6, d7,
+     d8, d9, d10, d11, d12, d13, d14, d15,
+     num_d_registers = 16
+   };
+
+   explicit Simulator(Isolate* isolate);
+   ~Simulator();
+
+   // The currently executing Simulator instance. Potentially there can be one
+   // for each native thread.
+   static Simulator* current(v8::internal::Isolate* isolate);
+
+   // Accessors for register state. Reading the pc value adheres to the ARM
+   // architecture specification and is off by 8 from the currently executing
+   // instruction.
+   void set_register(int reg, int32_t value);
+   int32_t get_register(int reg) const;
+   double get_double_from_register_pair(int reg);
+   void set_dw_register(int dreg, const int* dbl);
+
+   // Support for VFP.
+   void set_s_register(int reg, unsigned int value);
+   unsigned int get_s_register(int reg) const;
+   void set_d_register_from_double(int dreg, const double& dbl);
+   double get_double_from_d_register(int dreg);
+   void set_s_register_from_float(int sreg, const float dbl);
+   float get_float_from_s_register(int sreg);
+   void set_s_register_from_sinteger(int reg, const int value);
+   int get_sinteger_from_s_register(int reg);
+
+   // Special case of set_register and get_register to access the raw PC value.
+   void set_pc(int32_t value);
+   int32_t get_pc() const;
+
+   // Accessor to the internal simulator stack area.
+   uintptr_t StackLimit() const;
+
+   // Executes ARM instructions until the PC reaches end_sim_pc.
+   void Execute();
+
+   // Call on program start.
+   static void Initialize(Isolate* isolate);
+
+   // V8 generally calls into generated JS code with 5 parameters and into
+   // generated RegExp code with 7 parameters. This is a convenience function,
+   // which sets up the simulator state and grabs the result on return.
+   int32_t Call(byte* entry, int argument_count, ...);
+
+   // Push an address onto the JS stack.
+   uintptr_t PushAddress(uintptr_t address);
+
+   // Pop an address from the JS stack.
+   uintptr_t PopAddress();
+
+   // ICache checking.
+   static void FlushICache(v8::internal::HashMap* i_cache, void* start,
+                           size_t size);
+
+   // Returns true if pc register contains one of the 'special_values' defined
+   // below (bad_lr, end_sim_pc).
+   bool has_bad_pc() const;
+
+   // EABI variant for double arguments in use.
+   bool use_eabi_hardfloat() {
+ #if USE_EABI_HARDFLOAT
+     return true;
+ #else
+     return false;
+ #endif
+   }
+
+  private:
+   enum special_values {
+     // Known bad pc value to ensure that the simulator does not execute
+     // without being properly set up.
+     bad_lr = -1,
+     // A pc value used to signal the simulator to stop execution. Generally
+     // the lr is set to this value on transition from native C code to
+     // simulated execution, so that the simulator can "return" to the native
+     // C code.
+     end_sim_pc = -2
+   };
+
+   // Unsupported instructions use Format to print an error and stop execution.
+   void Format(Instruction* instr, const char* format);
+
+   // Checks if the current instruction should be executed based on its
+   // condition bits.
+   bool ConditionallyExecute(Instruction* instr);
+
+   // Helper functions to set the conditional flags in the architecture state.
+   void SetNZFlags(int32_t val);
+   void SetCFlag(bool val);
+   void SetVFlag(bool val);
+   bool CarryFrom(int32_t left, int32_t right, int32_t carry = 0);
+   bool BorrowFrom(int32_t left, int32_t right);
+   bool OverflowFrom(int32_t alu_out,
+                     int32_t left,
+                     int32_t right,
+                     bool addition);
+
+   inline int GetCarry() {
+     return c_flag_ ? 1 : 0;
+   };
+
+   // Support for VFP.
+   void Compute_FPSCR_Flags(double val1, double val2);
+   void Copy_FPSCR_to_APSR();
+
+   // Helper functions to decode common "addressing" modes.
+   int32_t GetShiftRm(Instruction* instr, bool* carry_out);
+   int32_t GetImm(Instruction* instr, bool* carry_out);
+   void ProcessPUW(Instruction* instr,
+                   int num_regs,
+                   int operand_size,
+                   intptr_t* start_address,
+                   intptr_t* end_address);
+   void HandleRList(Instruction* instr, bool load);
+   void HandleVList(Instruction* inst);
+   void SoftwareInterrupt(Instruction* instr);
+
+   // Stop helper functions.
+   inline bool isStopInstruction(Instruction* instr);
+   inline bool isWatchedStop(uint32_t bkpt_code);
+   inline bool isEnabledStop(uint32_t bkpt_code);
+   inline void EnableStop(uint32_t bkpt_code);
+   inline void DisableStop(uint32_t bkpt_code);
+   inline void IncreaseStopCounter(uint32_t bkpt_code);
+   void PrintStopInfo(uint32_t code);
+
+   // Read and write memory.
+   inline uint8_t ReadBU(int32_t addr);
+   inline int8_t ReadB(int32_t addr);
+   inline void WriteB(int32_t addr, uint8_t value);
+   inline void WriteB(int32_t addr, int8_t value);
+
+   inline uint16_t ReadHU(int32_t addr, Instruction* instr);
+   inline int16_t ReadH(int32_t addr, Instruction* instr);
+   // Note: Overloaded on the sign of the value.
+   inline void WriteH(int32_t addr, uint16_t value, Instruction* instr);
+   inline void WriteH(int32_t addr, int16_t value, Instruction* instr);
+
+   inline int ReadW(int32_t addr, Instruction* instr);
+   inline void WriteW(int32_t addr, int value, Instruction* instr);
+
+   int32_t* ReadDW(int32_t addr);
+   void WriteDW(int32_t addr, int32_t value1, int32_t value2);
+
+   // Executing is handled based on the instruction type.
+   // Both type 0 and type 1 rolled into one.
+   void DecodeType01(Instruction* instr);
+   void DecodeType2(Instruction* instr);
+   void DecodeType3(Instruction* instr);
+   void DecodeType4(Instruction* instr);
+   void DecodeType5(Instruction* instr);
+   void DecodeType6(Instruction* instr);
+   void DecodeType7(Instruction* instr);
+
+   // Support for VFP.
+   void DecodeTypeVFP(Instruction* instr);
+   void DecodeType6CoprocessorIns(Instruction* instr);
+
+   void DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(Instruction* instr);
+   void DecodeVCMP(Instruction* instr);
+   void DecodeVCVTBetweenDoubleAndSingle(Instruction* instr);
+   void DecodeVCVTBetweenFloatingPointAndInteger(Instruction* instr);
+
+   // Executes one instruction.
+   void InstructionDecode(Instruction* instr);
+
+   // ICache.
+   static void CheckICache(v8::internal::HashMap* i_cache, Instruction* instr);
+   static void FlushOnePage(v8::internal::HashMap* i_cache, intptr_t start,
+                            int size);
+   static CachePage* GetCachePage(v8::internal::HashMap* i_cache, void* page);
+
+   // Runtime call support.
+   static void* RedirectExternalReference(
+       void* external_function,
+       v8::internal::ExternalReference::Type type);
+
+   // For use in calls that take double value arguments.
+   void GetFpArgs(double* x, double* y);
+   void GetFpArgs(double* x);
+   void GetFpArgs(double* x, int32_t* y);
+   void SetFpResult(const double& result);
+   void TrashCallerSaveRegisters();
+
+   // Architecture state.
+   // Saturating instructions require a Q flag to indicate saturation.
+   // There is currently no way to read the CPSR directly, and thus read the Q
+   // flag, so this is left unimplemented.
+   int32_t registers_[16];
+   bool n_flag_;
+   bool z_flag_;
+   bool c_flag_;
+   bool v_flag_;
+
+   // VFP architecture state.
+   unsigned int vfp_register[num_s_registers];
+   bool n_flag_FPSCR_;
+   bool z_flag_FPSCR_;
+   bool c_flag_FPSCR_;
+   bool v_flag_FPSCR_;
+
+   // VFP rounding mode. See ARM DDI 0406B Page A2-29.
+   VFPRoundingMode FPSCR_rounding_mode_;
+
+   // VFP FP exception flags architecture state.
+   bool inv_op_vfp_flag_;
+   bool div_zero_vfp_flag_;
+   bool overflow_vfp_flag_;
+   bool underflow_vfp_flag_;
+   bool inexact_vfp_flag_;
+
+   // Simulator support.
+   char* stack_;
+   bool pc_modified_;
+   int icount_;
+
+   // Icache simulation.
+   v8::internal::HashMap* i_cache_;
+
+   // Registered breakpoints.
+   Instruction* break_pc_;
+   Instr break_instr_;
+
+   v8::internal::Isolate* isolate_;
+
+   // A stop is watched if its code is less than kNumOfWatchedStops.
+   // Only watched stops support enabling/disabling and the counter feature.
+   static const uint32_t kNumOfWatchedStops = 256;
+
+   // Breakpoint is disabled if bit 31 is set.
+   static const uint32_t kStopDisabledBit = 1 << 31;
+
+   // A stop is enabled, meaning the simulator will stop when meeting the
+   // instruction, if bit 31 of watched_stops[code].count is unset.
+   // The value watched_stops[code].count & ~(1 << 31) indicates how many times
+   // the breakpoint was hit or gone through.
+   struct StopCountAndDesc {
+     uint32_t count;
+     char* desc;
+   };
+   StopCountAndDesc watched_stops[kNumOfWatchedStops];
+ };
+
+
+ // When running with the simulator, we transition into simulated execution
+ // at this point.
+ #define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
+   reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
+       FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
+
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+   Simulator::current(Isolate::Current())->Call( \
+       entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
+
+ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
+   try_catch_address == NULL ? \
+       NULL : *(reinterpret_cast<TryCatch**>(try_catch_address))
+
+
+ // The simulator has its own stack. Thus it has a different stack limit from
+ // the C-based native code. Setting the c_limit to indicate a very small
+ // stack causes stack overflow errors, since the simulator ignores the input.
+ // This is unlikely to be an issue in practice, though it might cause testing
+ // trouble down the line.
+ class SimulatorStack : public v8::internal::AllStatic {
+  public:
+   static inline uintptr_t JsLimitFromCLimit(v8::internal::Isolate* isolate,
+                                             uintptr_t c_limit) {
+     return Simulator::current(isolate)->StackLimit();
+   }
+
+   static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) {
+     Simulator* sim = Simulator::current(Isolate::Current());
+     return sim->PushAddress(try_catch_address);
+   }
+
+   static inline void UnregisterCTryCatch() {
+     Simulator::current(Isolate::Current())->PopAddress();
+   }
+ };
+
+ } }  // namespace v8::internal
+
+ #endif  // !defined(USE_SIMULATOR)
+ #endif  // V8_ARM_SIMULATOR_ARM_H_
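
The simulator-arm.h listing above gives CALL_GENERATED_CODE two personalities: a native ARM build expands it to a direct call into generated code, while a USE_SIMULATOR build routes the call through Simulator::current()->Call(), which decodes and executes the instructions one at a time. The standalone miniature below sketches that dispatch pattern under the same #if; it is illustrative only, and the names MiniSimulator and CALL_ENTRY are hypothetical rather than anything shipped in this gem.

// Illustrative sketch, not from libv8: mirrors the USE_SIMULATOR dispatch
// in simulator-arm.h using hypothetical names (MiniSimulator, CALL_ENTRY).
#include <cstdint>
#include <cstdio>

#if !defined(USE_SIMULATOR)
// Native build: call the generated entry point directly, as the real
// CALL_GENERATED_CODE macro does.
#define CALL_ENTRY(entry, a, b) (entry(a, b))
#else
// Simulated build: route the call through a simulator object. In real V8
// the simulator sets up register state, points lr at end_sim_pc, and runs
// Execute(); this stand-in simply forwards the call.
class MiniSimulator {
 public:
  static MiniSimulator* current() {
    static MiniSimulator instance;  // stand-in for Simulator::current(isolate)
    return &instance;
  }
  int32_t Call(int32_t (*entry)(int32_t, int32_t), int32_t a, int32_t b) {
    return entry(a, b);
  }
};
#define CALL_ENTRY(entry, a, b) (MiniSimulator::current()->Call(entry, a, b))
#endif

static int32_t Add(int32_t a, int32_t b) { return a + b; }

int main() {
  // The call site is identical in both build modes; that indirection is
  // the point of the macro.
  std::printf("%d\n", CALL_ENTRY(Add, 2, 3));
  return 0;
}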
data/lib/libv8/v8/src/arm/stub-cache-arm.cc
@@ -0,0 +1,4243 @@
+ // Copyright 2011 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ //     * Redistributions of source code must retain the above copyright
+ //       notice, this list of conditions and the following disclaimer.
+ //     * Redistributions in binary form must reproduce the above
+ //       copyright notice, this list of conditions and the following
+ //       disclaimer in the documentation and/or other materials provided
+ //       with the distribution.
+ //     * Neither the name of Google Inc. nor the names of its
+ //       contributors may be used to endorse or promote products derived
+ //       from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #include "v8.h"
+
+ #if defined(V8_TARGET_ARCH_ARM)
+
+ #include "ic-inl.h"
+ #include "codegen.h"
+ #include "stub-cache.h"
+
+ namespace v8 {
+ namespace internal {
+
+ #define __ ACCESS_MASM(masm)
+
+
+ static void ProbeTable(Isolate* isolate,
+                        MacroAssembler* masm,
+                        Code::Flags flags,
+                        StubCache::Table table,
+                        Register name,
+                        Register offset,
+                        Register scratch,
+                        Register scratch2) {
+   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+
+   uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
+   uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
+
+   // Check the relative positions of the address fields.
+   ASSERT(value_off_addr > key_off_addr);
+   ASSERT((value_off_addr - key_off_addr) % 4 == 0);
+   ASSERT((value_off_addr - key_off_addr) < (256 * 4));
+
+   Label miss;
+   Register offsets_base_addr = scratch;
+
+   // Check that the key in the entry matches the name.
+   __ mov(offsets_base_addr, Operand(key_offset));
+   __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
+   __ cmp(name, ip);
+   __ b(ne, &miss);
+
+   // Get the code entry from the cache.
+   __ add(offsets_base_addr, offsets_base_addr,
+          Operand(value_off_addr - key_off_addr));
+   __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));
+
+   // Check that the flags match what we're looking for.
+   __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
+   __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
+   __ cmp(scratch2, Operand(flags));
+   __ b(ne, &miss);
+
+   // Re-load code entry from cache.
+   __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));
+
+   // Jump to the first instruction in the code stub.
+   __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
+   __ Jump(offset);
+
+   // Miss: fall through.
+   __ bind(&miss);
+ }
+
+
+ // Helper function used to check that the dictionary doesn't contain
+ // the property. This function may return false negatives, so miss_label
+ // must always call a backup property check that is complete.
+ // This function is safe to call if the receiver has fast properties.
+ // Name must be a symbol and receiver must be a heap object.
+ MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
+     MacroAssembler* masm,
+     Label* miss_label,
+     Register receiver,
+     String* name,
+     Register scratch0,
+     Register scratch1) {
+   ASSERT(name->IsSymbol());
+   Counters* counters = masm->isolate()->counters();
+   __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
+   __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
+
+   Label done;
+
+   const int kInterceptorOrAccessCheckNeededMask =
+       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
+
+   // Bail out if the receiver has a named interceptor or requires access checks.
+   Register map = scratch1;
+   __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
+   __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
+   __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
+   __ b(ne, miss_label);
+
+   // Check that receiver is a JSObject.
+   __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
+   __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
+   __ b(lt, miss_label);
+
+   // Load properties array.
+   Register properties = scratch0;
+   __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+   // Check that the properties array is a dictionary.
+   __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
+   Register tmp = properties;
+   __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
+   __ cmp(map, tmp);
+   __ b(ne, miss_label);
+
+   // Restore the temporarily used register.
+   __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+
+
+   MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
+       masm,
+       miss_label,
+       &done,
+       receiver,
+       properties,
+       name,
+       scratch1);
+   if (result->IsFailure()) return result;
+
+   __ bind(&done);
+   __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
+
+   return result;
+ }
+
+
+ void StubCache::GenerateProbe(MacroAssembler* masm,
+                               Code::Flags flags,
+                               Register receiver,
+                               Register name,
+                               Register scratch,
+                               Register extra,
+                               Register extra2) {
+   Isolate* isolate = masm->isolate();
+   Label miss;
+
+   // Make sure that code is valid. The shifting code relies on the
+   // entry size being 8.
+   ASSERT(sizeof(Entry) == 8);
+
+   // Make sure the flags do not name a specific type.
+   ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
+
+   // Make sure that there are no register conflicts.
+   ASSERT(!scratch.is(receiver));
+   ASSERT(!scratch.is(name));
+   ASSERT(!extra.is(receiver));
+   ASSERT(!extra.is(name));
+   ASSERT(!extra.is(scratch));
+   ASSERT(!extra2.is(receiver));
+   ASSERT(!extra2.is(name));
+   ASSERT(!extra2.is(scratch));
+   ASSERT(!extra2.is(extra));
+
+   // Check scratch, extra and extra2 registers are valid.
+   ASSERT(!scratch.is(no_reg));
+   ASSERT(!extra.is(no_reg));
+   ASSERT(!extra2.is(no_reg));
+
+   // Check that the receiver isn't a smi.
+   __ tst(receiver, Operand(kSmiTagMask));
+   __ b(eq, &miss);
+
+   // Get the map of the receiver and compute the hash.
+   __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
+   __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
+   __ add(scratch, scratch, Operand(ip));
+   __ eor(scratch, scratch, Operand(flags));
+   __ and_(scratch,
+           scratch,
+           Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
+
+   // Probe the primary table.
+   ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
+
+   // Primary miss: Compute hash for secondary probe.
+   __ sub(scratch, scratch, Operand(name));
+   __ add(scratch, scratch, Operand(flags));
+   __ and_(scratch,
+           scratch,
+           Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
+
+   // Probe the secondary table.
+   ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
+
+   // Cache miss: Fall-through and let caller handle the miss by
+   // entering the runtime system.
+   __ bind(&miss);
+ }
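
The two masked hash computations in GenerateProbe above are easier to follow in scalar form. The sketch below restates them as plain C++; it is illustrative only, with made-up constant values standing in for V8's kPrimaryTableSize, kSecondaryTableSize, and kHeapObjectTagSize, and hypothetical function names (PrimaryOffset, SecondaryOffset).

// Illustrative sketch, not part of the diff: the stub-cache probe hashing
// that the ARM assembly above computes, restated as scalar C++.
#include <cstdint>
#include <cstdio>

// Placeholder values; in V8 these are compile-time constants.
static const uint32_t kPrimaryTableSize = 2048;
static const uint32_t kSecondaryTableSize = 512;
static const uint32_t kHeapObjectTagSize = 2;

// Primary probe: (name hash + receiver map) ^ flags, masked to the table.
static uint32_t PrimaryOffset(uint32_t name_hash, uint32_t map,
                              uint32_t flags) {
  uint32_t hash = (name_hash + map) ^ flags;
  return hash & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}

// Secondary probe: subtract the name register value back out and remix
// with the flags, masked to the (smaller) secondary table.
static uint32_t SecondaryOffset(uint32_t primary, uint32_t name,
                                uint32_t flags) {
  uint32_t hash = primary - name + flags;
  return hash & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
}

int main() {
  uint32_t primary = PrimaryOffset(0x1234u, 0xdeadbeefu, 0x42u);
  uint32_t secondary = SecondaryOffset(primary, 0x5678u, 0x42u);
  std::printf("primary=%u secondary=%u\n", primary, secondary);
  return 0;
}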
+
+
+ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
+                                                        int index,
+                                                        Register prototype) {
+   // Load the global or builtins object from the current context.
+   __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+   // Load the global context from the global or builtins object.
+   __ ldr(prototype,
+          FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
+   // Load the function from the global context.
+   __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
+   // Load the initial map. The global functions all have initial maps.
+   __ ldr(prototype,
+          FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
+   // Load the prototype from the initial map.
+   __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
+ }
+
+
+ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
+     MacroAssembler* masm, int index, Register prototype, Label* miss) {
+   Isolate* isolate = masm->isolate();
+   // Check we're still in the same context.
+   __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+   __ Move(ip, isolate->global());
+   __ cmp(prototype, ip);
+   __ b(ne, miss);
+   // Get the global function with the given index.
+   JSFunction* function =
+       JSFunction::cast(isolate->global_context()->get(index));
+   // Load its initial map. The global functions all have initial maps.
+   __ Move(prototype, Handle<Map>(function->initial_map()));
+   // Load the prototype from the initial map.
+   __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
+ }
+
+
+ // Load a fast property out of a holder object (src). In-object properties
+ // are loaded directly; otherwise the property is loaded from the properties
+ // fixed array.
+ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                             Register dst, Register src,
+                                             JSObject* holder, int index) {
+   // Adjust for the number of properties stored in the holder.
+   index -= holder->map()->inobject_properties();
+   if (index < 0) {
+     // Get the property straight out of the holder.
+     int offset = holder->map()->instance_size() + (index * kPointerSize);
+     __ ldr(dst, FieldMemOperand(src, offset));
+   } else {
+     // Calculate the offset into the properties array.
+     int offset = index * kPointerSize + FixedArray::kHeaderSize;
+     __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
+     __ ldr(dst, FieldMemOperand(dst, offset));
+   }
+ }
+
+
+ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
+                                            Register receiver,
+                                            Register scratch,
+                                            Label* miss_label) {
+   // Check that the receiver isn't a smi.
+   __ tst(receiver, Operand(kSmiTagMask));
+   __ b(eq, miss_label);
+
+   // Check that the object is a JS array.
+   __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
+   __ b(ne, miss_label);
+
+   // Load length directly from the JS array.
+   __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+   __ Ret();
+ }
+
+
+ // Generate code to check if an object is a string. If the object is a
+ // heap object, its map's instance type is left in the scratch1 register.
+ // If this is not needed, scratch1 and scratch2 may be the same register.
+ static void GenerateStringCheck(MacroAssembler* masm,
+                                 Register receiver,
+                                 Register scratch1,
+                                 Register scratch2,
+                                 Label* smi,
+                                 Label* non_string_object) {
+   // Check that the receiver isn't a smi.
+   __ tst(receiver, Operand(kSmiTagMask));
+   __ b(eq, smi);
+
+   // Check that the object is a string.
+   __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
+   __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+   __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
+   // The cast is to resolve the overload for the argument of 0x0.
+   __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
+   __ b(ne, non_string_object);
+ }
+
+
+ // Generate code to load the length from a string object and return the length.
+ // If the receiver object is not a string or a wrapped string object the
+ // execution continues at the miss label. The register containing the
+ // receiver is potentially clobbered.
+ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
+                                             Register receiver,
+                                             Register scratch1,
+                                             Register scratch2,
+                                             Label* miss,
+                                             bool support_wrappers) {
+   Label check_wrapper;
+
+   // Check if the object is a string, leaving the instance type in the
+   // scratch1 register.
+   GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
+                       support_wrappers ? &check_wrapper : miss);
+
+   // Load length directly from the string.
+   __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
+   __ Ret();
+
+   if (support_wrappers) {
+     // Check if the object is a JSValue wrapper.
+     __ bind(&check_wrapper);
+     __ cmp(scratch1, Operand(JS_VALUE_TYPE));
+     __ b(ne, miss);
+
+     // Unwrap the value and check if the wrapped value is a string.
+     __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
+     GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
+     __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
+     __ Ret();
+   }
+ }
+
+
+ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
+                                                  Register receiver,
+                                                  Register scratch1,
+                                                  Register scratch2,
+                                                  Label* miss_label) {
+   __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
+   __ mov(r0, scratch1);
+   __ Ret();
+ }
+
+
+ // Generate StoreField code; the value is passed in the r0 register.
+ // When leaving generated code after success, the receiver_reg and name_reg
+ // may be clobbered. Upon branch to miss_label, the receiver and name
+ // registers have their original values.
+ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
+                                       JSObject* object,
+                                       int index,
+                                       Map* transition,
+                                       Register receiver_reg,
+                                       Register name_reg,
+                                       Register scratch,
+                                       Label* miss_label) {
+   // r0 : value
+   Label exit;
+
+   // Check that the receiver isn't a smi.
+   __ tst(receiver_reg, Operand(kSmiTagMask));
+   __ b(eq, miss_label);
+
+   // Check that the map of the receiver hasn't changed.
+   __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+   __ cmp(scratch, Operand(Handle<Map>(object->map())));
+   __ b(ne, miss_label);
+
+   // Perform global security token check if needed.
+   if (object->IsJSGlobalProxy()) {
+     __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+   }
+
+   // Stub never generated for non-global objects that require access
+   // checks.
+   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+
+   // Perform map transition for the receiver if necessary.
+   if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
+     // The properties must be extended before we can store the value.
+     // We jump to a runtime call that extends the properties array.
+     __ push(receiver_reg);
+     __ mov(r2, Operand(Handle<Map>(transition)));
+     __ Push(r2, r0);
+     __ TailCallExternalReference(
+         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
+                           masm->isolate()),
+         3,
+         1);
+     return;
+   }
+
+   if (transition != NULL) {
+     // Update the map of the object; no write barrier updating is
+     // needed because the map is never in new space.
+     __ mov(ip, Operand(Handle<Map>(transition)));
+     __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+   }
+
+   // Adjust for the number of properties stored in the object. Even in the
+   // face of a transition we can use the old map here because the size of the
+   // object and the number of in-object properties is not going to change.
+   index -= object->map()->inobject_properties();
+
+   if (index < 0) {
429
+ // Set the property straight into the object.
430
+ int offset = object->map()->instance_size() + (index * kPointerSize);
431
+ __ str(r0, FieldMemOperand(receiver_reg, offset));
432
+
433
+ // Skip updating write barrier if storing a smi.
434
+ __ tst(r0, Operand(kSmiTagMask));
435
+ __ b(eq, &exit);
436
+
437
+ // Update the write barrier for the array address.
438
+ // Pass the now unused name_reg as a scratch register.
439
+ __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
440
+ } else {
441
+ // Write to the properties array.
442
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
443
+ // Get the properties array
444
+ __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
445
+ __ str(r0, FieldMemOperand(scratch, offset));
446
+
447
+ // Skip updating write barrier if storing a smi.
448
+ __ tst(r0, Operand(kSmiTagMask));
449
+ __ b(eq, &exit);
450
+
451
+ // Update the write barrier for the array address.
452
+ // Ok to clobber receiver_reg and name_reg, since we return.
453
+ __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
454
+ }
455
+
456
+ // Return the value (register r0).
457
+ __ bind(&exit);
458
+ __ Ret();
459
+ }
460
+
461
+
462
+ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
463
+ ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
464
+ Code* code = NULL;
465
+ if (kind == Code::LOAD_IC) {
466
+ code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
467
+ } else {
468
+ code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
469
+ }
470
+
471
+ Handle<Code> ic(code);
472
+ __ Jump(ic, RelocInfo::CODE_TARGET);
473
+ }
474
+
475
+
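+ // Invokes the function in r1 on the receiver in r0. For calls on a global
+ // object the receiver on the stack is first patched with the global proxy.
+ // Jumps to |miss| if r1 does not hold a JSFunction.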
+ static void GenerateCallFunction(MacroAssembler* masm,
+ Object* object,
+ const ParameterCount& arguments,
+ Label* miss) {
+ // ----------- S t a t e -------------
+ // -- r0: receiver
+ // -- r1: function to call
+ // -----------------------------------
+
+ // Check that the function really is a function.
+ __ JumpIfSmi(r1, miss);
+ __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
+ __ b(ne, miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
+ __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
+ }
+
+ // Invoke the function.
+ __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
+ }
+
+
+ static void PushInterceptorArguments(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Register name,
+ JSObject* holder_obj) {
+ __ push(name);
+ InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
+ ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
+ Register scratch = name;
+ __ mov(scratch, Operand(Handle<Object>(interceptor)));
+ __ push(scratch);
+ __ push(receiver);
+ __ push(holder);
+ __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
+ __ push(scratch);
+ }
+
+
+ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Register name,
+ JSObject* holder_obj) {
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
+ masm->isolate());
+ __ mov(r0, Operand(5));
+ __ mov(r1, Operand(ref));
+
+ CEntryStub stub(1);
+ __ CallStub(&stub);
+ }
+
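+ // Number of extra words reserved on the stack for a fast API call:
+ // the holder, the callee JSFunction and the call data (see the state
+ // comment in GenerateFastApiDirectCall below).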
+ static const int kFastApiCallArguments = 3;
+
+ // Reserves space for the extra arguments to FastHandleApiCall in the
+ // caller's frame.
+ //
+ // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
+ static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
+ Register scratch) {
+ __ mov(scratch, Operand(Smi::FromInt(0)));
+ for (int i = 0; i < kFastApiCallArguments; i++) {
+ __ push(scratch);
+ }
+ }
+
+
+ // Undoes the effects of ReserveSpaceForFastApiCall.
+ static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
+ __ Drop(kFastApiCallArguments);
+ }
+
+
+ static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
+ const CallOptimization& optimization,
+ int argc) {
+ // ----------- S t a t e -------------
+ // -- sp[0] : holder (set by CheckPrototypes)
+ // -- sp[4] : callee js function
+ // -- sp[8] : call data
+ // -- sp[12] : last js argument
+ // -- ...
+ // -- sp[(argc + 3) * 4] : first js argument
+ // -- sp[(argc + 4) * 4] : receiver
+ // -----------------------------------
+ // Get the function and setup the context.
+ JSFunction* function = optimization.constant_function();
+ __ mov(r5, Operand(Handle<JSFunction>(function)));
+ __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
+
+ // Pass the additional arguments FastHandleApiCall expects.
+ Object* call_data = optimization.api_call_info()->data();
+ Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
+ if (masm->isolate()->heap()->InNewSpace(call_data)) {
+ __ Move(r0, api_call_info_handle);
+ __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
+ } else {
+ __ Move(r6, Handle<Object>(call_data));
+ }
+ // Store js function and call data.
+ __ stm(ib, sp, r5.bit() | r6.bit());
+
+ // r2 points to call data as expected by Arguments
+ // (refer to layout above).
+ __ add(r2, sp, Operand(2 * kPointerSize));
+
+ Object* callback = optimization.api_call_info()->callback();
+ Address api_function_address = v8::ToCData<Address>(callback);
+ ApiFunction fun(api_function_address);
+
+ const int kApiStackSpace = 4;
+ __ EnterExitFrame(false, kApiStackSpace);
+
+ // r0 = v8::Arguments&
+ // Arguments is after the return address.
+ __ add(r0, sp, Operand(1 * kPointerSize));
+ // v8::Arguments::implicit_args = data
+ __ str(r2, MemOperand(r0, 0 * kPointerSize));
+ // v8::Arguments::values = last argument
+ __ add(ip, r2, Operand(argc * kPointerSize));
+ __ str(ip, MemOperand(r0, 1 * kPointerSize));
+ // v8::Arguments::length_ = argc
+ __ mov(ip, Operand(argc));
+ __ str(ip, MemOperand(r0, 2 * kPointerSize));
+ // v8::Arguments::is_construct_call = 0
+ __ mov(ip, Operand(0));
+ __ str(ip, MemOperand(r0, 3 * kPointerSize));
+
+ // Emitting a stub call may try to allocate (if the code is not
+ // already generated). Do not allow the assembler to perform a
+ // garbage collection but instead return the allocation failure
+ // object.
+ const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
+ ExternalReference ref = ExternalReference(&fun,
+ ExternalReference::DIRECT_API_CALL,
+ masm->isolate());
+ return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
+ }
+
+ class CallInterceptorCompiler BASE_EMBEDDED {
+ public:
+ CallInterceptorCompiler(StubCompiler* stub_compiler,
+ const ParameterCount& arguments,
+ Register name)
+ : stub_compiler_(stub_compiler),
+ arguments_(arguments),
+ name_(name) {}
+
+ MaybeObject* Compile(MacroAssembler* masm,
+ JSObject* object,
+ JSObject* holder,
+ String* name,
+ LookupResult* lookup,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Label* miss) {
+ ASSERT(holder->HasNamedInterceptor());
+ ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
+
+ // Check that the receiver isn't a smi.
+ __ JumpIfSmi(receiver, miss);
+
+ CallOptimization optimization(lookup);
+
+ if (optimization.is_constant_call()) {
+ return CompileCacheable(masm,
+ object,
+ receiver,
+ scratch1,
+ scratch2,
+ scratch3,
+ holder,
+ lookup,
+ name,
+ optimization,
+ miss);
+ } else {
+ CompileRegular(masm,
+ object,
+ receiver,
+ scratch1,
+ scratch2,
+ scratch3,
+ name,
+ holder,
+ miss);
+ return masm->isolate()->heap()->undefined_value();
+ }
+ }
+
+ private:
+ MaybeObject* CompileCacheable(MacroAssembler* masm,
+ JSObject* object,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ JSObject* interceptor_holder,
+ LookupResult* lookup,
+ String* name,
+ const CallOptimization& optimization,
+ Label* miss_label) {
+ ASSERT(optimization.is_constant_call());
+ ASSERT(!lookup->holder()->IsGlobalObject());
+
+ Counters* counters = masm->isolate()->counters();
+
+ int depth1 = kInvalidProtoDepth;
+ int depth2 = kInvalidProtoDepth;
+ bool can_do_fast_api_call = false;
+ if (optimization.is_simple_api_call() &&
+ !lookup->holder()->IsGlobalObject()) {
+ depth1 =
+ optimization.GetPrototypeDepthOfExpectedType(object,
+ interceptor_holder);
+ if (depth1 == kInvalidProtoDepth) {
+ depth2 =
+ optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
+ lookup->holder());
+ }
+ can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
+ (depth2 != kInvalidProtoDepth);
+ }
+
+ __ IncrementCounter(counters->call_const_interceptor(), 1,
+ scratch1, scratch2);
+
+ if (can_do_fast_api_call) {
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
+ scratch1, scratch2);
+ ReserveSpaceForFastApiCall(masm, scratch1);
+ }
+
+ // Check that the maps from receiver to interceptor's holder
+ // haven't changed and thus we can invoke interceptor.
+ Label miss_cleanup;
+ Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
+ Register holder =
+ stub_compiler_->CheckPrototypes(object, receiver,
+ interceptor_holder, scratch1,
+ scratch2, scratch3, name, depth1, miss);
+
+ // Invoke an interceptor and if it provides a value,
+ // branch to |regular_invoke|.
+ Label regular_invoke;
+ LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
+ &regular_invoke);
+
+ // Interceptor returned nothing for this property. Try to use cached
+ // constant function.
+
+ // Check that the maps from interceptor's holder to constant function's
+ // holder haven't changed and thus we can use cached constant function.
+ if (interceptor_holder != lookup->holder()) {
+ stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
+ lookup->holder(), scratch1,
+ scratch2, scratch3, name, depth2, miss);
+ } else {
+ // CheckPrototypes has a side effect of fetching a 'holder'
+ // for API (object which is instanceof for the signature). It's
+ // safe to omit it here, as if present, it should be fetched
+ // by the previous CheckPrototypes.
+ ASSERT(depth2 == kInvalidProtoDepth);
+ }
+
+ // Invoke function.
+ if (can_do_fast_api_call) {
+ MaybeObject* result = GenerateFastApiDirectCall(masm,
+ optimization,
+ arguments_.immediate());
+ if (result->IsFailure()) return result;
+ } else {
+ __ InvokeFunction(optimization.constant_function(), arguments_,
+ JUMP_FUNCTION);
+ }
+
+ // Deferred code for the fast API call case: clean up preallocated space.
+ if (can_do_fast_api_call) {
+ __ bind(&miss_cleanup);
+ FreeSpaceForFastApiCall(masm);
+ __ b(miss_label);
+ }
+
+ // Invoke a regular function.
+ __ bind(&regular_invoke);
+ if (can_do_fast_api_call) {
+ FreeSpaceForFastApiCall(masm);
+ }
+
+ return masm->isolate()->heap()->undefined_value();
+ }
+
+ void CompileRegular(MacroAssembler* masm,
+ JSObject* object,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ String* name,
+ JSObject* interceptor_holder,
+ Label* miss_label) {
+ Register holder =
+ stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
+ scratch1, scratch2, scratch3, name,
+ miss_label);
+
+ // Call a runtime function to load the interceptor property.
+ __ EnterInternalFrame();
+ // Save the name_ register across the call.
+ __ push(name_);
+
+ PushInterceptorArguments(masm,
+ receiver,
+ holder,
+ name_,
+ interceptor_holder);
+
+ __ CallExternalReference(
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
+ masm->isolate()),
+ 5);
+
+ // Restore the name_ register.
+ __ pop(name_);
+ __ LeaveInternalFrame();
+ }
+
+ void LoadWithInterceptor(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ JSObject* holder_obj,
+ Register scratch,
+ Label* interceptor_succeeded) {
+ __ EnterInternalFrame();
+ __ Push(holder, name_);
+
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
+
+ __ pop(name_); // Restore the name.
+ __ pop(receiver); // Restore the holder.
+ __ LeaveInternalFrame();
+
+ // If the interceptor returns the no-result sentinel, call the constant
+ // function.
+ __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
+ __ cmp(r0, scratch);
+ __ b(ne, interceptor_succeeded);
+ }
+
+ StubCompiler* stub_compiler_;
+ const ParameterCount& arguments_;
+ Register name_;
+ };
+
+
+ // Generate code to check that a global property cell is empty. Create
+ // the property cell at compilation time if no cell exists for the
+ // property.
+ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
+ MacroAssembler* masm,
+ GlobalObject* global,
+ String* name,
+ Register scratch,
+ Label* miss) {
+ Object* probe;
+ { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
+ if (!maybe_probe->ToObject(&probe)) return maybe_probe;
+ }
+ JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
+ ASSERT(cell->value()->IsTheHole());
+ __ mov(scratch, Operand(Handle<Object>(cell)));
+ __ ldr(scratch,
+ FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(scratch, ip);
+ __ b(ne, miss);
+ return cell;
+ }
+
+ // Calls GenerateCheckPropertyCell for each global object in the prototype chain
+ // from object to (but not including) holder.
+ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
+ MacroAssembler* masm,
+ JSObject* object,
+ JSObject* holder,
+ String* name,
+ Register scratch,
+ Label* miss) {
+ JSObject* current = object;
+ while (current != holder) {
+ if (current->IsGlobalObject()) {
+ // Returns a cell or a failure.
+ MaybeObject* result = GenerateCheckPropertyCell(
+ masm,
+ GlobalObject::cast(current),
+ name,
+ scratch,
+ miss);
+ if (result->IsFailure()) return result;
+ }
+ ASSERT(current->IsJSObject());
+ current = JSObject::cast(current->GetPrototype());
+ }
+ return NULL;
+ }
+
+
+ // Convert the int passed in register ival to an IEEE 754 single precision
+ // floating point value and store it at memory location (dst + 4 * wordoffset).
+ // If VFP3 is available, use it for the conversion.
+ static void StoreIntAsFloat(MacroAssembler* masm,
+ Register dst,
+ Register wordoffset,
+ Register ival,
+ Register fval,
+ Register scratch1,
+ Register scratch2) {
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ __ vmov(s0, ival);
+ __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
+ __ vcvt_f32_s32(s0, s0);
+ __ vstr(s0, scratch1, 0);
+ } else {
+ Label not_special, done;
+ // Move the sign bit from the source to the destination. This works
+ // because the sign bit of a binary32 value has the same position and
+ // polarity as the two's complement sign bit of the integer.
+ ASSERT(kBinary32SignMask == 0x80000000u);
+
+ __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
+ // Negate value if it is negative.
+ __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+
+ // We have -1, 0 or 1, which we treat specially. Register ival contains
+ // the absolute value: it is either equal to 1 (special case of -1 and 1),
+ // greater than 1 (not a special case) or less than 1 (special case of 0).
+ __ cmp(ival, Operand(1));
+ __ b(gt, &not_special);
+
+ // For 1 or -1 we need to or in the 0 exponent (biased).
+ static const uint32_t exponent_word_for_1 =
+ kBinary32ExponentBias << kBinary32ExponentShift;
+
+ __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
+ __ b(&done);
+
+ __ bind(&not_special);
+ // Count leading zeros.
+ // Gets the wrong answer for 0, but we already checked for that case above.
+ Register zeros = scratch2;
+ __ CountLeadingZeros(zeros, ival, scratch1);
+
+ // Compute the biased exponent, (kBitsPerInt - 1 - zeros) +
+ // kBinary32ExponentBias, and or it into the exponent field.
+ __ rsb(scratch1,
+ zeros,
+ Operand((kBitsPerInt - 1) + kBinary32ExponentBias));
+
+ __ orr(fval,
+ fval,
+ Operand(scratch1, LSL, kBinary32ExponentShift));
+
+ // Shift up the source, chopping the top (implicit) bit off.
+ __ add(zeros, zeros, Operand(1));
+ // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
+ __ mov(ival, Operand(ival, LSL, zeros));
+ // Or the top kBinary32MantissaBits bits of the source into the mantissa.
+ __ orr(fval,
+ fval,
+ Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));
+
+ __ bind(&done);
+ __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
+ }
+ }
+
+
+ // Convert an unsigned integer with the specified number of leading zeroes
+ // in its binary representation to an IEEE 754 double.
+ // The integer to convert is passed in register hiword.
+ // The resulting double is returned in registers hiword:loword.
+ // This function does not work correctly for 0.
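+ // For example, an input of 0x80000000 (leading_zeroes == 0) has
+ // meaningful_bits == 31 and encodes as the double 2^31: biased exponent
+ // 1023 + 31 with an all-zero mantissa once the hidden bit is stripped.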
+ static void GenerateUInt2Double(MacroAssembler* masm,
+ Register hiword,
+ Register loword,
+ Register scratch,
+ int leading_zeroes) {
+ const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
+ const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
+
+ const int mantissa_shift_for_hi_word =
+ meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
+
+ const int mantissa_shift_for_lo_word =
+ kBitsPerInt - mantissa_shift_for_hi_word;
+
+ __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
+ if (mantissa_shift_for_hi_word > 0) {
+ __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
+ __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
+ } else {
+ __ mov(loword, Operand(0, RelocInfo::NONE));
+ __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
+ }
+
+ // If the least significant bit of the biased exponent was not 1, it was
+ // corrupted by the most significant bit of the mantissa, so fix that.
+ if (!(biased_exponent & 1)) {
+ __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
+ }
+ }
+
+
+ #undef __
+ #define __ ACCESS_MASM(masm())
+
+
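+ // Walks the prototype chain from |object| to |holder|, checking at each
+ // step that the map is unchanged (or, for slow-mode objects, that the
+ // property is absent), performing access checks for global proxies, and
+ // returning the register that ends up holding the holder.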
+ Register StubCompiler::CheckPrototypes(JSObject* object,
+ Register object_reg,
+ JSObject* holder,
+ Register holder_reg,
+ Register scratch1,
+ Register scratch2,
+ String* name,
+ int save_at_depth,
+ Label* miss) {
+ // Make sure there's no overlap between holder and object registers.
+ ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
+ ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
+ && !scratch2.is(scratch1));
+
+ // Keep track of the current object in register reg.
+ Register reg = object_reg;
+ int depth = 0;
+
+ if (save_at_depth == depth) {
+ __ str(reg, MemOperand(sp));
+ }
+
+ // Check the maps in the prototype chain.
+ // Traverse the prototype chain from the object and do map checks.
+ JSObject* current = object;
+ while (current != holder) {
+ depth++;
+
+ // Only global objects and objects that do not require access
+ // checks are allowed in stubs.
+ ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
+
+ ASSERT(current->GetPrototype()->IsJSObject());
+ JSObject* prototype = JSObject::cast(current->GetPrototype());
+ if (!current->HasFastProperties() &&
+ !current->IsJSGlobalObject() &&
+ !current->IsJSGlobalProxy()) {
+ if (!name->IsSymbol()) {
+ MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
+ Object* lookup_result = NULL; // Initialization to please compiler.
+ if (!maybe_lookup_result->ToObject(&lookup_result)) {
+ set_failure(Failure::cast(maybe_lookup_result));
+ return reg;
+ }
+ name = String::cast(lookup_result);
+ }
+ ASSERT(current->property_dictionary()->FindEntry(name) ==
+ StringDictionary::kNotFound);
+
+ MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
+ miss,
+ reg,
+ name,
+ scratch1,
+ scratch2);
+ if (negative_lookup->IsFailure()) {
+ set_failure(Failure::cast(negative_lookup));
+ return reg;
+ }
+
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+ reg = holder_reg; // From now on the object is in holder_reg.
+ __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+ } else if (heap()->InNewSpace(prototype)) {
+ // Get the map of the current object.
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ cmp(scratch1, Operand(Handle<Map>(current->map())));
+
+ // Branch on the result of the map check.
+ __ b(ne, miss);
+
+ // Check access rights to the global object. This has to happen
+ // after the map check so that we know that the object is
+ // actually a global object.
+ if (current->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
+ // Restore scratch register to be the map of the object. In the
+ // new space case below, we load the prototype from the map in
+ // the scratch register.
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+ }
+
+ reg = holder_reg; // From now on the object is in holder_reg.
+ // The prototype is in new space; we cannot store a reference
+ // to it in the code. Load it from the map.
+ __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+ } else {
+ // Check the map of the current object.
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ cmp(scratch1, Operand(Handle<Map>(current->map())));
+ // Branch on the result of the map check.
+ __ b(ne, miss);
+ // Check access rights to the global object. This has to happen
+ // after the map check so that we know that the object is
+ // actually a global object.
+ if (current->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
+ }
+ // The prototype is in old space; load it directly.
+ reg = holder_reg; // From now on the object is in holder_reg.
+ __ mov(reg, Operand(Handle<JSObject>(prototype)));
+ }
+
+ if (save_at_depth == depth) {
+ __ str(reg, MemOperand(sp));
+ }
+
+ // Go to the next object in the prototype chain.
+ current = prototype;
+ }
+
+ // Check the holder map.
+ __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ cmp(scratch1, Operand(Handle<Map>(current->map())));
+ __ b(ne, miss);
+
+ // Log the check depth.
+ LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
+
+ // Perform security check for access to the global object.
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
+ if (holder->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(reg, scratch1, miss);
+ }
+
+ // If we've skipped any global objects, it's not enough to verify
+ // that their maps haven't changed. We also need to check that the
+ // property cell for the property is still empty.
+ MaybeObject* result = GenerateCheckPropertyCells(masm(),
+ object,
+ holder,
+ name,
+ scratch1,
+ miss);
+ if (result->IsFailure()) set_failure(Failure::cast(result));
+
+ // Return the register containing the holder.
+ return reg;
+ }
+
+
+ void StubCompiler::GenerateLoadField(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ int index,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ tst(receiver, Operand(kSmiTagMask));
+ __ b(eq, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
+ name, miss);
+ GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
+ __ Ret();
+ }
+
+
+ void StubCompiler::GenerateLoadConstant(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Object* value,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ tst(receiver, Operand(kSmiTagMask));
+ __ b(eq, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, scratch3, name, miss);
+
+ // Return the constant value.
+ __ mov(r0, Operand(Handle<Object>(value)));
+ __ Ret();
+ }
+
+
+ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ AccessorInfo* callback,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ tst(receiver, Operand(kSmiTagMask));
+ __ b(eq, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
+ name, miss);
+
+ // Build AccessorInfo::args_ list on the stack and push property name below
+ // the exit frame to make GC aware of them and store pointers to them.
+ __ push(receiver);
+ __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
+ Handle<AccessorInfo> callback_handle(callback);
+ if (heap()->InNewSpace(callback_handle->data())) {
+ __ Move(scratch3, callback_handle);
+ __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
+ } else {
+ __ Move(scratch3, Handle<Object>(callback_handle->data()));
+ }
+ __ Push(reg, scratch3, name_reg);
+ __ mov(r0, sp); // r0 = Handle<String>
+
+ Address getter_address = v8::ToCData<Address>(callback->getter());
+ ApiFunction fun(getter_address);
+
+ const int kApiStackSpace = 1;
+ __ EnterExitFrame(false, kApiStackSpace);
+ // Create AccessorInfo instance on the stack above the exit frame with
+ // scratch2 (internal::Object **args_) as the data.
+ __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
+ __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
+
+ // Emitting a stub call may try to allocate (if the code is not
+ // already generated). Do not allow the assembler to perform a
+ // garbage collection but instead return the allocation failure
+ // object.
+ const int kStackUnwindSpace = 4;
+ ExternalReference ref =
+ ExternalReference(&fun,
+ ExternalReference::DIRECT_GETTER_CALL,
+ masm()->isolate());
+ return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
+ }
+
+
+ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
+ JSObject* interceptor_holder,
+ LookupResult* lookup,
+ Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ String* name,
+ Label* miss) {
+ ASSERT(interceptor_holder->HasNamedInterceptor());
+ ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
+
+ // Check that the receiver isn't a smi.
+ __ JumpIfSmi(receiver, miss);
+
+ // So far the most popular follow-ups for interceptor loads are FIELD
+ // and CALLBACKS, so inline only those; other cases may be added later.
+ bool compile_followup_inline = false;
+ if (lookup->IsProperty() && lookup->IsCacheable()) {
+ if (lookup->type() == FIELD) {
+ compile_followup_inline = true;
+ } else if (lookup->type() == CALLBACKS &&
+ lookup->GetCallbackObject()->IsAccessorInfo() &&
+ AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
+ compile_followup_inline = true;
+ }
+ }
+
+ if (compile_followup_inline) {
+ // Compile the interceptor call, followed by inline code to load the
+ // property from further up the prototype chain if the call fails.
+ // Check that the maps haven't changed.
+ Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
+ scratch1, scratch2, scratch3,
+ name, miss);
+ ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
+
+ // Save necessary data before invoking an interceptor.
+ // Requires a frame to make GC aware of pushed pointers.
+ __ EnterInternalFrame();
+
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ // The CALLBACKS case needs the receiver to be passed to the C++ callback.
+ __ Push(receiver, holder_reg, name_reg);
+ } else {
+ __ Push(holder_reg, name_reg);
+ }
+
+ // Invoke an interceptor. Note: the map checks from the receiver to the
+ // interceptor's holder have been compiled before (see a caller of this
+ // method).
+ CompileCallLoadPropertyWithInterceptor(masm(),
+ receiver,
+ holder_reg,
+ name_reg,
+ interceptor_holder);
+
+ // Check if the interceptor provided a value for the property. If so,
+ // return immediately.
+ Label interceptor_failed;
+ __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
+ __ cmp(r0, scratch1);
+ __ b(eq, &interceptor_failed);
+ __ LeaveInternalFrame();
+ __ Ret();
+
+ __ bind(&interceptor_failed);
+ __ pop(name_reg);
+ __ pop(holder_reg);
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ __ pop(receiver);
+ }
+
+ __ LeaveInternalFrame();
+
+ // Check that the maps from the interceptor's holder to lookup's holder
+ // haven't changed, and load lookup's holder into |holder_reg|.
+ if (interceptor_holder != lookup->holder()) {
+ holder_reg = CheckPrototypes(interceptor_holder,
+ holder_reg,
+ lookup->holder(),
+ scratch1,
+ scratch2,
+ scratch3,
+ name,
+ miss);
+ }
+
+ if (lookup->type() == FIELD) {
+ // We found a FIELD property in the prototype chain of the interceptor's
+ // holder. Retrieve the field from the field's holder.
+ GenerateFastPropertyLoad(masm(), r0, holder_reg,
+ lookup->holder(), lookup->GetFieldIndex());
+ __ Ret();
+ } else {
+ // We found a CALLBACKS property in the prototype chain of the
+ // interceptor's holder.
+ ASSERT(lookup->type() == CALLBACKS);
+ ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ ASSERT(callback != NULL);
+ ASSERT(callback->getter() != NULL);
+
+ // Tail call to runtime.
+ // Important invariant in CALLBACKS case: the code above must be
+ // structured to never clobber |receiver| register.
+ __ Move(scratch2, Handle<AccessorInfo>(callback));
+ // holder_reg is either receiver or scratch1.
+ if (!receiver.is(holder_reg)) {
+ ASSERT(scratch1.is(holder_reg));
+ __ Push(receiver, holder_reg);
+ __ ldr(scratch3,
+ FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
+ __ Push(scratch3, scratch2, name_reg);
+ } else {
+ __ push(receiver);
+ __ ldr(scratch3,
+ FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
+ __ Push(holder_reg, scratch3, scratch2, name_reg);
+ }
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
+ masm()->isolate());
+ __ TailCallExternalReference(ref, 5, 1);
+ }
+ } else { // !compile_followup_inline
+ // Call the runtime system to load the interceptor.
+ // Check that the maps haven't changed.
+ Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
+ scratch1, scratch2, scratch3,
+ name, miss);
+ PushInterceptorArguments(masm(), receiver, holder_reg,
+ name_reg, interceptor_holder);
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
+ masm()->isolate());
+ __ TailCallExternalReference(ref, 5, 1);
+ }
+ }
+
+
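+ // Keyed call ICs reach this stub with the property name in r2; verify
+ // that it matches the name this stub was compiled for.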
1395
+ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1396
+ if (kind_ == Code::KEYED_CALL_IC) {
1397
+ __ cmp(r2, Operand(Handle<String>(name)));
1398
+ __ b(ne, miss);
1399
+ }
1400
+ }
1401
+
1402
+
1403
+ void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1404
+ JSObject* holder,
1405
+ String* name,
1406
+ Label* miss) {
1407
+ ASSERT(holder->IsGlobalObject());
1408
+
1409
+ // Get the number of arguments.
1410
+ const int argc = arguments().immediate();
1411
+
1412
+ // Get the receiver from the stack.
1413
+ __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1414
+
1415
+ // If the object is the holder then we know that it's a global
1416
+ // object which can only happen for contextual calls. In this case,
1417
+ // the receiver cannot be a smi.
1418
+ if (object != holder) {
1419
+ __ tst(r0, Operand(kSmiTagMask));
1420
+ __ b(eq, miss);
1421
+ }
1422
+
1423
+ // Check that the maps haven't changed.
1424
+ CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
1425
+ }
1426
+
1427
+
1428
+ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1429
+ JSFunction* function,
1430
+ Label* miss) {
1431
+ // Get the value from the cell.
1432
+ __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
1433
+ __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
1434
+
1435
+ // Check that the cell contains the same function.
1436
+ if (heap()->InNewSpace(function)) {
1437
+ // We can't embed a pointer to a function in new space so we have
1438
+ // to verify that the shared function info is unchanged. This has
1439
+ // the nice side effect that multiple closures based on the same
1440
+ // function can all use this call IC. Before we load through the
1441
+ // function, we have to verify that it still is a function.
1442
+ __ tst(r1, Operand(kSmiTagMask));
1443
+ __ b(eq, miss);
1444
+ __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
1445
+ __ b(ne, miss);
1446
+
1447
+ // Check the shared function info. Make sure it hasn't changed.
1448
+ __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
1449
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1450
+ __ cmp(r4, r3);
1451
+ __ b(ne, miss);
1452
+ } else {
1453
+ __ cmp(r1, Operand(Handle<JSFunction>(function)));
1454
+ __ b(ne, miss);
1455
+ }
1456
+ }
1457
+
1458
+
1459
+ MaybeObject* CallStubCompiler::GenerateMissBranch() {
1460
+ MaybeObject* maybe_obj =
1461
+ isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1462
+ kind_,
1463
+ extra_ic_state_);
1464
+ Object* obj;
1465
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1466
+ __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1467
+ return obj;
1468
+ }
1469
+
1470
+
1471
+ MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1472
+ JSObject* holder,
1473
+ int index,
1474
+ String* name) {
1475
+ // ----------- S t a t e -------------
1476
+ // -- r2 : name
1477
+ // -- lr : return address
1478
+ // -----------------------------------
1479
+ Label miss;
1480
+
1481
+ GenerateNameCheck(name, &miss);
1482
+
1483
+ const int argc = arguments().immediate();
1484
+
1485
+ // Get the receiver of the function from the stack into r0.
1486
+ __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1487
+ // Check that the receiver isn't a smi.
1488
+ __ tst(r0, Operand(kSmiTagMask));
1489
+ __ b(eq, &miss);
1490
+
1491
+ // Do the right check and compute the holder register.
1492
+ Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
1493
+ GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
1494
+
1495
+ GenerateCallFunction(masm(), object, arguments(), &miss);
1496
+
1497
+ // Handle call cache miss.
1498
+ __ bind(&miss);
1499
+ MaybeObject* maybe_result = GenerateMissBranch();
1500
+ if (maybe_result->IsFailure()) return maybe_result;
1501
+
1502
+ // Return the generated code.
1503
+ return GetCode(FIELD, name);
1504
+ }
1505
+
1506
+
1507
+ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1508
+ JSObject* holder,
1509
+ JSGlobalPropertyCell* cell,
1510
+ JSFunction* function,
1511
+ String* name) {
1512
+ // ----------- S t a t e -------------
1513
+ // -- r2 : name
1514
+ // -- lr : return address
1515
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1516
+ // -- ...
1517
+ // -- sp[argc * 4] : receiver
1518
+ // -----------------------------------
1519
+
1520
+ // If object is not an array, bail out to regular call.
1521
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1522
+
1523
+ Label miss;
1524
+
1525
+ GenerateNameCheck(name, &miss);
1526
+
1527
+ Register receiver = r1;
1528
+
1529
+ // Get the receiver from the stack
1530
+ const int argc = arguments().immediate();
1531
+ __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1532
+
1533
+ // Check that the receiver isn't a smi.
1534
+ __ JumpIfSmi(receiver, &miss);
1535
+
1536
+ // Check that the maps haven't changed.
1537
+ CheckPrototypes(JSObject::cast(object), receiver,
1538
+ holder, r3, r0, r4, name, &miss);
1539
+
1540
+ if (argc == 0) {
1541
+ // Nothing to do, just return the length.
1542
+ __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1543
+ __ Drop(argc + 1);
1544
+ __ Ret();
1545
+ } else {
1546
+ Label call_builtin;
1547
+
1548
+ Register elements = r3;
1549
+ Register end_elements = r5;
1550
+
1551
+ // Get the elements array of the object.
1552
+ __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1553
+
1554
+ // Check that the elements are in fast mode and writable.
1555
+ __ CheckMap(elements,
1556
+ r0,
1557
+ Heap::kFixedArrayMapRootIndex,
1558
+ &call_builtin,
1559
+ DONT_DO_SMI_CHECK);
1560
+
1561
+ if (argc == 1) { // Otherwise fall through to call the builtin.
1562
+ Label exit, with_write_barrier, attempt_to_grow_elements;
1563
+
1564
+ // Get the array's length into r0 and calculate new length.
1565
+ __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1566
+ STATIC_ASSERT(kSmiTagSize == 1);
1567
+ STATIC_ASSERT(kSmiTag == 0);
1568
+ __ add(r0, r0, Operand(Smi::FromInt(argc)));
1569
+
1570
+ // Get the element's length.
1571
+ __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1572
+
1573
+ // Check if we could survive without allocation.
1574
+ __ cmp(r0, r4);
1575
+ __ b(gt, &attempt_to_grow_elements);
1576
+
1577
+ // Save new length.
1578
+ __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1579
+
1580
+ // Push the element.
1581
+ __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
1582
+ // We may need a register containing the address end_elements below,
1583
+ // so write back the value in end_elements.
1584
+ __ add(end_elements, elements,
1585
+ Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1586
+ const int kEndElementsOffset =
1587
+ FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1588
+ __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
1589
+
1590
+ // Check for a smi.
1591
+ __ JumpIfNotSmi(r4, &with_write_barrier);
1592
+ __ bind(&exit);
1593
+ __ Drop(argc + 1);
1594
+ __ Ret();
1595
+
1596
+ __ bind(&with_write_barrier);
1597
+ __ InNewSpace(elements, r4, eq, &exit);
1598
+ __ RecordWriteHelper(elements, end_elements, r4);
1599
+ __ Drop(argc + 1);
1600
+ __ Ret();
1601
+
1602
+ __ bind(&attempt_to_grow_elements);
1603
+ // r0: array's length + 1.
1604
+ // r4: elements' length.
1605
+
1606
+ if (!FLAG_inline_new) {
1607
+ __ b(&call_builtin);
1608
+ }
1609
+
1610
+ Isolate* isolate = masm()->isolate();
1611
+ ExternalReference new_space_allocation_top =
1612
+ ExternalReference::new_space_allocation_top_address(isolate);
1613
+ ExternalReference new_space_allocation_limit =
1614
+ ExternalReference::new_space_allocation_limit_address(isolate);
1615
+
1616
+ const int kAllocationDelta = 4;
1617
+ // Load top and check if it is the end of elements.
1618
+ __ add(end_elements, elements,
1619
+ Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1620
+ __ add(end_elements, end_elements, Operand(kEndElementsOffset));
1621
+ __ mov(r7, Operand(new_space_allocation_top));
1622
+ __ ldr(r6, MemOperand(r7));
1623
+ __ cmp(end_elements, r6);
1624
+ __ b(ne, &call_builtin);
1625
+
1626
+ __ mov(r9, Operand(new_space_allocation_limit));
1627
+ __ ldr(r9, MemOperand(r9));
1628
+ __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
1629
+ __ cmp(r6, r9);
1630
+ __ b(hi, &call_builtin);
1631
+
1632
+ // We fit and could grow elements.
1633
+ // Update new_space_allocation_top.
1634
+ __ str(r6, MemOperand(r7));
1635
+ // Push the argument.
1636
+ __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
1637
+ __ str(r6, MemOperand(end_elements));
1638
+ // Fill the rest with holes.
1639
+ __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
1640
+ for (int i = 1; i < kAllocationDelta; i++) {
1641
+ __ str(r6, MemOperand(end_elements, i * kPointerSize));
1642
+ }
1643
+
1644
+ // Update elements' and array's sizes.
1645
+ __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1646
+ __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
1647
+ __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1648
+
1649
+ // Elements are in new space, so write barrier is not required.
1650
+ __ Drop(argc + 1);
1651
+ __ Ret();
1652
+ }
1653
+ __ bind(&call_builtin);
1654
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1655
+ masm()->isolate()),
1656
+ argc + 1,
1657
+ 1);
1658
+ }
1659
+
1660
+ // Handle call cache miss.
1661
+ __ bind(&miss);
1662
+ MaybeObject* maybe_result = GenerateMissBranch();
1663
+ if (maybe_result->IsFailure()) return maybe_result;
1664
+
1665
+ // Return the generated code.
1666
+ return GetCode(function);
1667
+ }
1668
+
1669
+
1670
+ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1671
+ JSObject* holder,
1672
+ JSGlobalPropertyCell* cell,
1673
+ JSFunction* function,
1674
+ String* name) {
1675
+ // ----------- S t a t e -------------
1676
+ // -- r2 : name
1677
+ // -- lr : return address
1678
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1679
+ // -- ...
1680
+ // -- sp[argc * 4] : receiver
1681
+ // -----------------------------------
1682
+
1683
+ // If object is not an array, bail out to regular call.
1684
+ if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1685
+
1686
+ Label miss, return_undefined, call_builtin;
1687
+
1688
+ Register receiver = r1;
1689
+ Register elements = r3;
1690
+
1691
+ GenerateNameCheck(name, &miss);
1692
+
1693
+ // Get the receiver from the stack
1694
+ const int argc = arguments().immediate();
1695
+ __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1696
+
1697
+ // Check that the receiver isn't a smi.
1698
+ __ JumpIfSmi(receiver, &miss);
1699
+
1700
+ // Check that the maps haven't changed.
1701
+ CheckPrototypes(JSObject::cast(object),
1702
+ receiver, holder, elements, r4, r0, name, &miss);
1703
+
1704
+ // Get the elements array of the object.
1705
+ __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1706
+
1707
+ // Check that the elements are in fast mode and writable.
1708
+ __ CheckMap(elements,
1709
+ r0,
1710
+ Heap::kFixedArrayMapRootIndex,
1711
+ &call_builtin,
1712
+ DONT_DO_SMI_CHECK);
1713
+
1714
+ // Get the array's length into r4 and calculate new length.
1715
+ __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1716
+ __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
1717
+ __ b(lt, &return_undefined);
1718
+
1719
+ // Get the last element.
1720
+ __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
1721
+ STATIC_ASSERT(kSmiTagSize == 1);
1722
+ STATIC_ASSERT(kSmiTag == 0);
1723
+ // We can't address the last element in one operation. Compute the more
1724
+ // expensive shift first, and use an offset later on.
1725
+ __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
1726
+ __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1727
+ __ cmp(r0, r6);
1728
+ __ b(eq, &call_builtin);
1729
+
1730
+ // Set the array's length.
1731
+ __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1732
+
1733
+ // Fill with the hole.
1734
+ __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1735
+ __ Drop(argc + 1);
1736
+ __ Ret();
1737
+
1738
+ __ bind(&return_undefined);
1739
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1740
+ __ Drop(argc + 1);
1741
+ __ Ret();
1742
+
1743
+ __ bind(&call_builtin);
1744
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1745
+ masm()->isolate()),
1746
+ argc + 1,
1747
+ 1);
1748
+
1749
+ // Handle call cache miss.
1750
+ __ bind(&miss);
1751
+ MaybeObject* maybe_result = GenerateMissBranch();
1752
+ if (maybe_result->IsFailure()) return maybe_result;
1753
+
1754
+ // Return the generated code.
1755
+ return GetCode(function);
1756
+ }
1757
+
1758
+
1759
+ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1760
+ Object* object,
1761
+ JSObject* holder,
1762
+ JSGlobalPropertyCell* cell,
1763
+ JSFunction* function,
1764
+ String* name) {
1765
+ // ----------- S t a t e -------------
1766
+ // -- r2 : function name
1767
+ // -- lr : return address
1768
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1769
+ // -- ...
1770
+ // -- sp[argc * 4] : receiver
1771
+ // -----------------------------------
1772
+
1773
+ // If object is not a string, bail out to regular call.
1774
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1775
+
1776
+ const int argc = arguments().immediate();
1777
+
1778
+ Label miss;
1779
+ Label name_miss;
1780
+ Label index_out_of_range;
1781
+ Label* index_out_of_range_label = &index_out_of_range;
1782
+
1783
+ if (kind_ == Code::CALL_IC &&
1784
+ (CallICBase::StringStubState::decode(extra_ic_state_) ==
1785
+ DEFAULT_STRING_STUB)) {
1786
+ index_out_of_range_label = &miss;
1787
+ }
1788
+
1789
+ GenerateNameCheck(name, &name_miss);
1790
+
1791
+ // Check that the maps starting from the prototype haven't changed.
1792
+ GenerateDirectLoadGlobalFunctionPrototype(masm(),
1793
+ Context::STRING_FUNCTION_INDEX,
1794
+ r0,
1795
+ &miss);
1796
+ ASSERT(object != holder);
1797
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
1798
+ r1, r3, r4, name, &miss);
1799
+
1800
+ Register receiver = r1;
1801
+ Register index = r4;
1802
+ Register scratch = r3;
1803
+ Register result = r0;
1804
+ __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1805
+ if (argc > 0) {
1806
+ __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
1807
+ } else {
1808
+ __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1809
+ }
1810
+
1811
+ StringCharCodeAtGenerator char_code_at_generator(receiver,
1812
+ index,
1813
+ scratch,
1814
+ result,
1815
+ &miss, // When not a string.
1816
+ &miss, // When not a number.
1817
+ index_out_of_range_label,
1818
+ STRING_INDEX_IS_NUMBER);
1819
+ char_code_at_generator.GenerateFast(masm());
1820
+ __ Drop(argc + 1);
1821
+ __ Ret();
1822
+
1823
+ StubRuntimeCallHelper call_helper;
1824
+ char_code_at_generator.GenerateSlow(masm(), call_helper);
1825
+
1826
+ if (index_out_of_range.is_linked()) {
1827
+ __ bind(&index_out_of_range);
1828
+ __ LoadRoot(r0, Heap::kNanValueRootIndex);
1829
+ __ Drop(argc + 1);
1830
+ __ Ret();
1831
+ }
1832
+
1833
+ __ bind(&miss);
1834
+ // Restore function name in r2.
1835
+ __ Move(r2, Handle<String>(name));
1836
+ __ bind(&name_miss);
1837
+ MaybeObject* maybe_result = GenerateMissBranch();
1838
+ if (maybe_result->IsFailure()) return maybe_result;
1839
+
1840
+ // Return the generated code.
1841
+ return GetCode(function);
1842
+ }
1843
+
1844
+
1845
+ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1846
+ Object* object,
1847
+ JSObject* holder,
1848
+ JSGlobalPropertyCell* cell,
1849
+ JSFunction* function,
1850
+ String* name) {
1851
+ // ----------- S t a t e -------------
1852
+ // -- r2 : function name
1853
+ // -- lr : return address
1854
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1855
+ // -- ...
1856
+ // -- sp[argc * 4] : receiver
1857
+ // -----------------------------------
1858
+
1859
+ // If object is not a string, bail out to regular call.
1860
+ if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1861
+
1862
+ const int argc = arguments().immediate();
1863
+
1864
+ Label miss;
1865
+ Label name_miss;
1866
+ Label index_out_of_range;
1867
+ Label* index_out_of_range_label = &index_out_of_range;
1868
+
1869
+ if (kind_ == Code::CALL_IC &&
1870
+ (CallICBase::StringStubState::decode(extra_ic_state_) ==
1871
+ DEFAULT_STRING_STUB)) {
1872
+ index_out_of_range_label = &miss;
1873
+ }
1874
+
+ GenerateNameCheck(name, &name_miss);
+
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateDirectLoadGlobalFunctionPrototype(masm(),
+ Context::STRING_FUNCTION_INDEX,
+ r0,
+ &miss);
+ ASSERT(object != holder);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
+ r1, r3, r4, name, &miss);
+
+ Register receiver = r0;
+ Register index = r4;
+ Register scratch1 = r1;
+ Register scratch2 = r3;
+ Register result = r0;
+ __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
+ if (argc > 0) {
+ __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
+ } else {
+ __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
+ }
+
+ StringCharAtGenerator char_at_generator(receiver,
+ index,
+ scratch1,
+ scratch2,
+ result,
+ &miss, // When not a string.
+ &miss, // When not a number.
+ index_out_of_range_label,
+ STRING_INDEX_IS_NUMBER);
+ char_at_generator.GenerateFast(masm());
+ __ Drop(argc + 1);
+ __ Ret();
+
+ StubRuntimeCallHelper call_helper;
+ char_at_generator.GenerateSlow(masm(), call_helper);
+
+ if (index_out_of_range.is_linked()) {
+ __ bind(&index_out_of_range);
+ __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
+ __ Drop(argc + 1);
+ __ Ret();
+ }
+
+ __ bind(&miss);
+ // Restore function name in r2.
+ __ Move(r2, Handle<String>(name));
+ __ bind(&name_miss);
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return GetCode(function);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
+ Object* object,
+ JSObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r2 : function name
+ // -- lr : return address
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- sp[argc * 4] : receiver
+ // -----------------------------------
+
+ const int argc = arguments().immediate();
+
+ // If the object is not a JSObject or we got an unexpected number of
+ // arguments, bail out to the regular call.
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
+
+ Label miss;
+ GenerateNameCheck(name, &miss);
+
+ if (cell == NULL) {
+ __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
+
+ STATIC_ASSERT(kSmiTag == 0);
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
+ &miss);
+ } else {
+ ASSERT(cell->value() == function);
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
+ GenerateLoadFunctionFromCell(cell, function, &miss);
+ }
+
+ // Load the char code argument.
+ Register code = r1;
+ __ ldr(code, MemOperand(sp, 0 * kPointerSize));
+
+ // Check the code is a smi.
+ Label slow;
+ STATIC_ASSERT(kSmiTag == 0);
+ __ tst(code, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+
+ // Convert the smi code to uint16.
+ __ and_(code, code, Operand(Smi::FromInt(0xffff)));
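+ // Masking with the tagged constant Smi::FromInt(0xffff) clears the upper
+ // bits but leaves the smi tag bits intact, so `code` remains a valid smi
+ // holding the char code modulo 2^16.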
+
+ StringCharFromCodeGenerator char_from_code_generator(code, r0);
+ char_from_code_generator.GenerateFast(masm());
+ __ Drop(argc + 1);
+ __ Ret();
+
+ StubRuntimeCallHelper call_helper;
+ char_from_code_generator.GenerateSlow(masm(), call_helper);
+
+ // Tail call the full function. We do not have to patch the receiver
+ // because the function makes no use of it.
+ __ bind(&slow);
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+ __ bind(&miss);
+ // r2: function name.
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
+ JSObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r2 : function name
+ // -- lr : return address
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- sp[argc * 4] : receiver
+ // -----------------------------------
+
+ if (!CpuFeatures::IsSupported(VFP3)) {
+ return heap()->undefined_value();
+ }
+
+ CpuFeatures::Scope scope_vfp3(VFP3);
+
+ const int argc = arguments().immediate();
+
+ // If the object is not a JSObject or we got an unexpected number of
+ // arguments, bail out to the regular call.
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
+
+ Label miss, slow;
+ GenerateNameCheck(name, &miss);
+
+ if (cell == NULL) {
+ __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
+
+ STATIC_ASSERT(kSmiTag == 0);
+ __ JumpIfSmi(r1, &miss);
+
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
+ &miss);
+ } else {
+ ASSERT(cell->value() == function);
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
+ GenerateLoadFunctionFromCell(cell, function, &miss);
+ }
+
+ // Load the (only) argument into r0.
+ __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
+
+ // If the argument is a smi, just return.
+ STATIC_ASSERT(kSmiTag == 0);
+ __ tst(r0, Operand(kSmiTagMask));
+ __ Drop(argc + 1, eq);
+ __ Ret(eq);
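+ // Both the Drop and the Ret above are predicated on eq (tag bits zero),
+ // so a smi argument (already its own floor) is returned as is, while a
+ // heap object falls through to the map check below.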
+
+ __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
+
+ Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;
+
+ // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
+ // minus infinity) mode.
+
+ // Load the HeapNumber value.
+ // We will need access to the value in the core registers, so we load it
+ // with ldrd and move it to the fpu. It also spares a sub instruction for
+ // updating the HeapNumber value address, as vldr expects an offset that
+ // is a multiple of 4.
+ __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
+ __ vmov(d1, r4, r5);
+
+ // Back up FPSCR.
+ __ vmrs(r3);
+ // Set custom FPSCR:
+ // - Set rounding mode to "Round towards Minus Infinity"
+ // (ie bits [23:22] = 0b10).
+ // - Clear vfp cumulative exception flags (bits [3:0]).
+ // - Make sure the Flush-to-zero mode control bit is unset (bit 24).
+ __ bic(r9, r3,
+ Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
+ __ orr(r9, r9, Operand(kRoundToMinusInf));
+ __ vmsr(r9);
+
+ // Convert the argument to an integer.
+ __ vcvt_s32_f64(s0, d1, kFPSCRRounding);
+
+ // Use vcvt latency to start checking for special cases.
+ // Get the argument exponent and clear the sign bit.
+ __ bic(r6, r5, Operand(HeapNumber::kSignMask));
+ __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
+
+ // Retrieve FPSCR and check for vfp exceptions.
+ __ vmrs(r9);
+ __ tst(r9, Operand(kVFPExceptionMask));
+ __ b(&no_vfp_exception, eq);
+
+ // Check for NaN, Infinity, and -Infinity.
+ // They are invariant through a Math.floor call, so just
+ // return the original argument.
+ __ sub(r7, r6, Operand(HeapNumber::kExponentMask
+ >> HeapNumber::kMantissaBitsInTopWord), SetCC);
+ __ b(&restore_fpscr_and_return, eq);
+ // We had an overflow or underflow in the conversion. Check if we
+ // have a big exponent.
+ __ cmp(r7, Operand(HeapNumber::kMantissaBits));
+ // If greater or equal, the argument is already rounded and in r0.
+ __ b(&restore_fpscr_and_return, ge);
+ __ b(&wont_fit_smi);
+
+ __ bind(&no_vfp_exception);
+ // Move the result back to general purpose register r0.
+ __ vmov(r0, s0);
+ // Check if the result fits into a smi.
+ __ add(r1, r0, Operand(0x40000000), SetCC);
+ __ b(&wont_fit_smi, mi);
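+ // Adding 0x40000000 overflows into the sign bit exactly when the value
+ // lies outside the smi range [-2^30, 2^30 - 1], i.e. when it cannot be
+ // tagged by the single left shift below.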
+ // Tag the result.
+ STATIC_ASSERT(kSmiTag == 0);
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+
+ // Check for -0.
+ __ cmp(r0, Operand(0, RelocInfo::NONE));
+ __ b(&restore_fpscr_and_return, ne);
+ // r5 already holds the HeapNumber exponent.
+ __ tst(r5, Operand(HeapNumber::kSignMask));
+ // If our HeapNumber is negative it was -0, so load its address and return.
+ // Else r0 is loaded with 0, so we can also just return.
+ __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);
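+ // Math.floor(-0) is -0, which no smi can represent, so in that case the
+ // original HeapNumber argument (still on the stack) is returned instead
+ // of the smi zero.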
+
+ __ bind(&restore_fpscr_and_return);
+ // Restore FPSCR and return.
+ __ vmsr(r3);
+ __ Drop(argc + 1);
+ __ Ret();
+
+ __ bind(&wont_fit_smi);
+ // Restore FPSCR and fall through to the slow case.
+ __ vmsr(r3);
+
+ __ bind(&slow);
+ // Tail call the full function. We do not have to patch the receiver
+ // because the function makes no use of it.
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+ __ bind(&miss);
+ // r2: function name.
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
+ JSObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r2 : function name
+ // -- lr : return address
+ // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- sp[argc * 4] : receiver
+ // -----------------------------------
+
+ const int argc = arguments().immediate();
+
+ // If the object is not a JSObject or we got an unexpected number of
+ // arguments, bail out to the regular call.
+ if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
+
+ Label miss;
+ GenerateNameCheck(name, &miss);
+
+ if (cell == NULL) {
+ __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
+
+ STATIC_ASSERT(kSmiTag == 0);
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
+ &miss);
+ } else {
+ ASSERT(cell->value() == function);
+ GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
+ GenerateLoadFunctionFromCell(cell, function, &miss);
+ }
+
+ // Load the (only) argument into r0.
+ __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
+
+ // Check if the argument is a smi.
+ Label not_smi;
+ STATIC_ASSERT(kSmiTag == 0);
+ __ JumpIfNotSmi(r0, &not_smi);
+
+ // Do bitwise not or do nothing depending on the sign of the
+ // argument.
+ __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
+
+ // Add 1 or do nothing depending on the sign of the argument.
+ __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
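+ // Together the eor and sub compute |x| without branching: for x < 0 this
+ // is (x ^ -1) - (-1) = ~x + 1 = -x, and for x >= 0 it is x unchanged.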
+
+ // If the result is still negative, go to the slow case.
+ // This only happens for the most negative smi.
+ Label slow;
+ __ b(mi, &slow);
+
+ // Smi case done.
+ __ Drop(argc + 1);
+ __ Ret();
+
+ // Check if the argument is a heap number and load its exponent and
+ // sign.
+ __ bind(&not_smi);
+ __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
+ __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+
+ // Check the sign of the argument. If the argument is positive,
+ // just return it.
+ Label negative_sign;
+ __ tst(r1, Operand(HeapNumber::kSignMask));
+ __ b(ne, &negative_sign);
+ __ Drop(argc + 1);
+ __ Ret();
+
+ // If the argument is negative, clear the sign, and return a new
+ // number.
+ __ bind(&negative_sign);
+ __ eor(r1, r1, Operand(HeapNumber::kSignMask));
+ __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+ __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
+ __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+ __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
+ __ Drop(argc + 1);
+ __ Ret();
+
+ // Tail call the full function. We do not have to patch the receiver
+ // because the function makes no use of it.
+ __ bind(&slow);
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+ __ bind(&miss);
+ // r2: function name.
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileFastApiCall(
+ const CallOptimization& optimization,
+ Object* object,
+ JSObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ Counters* counters = isolate()->counters();
+
+ ASSERT(optimization.is_simple_api_call());
+ // Bail out if the object is a global object, as we don't want to
+ // repatch it to the global receiver.
+ if (object->IsGlobalObject()) return heap()->undefined_value();
+ if (cell != NULL) return heap()->undefined_value();
+ if (!object->IsJSObject()) return heap()->undefined_value();
+ int depth = optimization.GetPrototypeDepthOfExpectedType(
+ JSObject::cast(object), holder);
+ if (depth == kInvalidProtoDepth) return heap()->undefined_value();
+
+ Label miss, miss_before_stack_reserved;
+
+ GenerateNameCheck(name, &miss_before_stack_reserved);
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &miss_before_stack_reserved);
+
+ __ IncrementCounter(counters->call_const(), 1, r0, r3);
+ __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
+
+ ReserveSpaceForFastApiCall(masm(), r0);
+
+ // Check that the maps haven't changed and find a Holder as a side effect.
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
+ depth, &miss);
+
+ MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
+ if (result->IsFailure()) return result;
+
+ __ bind(&miss);
+ FreeSpaceForFastApiCall(masm());
+
+ __ bind(&miss_before_stack_reserved);
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return GetCode(function);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ CheckType check) {
+ // ----------- S t a t e -------------
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ if (HasCustomCallGenerator(function)) {
+ MaybeObject* maybe_result = CompileCustomCall(
+ object, holder, NULL, function, name);
+ Object* result;
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ // undefined means bail out to the regular compiler.
+ if (!result->IsUndefined()) return result;
+ }
+
+ Label miss;
+
+ GenerateNameCheck(name, &miss);
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ if (check != NUMBER_CHECK) {
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+ }
+
+ // Make sure that it's okay not to patch the on-stack receiver
+ // unless we're doing a receiver map check.
+ ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
+
+ SharedFunctionInfo* function_info = function->shared();
+ switch (check) {
+ case RECEIVER_MAP_CHECK:
+ __ IncrementCounter(masm()->isolate()->counters()->call_const(),
+ 1, r0, r3);
+
+ // Check that the maps haven't changed.
+ CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
+ &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
+ __ str(r3, MemOperand(sp, argc * kPointerSize));
+ }
+ break;
+
+ case STRING_CHECK:
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
+ // Calling non-strict non-builtins with a value as the receiver
+ // requires boxing.
+ __ jmp(&miss);
+ } else {
+ // Check that the object is a string or a symbol.
+ __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
+ __ b(hs, &miss);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateDirectLoadGlobalFunctionPrototype(
+ masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
+ r1, r4, name, &miss);
+ }
+ break;
+
+ case NUMBER_CHECK: {
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
+ // Calling non-strict non-builtins with a value as the receiver
+ // requires boxing.
+ __ jmp(&miss);
+ } else {
+ Label fast;
+ // Check that the object is a smi or a heap number.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &fast);
+ __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
+ __ b(ne, &miss);
+ __ bind(&fast);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateDirectLoadGlobalFunctionPrototype(
+ masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
+ r1, r4, name, &miss);
+ }
+ break;
+ }
+
+ case BOOLEAN_CHECK: {
+ if (!function->IsBuiltin() && !function_info->strict_mode()) {
+ // Calling non-strict non-builtins with a value as the receiver
+ // requires boxing.
+ __ jmp(&miss);
+ } else {
+ Label fast;
+ // Check that the object is a boolean.
+ __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+ __ cmp(r1, ip);
+ __ b(eq, &fast);
+ __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+ __ cmp(r1, ip);
+ __ b(ne, &miss);
+ __ bind(&fast);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateDirectLoadGlobalFunctionPrototype(
+ masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
+ r1, r4, name, &miss);
+ }
+ break;
+ }
+
+ default:
+ UNREACHABLE();
+ }
+
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return GetCode(function);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
+ JSObject* holder,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+
+ Label miss;
+
+ GenerateNameCheck(name, &miss);
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+
+ // Get the receiver from the stack.
+ __ ldr(r1, MemOperand(sp, argc * kPointerSize));
+
+ CallInterceptorCompiler compiler(this, arguments(), r2);
+ MaybeObject* result = compiler.Compile(masm(),
+ object,
+ holder,
+ name,
+ &lookup,
+ r1,
+ r3,
+ r4,
+ r0,
+ &miss);
+ if (result->IsFailure()) {
+ return result;
+ }
+
+ // Move the returned value, the function to call, to r1.
+ __ mov(r1, r0);
+ // Restore the receiver.
+ __ ldr(r0, MemOperand(sp, argc * kPointerSize));
+
+ GenerateCallFunction(masm(), object, arguments(), &miss);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+ }
+
+
+ MaybeObject* CallStubCompiler::CompileCallGlobal(
+ JSObject* object,
+ GlobalObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name,
+ Code::ExtraICState extra_ic_state) {
+ // ----------- S t a t e -------------
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+
+ if (HasCustomCallGenerator(function)) {
+ MaybeObject* maybe_result = CompileCustomCall(
+ object, holder, cell, function, name);
+ Object* result;
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ // undefined means bail out to the regular compiler.
+ if (!result->IsUndefined()) return result;
+ }
+
+ Label miss;
+
+ GenerateNameCheck(name, &miss);
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ GenerateGlobalReceiverCheck(object, holder, name, &miss);
+
+ GenerateLoadFunctionFromCell(cell, function, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
+ __ str(r3, MemOperand(sp, argc * kPointerSize));
+ }
+
+ // Set up the context (function already in r1).
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
+ ? CALL_AS_FUNCTION
+ : CALL_AS_METHOD;
+ if (V8::UseCrankshaft()) {
+ // TODO(kasperl): For now, we always call indirectly through the
+ // code field in the function to allow recompilation to take effect
+ // without changing any of the call sites.
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+ __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
+ NullCallWrapper(), call_kind);
+ } else {
+ __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
+ JUMP_FUNCTION, call_kind);
+ }
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
+ MaybeObject* maybe_result = GenerateMissBranch();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+ }
+
+
+ MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
+ int index,
+ Map* transition,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ r1, r2, r3,
+ &miss);
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+ }
+
+
+ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
+ AccessorInfo* callback,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the object isn't a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+
+ // Check that the map of the object hasn't changed.
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Handle<Map>(object->map())));
+ __ b(ne, &miss);
+
+ // Perform global security token check if needed.
+ if (object->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(r1, r3, &miss);
+ }
+
+ // The stub is never generated for non-global objects that require
+ // access checks.
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+
+ __ push(r1); // receiver
+ __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info
+ __ Push(ip, r2, r0);
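+ // The four values now on the stack (receiver, callback info, name and
+ // value) are the arguments consumed by the runtime call below.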
+
+ // Do tail-call to the runtime system.
+ ExternalReference store_callback_property =
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
+ masm()->isolate());
+ __ TailCallExternalReference(store_callback_property, 4, 1);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+ }
+
+
+ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the object isn't a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+
+ // Check that the map of the object hasn't changed.
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Handle<Map>(receiver->map())));
+ __ b(ne, &miss);
+
+ // Perform global security token check if needed.
+ if (receiver->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(r1, r3, &miss);
+ }
+
+ // The stub is never generated for non-global objects that require
+ // access checks.
+ ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
+
+ __ Push(r1, r2, r0); // Receiver, name, value.
+
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
+ __ push(r0); // strict mode
+
+ // Do tail-call to the runtime system.
+ ExternalReference store_ic_property =
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
+ masm()->isolate());
+ __ TailCallExternalReference(store_ic_property, 4, 1);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+ }
+
+
+ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map of the global has not changed.
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Handle<Map>(object->map())));
+ __ b(ne, &miss);
+
+ // Check that the value in the cell is not the hole. If it is, this
+ // cell could have been deleted, and reintroducing the global would
+ // require updating the property details in the property dictionary of
+ // the global object. We bail out to the runtime system to do that.
+ __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
+ __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
+ __ cmp(r5, r6);
+ __ b(eq, &miss);
+
+ // Store the value in the cell.
+ __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
+
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
+ __ Ret();
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+ }
+
+
+ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
+ JSObject* object,
+ JSObject* last) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the receiver is not a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+
+ // Check the maps of the full prototype chain.
+ CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);
+
+ // If the last object in the prototype chain is a global object,
+ // check that the global property cell is empty.
+ if (last->IsGlobalObject()) {
+ MaybeObject* cell = GenerateCheckPropertyCell(masm(),
+ GlobalObject::cast(last),
+ name,
+ r1,
+ &miss);
+ if (cell->IsFailure()) {
+ miss.Unuse();
+ return cell;
+ }
+ }
+
+ // Return undefined if the maps of the full prototype chain are still the
+ // same and no global property with this name contains a value.
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ Ret();
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NONEXISTENT, heap()->empty_string());
+ }
+
+
+ MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
+ JSObject* holder,
+ int index,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(FIELD, name);
+ }
+
+
+ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
+ JSObject* object,
+ JSObject* holder,
+ AccessorInfo* callback) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ MaybeObject* result = GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4,
+ callback, name, &miss);
+ if (result->IsFailure()) {
+ miss.Unuse();
+ return result;
+ }
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+ }
+
+
+ MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
+ JSObject* holder,
+ Object* value,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CONSTANT_FUNCTION, name);
+ }
+
+
+ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
+ JSObject* holder,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+ GenerateLoadInterceptor(object,
+ holder,
+ &lookup,
+ r0,
+ r2,
+ r3,
+ r1,
+ r4,
+ name,
+ &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+ }
+
+
+ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
+ GlobalObject* holder,
+ JSGlobalPropertyCell* cell,
+ String* name,
+ bool is_dont_delete) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // If the object is the holder, then we know it's a global object,
+ // which can only happen for contextual calls. In this case, the
+ // receiver cannot be a smi.
+ if (object != holder) {
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+ }
+
+ // Check that the map of the global has not changed.
+ CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);
+
+ // Get the value from the cell.
+ __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
+
+ // Check for deleted property if property can actually be deleted.
+ if (!is_dont_delete) {
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r4, ip);
+ __ b(eq, &miss);
+ }
+
+ __ mov(r0, r4);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
+ __ Ret();
+
+ __ bind(&miss);
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
+ JSObject* receiver,
+ JSObject* holder,
+ int index) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check the key is the cached one.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ return GetCode(FIELD, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
+ String* name,
+ JSObject* receiver,
+ JSObject* holder,
+ AccessorInfo* callback) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check the key is the cached one.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ MaybeObject* result = GenerateLoadCallback(receiver, holder, r1, r0, r2, r3,
+ r4, callback, name, &miss);
+ if (result->IsFailure()) {
+ miss.Unuse();
+ return result;
+ }
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ return GetCode(CALLBACKS, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
+ JSObject* receiver,
+ JSObject* holder,
+ Object* value) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check the key is the cached one.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CONSTANT_FUNCTION, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
+ JSObject* holder,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check the key is the cached one.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+ GenerateLoadInterceptor(receiver,
+ holder,
+ &lookup,
+ r1,
+ r0,
+ r2,
+ r3,
+ r4,
+ name,
+ &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ return GetCode(INTERCEPTOR, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check the key is the cached one.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ GenerateLoadArrayLength(masm(), r1, r2, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ return GetCode(CALLBACKS, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
+
+ // Check the key is the cached one.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
+ __ bind(&miss);
+ __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
+
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ return GetCode(CALLBACKS, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
+
+ // Check the name hasn't changed.
+ __ cmp(r0, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ return GetCode(CALLBACKS, name);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
+ Code* stub;
+ if (!maybe_stub->To(&stub)) return maybe_stub;
+ __ DispatchMap(r1,
+ r2,
+ Handle<Map>(receiver_map),
+ Handle<Code>(stub),
+ DO_SMI_CHECK);
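+ // DispatchMap tail-jumps into the fast-element stub when the receiver's
+ // map matches receiver_map; otherwise it falls through to the miss
+ // handler below.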
+
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+ }
+
+
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
+ MapList* receiver_maps,
+ CodeList* handler_ics) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+ __ JumpIfSmi(r1, &miss);
+
+ int receiver_count = receiver_maps->length();
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ for (int current = 0; current < receiver_count; ++current) {
+ Handle<Map> map(receiver_maps->at(current));
+ Handle<Code> code(handler_ics->at(current));
+ __ mov(ip, Operand(map));
+ __ cmp(r2, ip);
+ __ Jump(code, RelocInfo::CODE_TARGET, eq);
+ }
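+ // Each cached receiver map is compared in turn; on a match execution
+ // tail-jumps into the handler compiled for that map, and only a receiver
+ // with none of the cached maps reaches the miss handler below.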
+
+ __ bind(&miss);
+ Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
+ __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL, MEGAMORPHIC);
+ }
+
+
+ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
+ int index,
+ Map* transition,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : name
+ // -- r2 : receiver
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
+
+ // Check that the name has not changed.
+ __ cmp(r1, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ // r3 is used as a scratch register. r1 and r2 keep their values if a jump
+ // to the miss label is generated.
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ r2, r1, r3,
+ &miss);
+ __ bind(&miss);
+
+ __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
+ Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+ }
+
+
+ MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
+ Map* receiver_map) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : key
+ // -- r2 : receiver
+ // -- lr : return address
+ // -- r3 : scratch
+ // -----------------------------------
+ bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
+ MaybeObject* maybe_stub =
+ KeyedStoreFastElementStub(is_js_array).TryGetCode();
+ Code* stub;
+ if (!maybe_stub->To(&stub)) return maybe_stub;
+ __ DispatchMap(r2,
+ r3,
+ Handle<Map>(receiver_map),
+ Handle<Code>(stub),
+ DO_SMI_CHECK);
+
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+ }
+
+
+ MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
+ MapList* receiver_maps,
+ CodeList* handler_ics) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : key
+ // -- r2 : receiver
+ // -- lr : return address
+ // -- r3 : scratch
+ // -----------------------------------
+ Label miss;
+ __ JumpIfSmi(r2, &miss);
+
+ int receiver_count = receiver_maps->length();
+ __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+ for (int current = 0; current < receiver_count; ++current) {
+ Handle<Map> map(receiver_maps->at(current));
+ Handle<Code> code(handler_ics->at(current));
+ __ mov(ip, Operand(map));
+ __ cmp(r3, ip);
+ __ Jump(code, RelocInfo::CODE_TARGET, eq);
+ }
+
+ __ bind(&miss);
+ Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
+ __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL, MEGAMORPHIC);
+ }
+
+
+ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
+ // ----------- S t a t e -------------
+ // -- r0 : argc
+ // -- r1 : constructor
+ // -- lr : return address
+ // -- [sp] : last argument
+ // -----------------------------------
+ Label generic_stub_call;
+
+ // Use r7 for holding undefined, which is used in several places below.
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+
+ #ifdef ENABLE_DEBUGGER_SUPPORT
+ // Check to see whether there are any break points in the function code. If
+ // there are, jump to the generic constructor stub, which calls the actual
+ // code for the function, thereby hitting the break points.
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
+ __ cmp(r2, r7);
+ __ b(ne, &generic_stub_call);
+ #endif
+
+ // Load the initial map and verify that it is in fact a map.
+ // r1: constructor function
+ // r7: undefined
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, &generic_stub_call);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ b(ne, &generic_stub_call);
+
+ #ifdef DEBUG
+ // Cannot construct functions this way.
+ // r0: argc
+ // r1: constructor function
+ // r2: initial map
+ // r7: undefined
+ __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
+ __ Check(ne, "Function constructed by construct stub.");
+ #endif
+
+ // Now allocate the JSObject in new space.
+ // r0: argc
+ // r1: constructor function
+ // r2: initial map
+ // r7: undefined
+ __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ __ AllocateInNewSpace(r3,
+ r4,
+ r5,
+ r6,
+ &generic_stub_call,
+ SIZE_IN_WORDS);
+
+ // Allocated the JSObject, now initialize the fields. Map is set to initial
+ // map and properties and elements are set to empty fixed array.
+ // r0: argc
+ // r1: constructor function
+ // r2: initial map
+ // r3: object size (in words)
+ // r4: JSObject (not tagged)
+ // r7: undefined
+ __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
+ __ mov(r5, r4);
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+
+ // Calculate the location of the first argument. The stack contains only the
+ // argc arguments.
+ __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
+
+ // Fill all the in-object properties with undefined.
+ // r0: argc
+ // r1: first argument
+ // r3: object size (in words)
+ // r4: JSObject (not tagged)
+ // r5: First in-object property of JSObject (not tagged)
+ // r7: undefined
+ // Fill the initialized properties with a constant value or a passed argument
+ // depending on the this.x = ...; assignment in the function.
+ SharedFunctionInfo* shared = function->shared();
+ for (int i = 0; i < shared->this_property_assignments_count(); i++) {
+ if (shared->IsThisPropertyAssignmentArgument(i)) {
+ Label not_passed, next;
+ // Check if the argument assigned to the property is actually passed.
+ int arg_number = shared->GetThisPropertyAssignmentArgument(i);
+ __ cmp(r0, Operand(arg_number));
+ __ b(le, &not_passed);
+ // Argument passed - find it on the stack.
+ __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
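+ // r1 points one word above the first argument, so argument n lives at
+ // r1 - (n + 1) * kPointerSize.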
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ __ b(&next);
+ __ bind(&not_passed);
+ // Set the property to undefined.
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
+ __ bind(&next);
+ } else {
+ // Set the property to the constant value.
+ Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
+ __ mov(r2, Operand(constant));
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ }
+ }
+
+ // Fill the unused in-object property fields with undefined.
+ ASSERT(function->has_initial_map());
+ for (int i = shared->this_property_assignments_count();
+ i < function->initial_map()->inobject_properties();
+ i++) {
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
+ }
+
+ // r0: argc
+ // r4: JSObject (not tagged)
+ // Move argc to r1 and the JSObject to return to r0 and tag it.
+ __ mov(r1, r0);
+ __ mov(r0, r4);
+ __ orr(r0, r0, Operand(kHeapObjectTag));
+
+ // r0: JSObject
+ // r1: argc
+ // Remove caller arguments and receiver from the stack and return.
+ __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
+ __ add(sp, sp, Operand(kPointerSize));
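+ // The first add pops the argc arguments; the second pops the receiver
+ // slot that sits above them (at sp[argc * 4] on entry).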
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
+ __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
+ __ Jump(lr);
+
+ // Jump to the generic stub in case the specialized code cannot handle the
+ // construction.
+ __ bind(&generic_stub_call);
+ Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(code, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode();
+ }
+
+
+ MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
+ JSObject* receiver, ExternalArrayType array_type) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ MaybeObject* maybe_stub =
+ KeyedLoadExternalArrayStub(array_type).TryGetCode();
+ Code* stub;
+ if (!maybe_stub->To(&stub)) return maybe_stub;
+ __ DispatchMap(r1,
+ r2,
+ Handle<Map>(receiver->map()),
+ Handle<Code>(stub),
+ DO_SMI_CHECK);
+
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode();
+ }
+
+
+ MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
+ JSObject* receiver, ExternalArrayType array_type) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : name
+ // -- r2 : receiver
+ // -- lr : return address
+ // -----------------------------------
+ MaybeObject* maybe_stub =
+ KeyedStoreExternalArrayStub(array_type).TryGetCode();
+ Code* stub;
+ if (!maybe_stub->To(&stub)) return maybe_stub;
+ __ DispatchMap(r2,
+ r3,
+ Handle<Map>(receiver->map()),
+ Handle<Code>(stub),
+ DO_SMI_CHECK);
+
+ Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ return GetCode();
+ }
+
+
+ #undef __
+ #define __ ACCESS_MASM(masm)
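+ // From here on, __ expands to ACCESS_MASM(masm): the free-standing
+ // generators below receive their MacroAssembler explicitly instead of
+ // going through the stub compiler's masm() accessor used above.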
+
+
+ static bool IsElementTypeSigned(ExternalArrayType array_type) {
+ switch (array_type) {
+ case kExternalByteArray:
+ case kExternalShortArray:
+ case kExternalIntArray:
+ return true;
+
+ case kExternalUnsignedByteArray:
+ case kExternalUnsignedShortArray:
+ case kExternalUnsignedIntArray:
+ return false;
+
+ default:
+ UNREACHABLE();
+ return false;
+ }
+ }
+
+
+ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
+ MacroAssembler* masm,
+ ExternalArrayType array_type) {
+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss_force_generic, slow, failed_allocation;
+
+ Register key = r0;
+ Register receiver = r1;
+
+ // This stub is meant to be tail-jumped to; the receiver must already
+ // have been verified by the caller not to be a smi.
+
+ // Check that the key is a smi.
+ __ JumpIfNotSmi(key, &miss_force_generic);
+
+ __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ // r3: elements array
+
+ // Check that the index is in range.
+ __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
+ __ cmp(ip, Operand(key, ASR, kSmiTagSize));
+ // Unsigned comparison catches both negative and too-large values.
+ __ b(lo, &miss_force_generic);
+
+ __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
+ // r3: base pointer of external storage
+
+ // We are not untagging the smi key; instead we work with it
+ // as if it were premultiplied by 2.
+ ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
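+ // With kSmiTagSize == 1 the key register holds 2 * index, so the scaled
+ // operands below fold the untagging into the address arithmetic: LSR 1
+ // gives index (byte-sized elements), LSL 0 gives 2 * index (16-bit
+ // elements) and LSL 1 gives 4 * index (32-bit elements).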
+
3510
+ Register value = r2;
3511
+ switch (array_type) {
3512
+ case kExternalByteArray:
3513
+ __ ldrsb(value, MemOperand(r3, key, LSR, 1));
3514
+ break;
3515
+ case kExternalPixelArray:
3516
+ case kExternalUnsignedByteArray:
3517
+ __ ldrb(value, MemOperand(r3, key, LSR, 1));
3518
+ break;
3519
+ case kExternalShortArray:
3520
+ __ ldrsh(value, MemOperand(r3, key, LSL, 0));
3521
+ break;
3522
+ case kExternalUnsignedShortArray:
3523
+ __ ldrh(value, MemOperand(r3, key, LSL, 0));
3524
+ break;
3525
+ case kExternalIntArray:
3526
+ case kExternalUnsignedIntArray:
3527
+ __ ldr(value, MemOperand(r3, key, LSL, 1));
3528
+ break;
3529
+ case kExternalFloatArray:
3530
+ if (CpuFeatures::IsSupported(VFP3)) {
3531
+ CpuFeatures::Scope scope(VFP3);
3532
+ __ add(r2, r3, Operand(key, LSL, 1));
3533
+ __ vldr(s0, r2, 0);
3534
+ } else {
3535
+ __ ldr(value, MemOperand(r3, key, LSL, 1));
3536
+ }
3537
+ break;
3538
+ case kExternalDoubleArray:
3539
+ if (CpuFeatures::IsSupported(VFP3)) {
3540
+ CpuFeatures::Scope scope(VFP3);
3541
+ __ add(r2, r3, Operand(key, LSL, 2));
3542
+ __ vldr(d0, r2, 0);
3543
+ } else {
3544
+ __ add(r4, r3, Operand(key, LSL, 2));
3545
+ // r4: pointer to the beginning of the double we want to load.
3546
+ __ ldr(r2, MemOperand(r4, 0));
3547
+ __ ldr(r3, MemOperand(r4, Register::kSizeInBytes));
3548
+ }
3549
+ break;
3550
+ default:
3551
+ UNREACHABLE();
3552
+ break;
3553
+ }
3554
+
3555
+ // For integer array types:
3556
+ // r2: value
3557
+ // For float array type:
3558
+ // s0: value (if VFP3 is supported)
3559
+ // r2: value (if VFP3 is not supported)
3560
+ // For double array type:
3561
+ // d0: value (if VFP3 is supported)
3562
+ // r2/r3: value (if VFP3 is not supported)
3563
+
3564
+ if (array_type == kExternalIntArray) {
3565
+ // For the Int and UnsignedInt array types, we need to see whether
3566
+ // the value can be represented in a Smi. If not, we need to convert
3567
+ // it to a HeapNumber.
3568
+ Label box_int;
3569
+ __ cmp(value, Operand(0xC0000000));
3570
+ __ b(mi, &box_int);
3571
+    // Tag integer as smi and return it.
+    __ mov(r0, Operand(value, LSL, kSmiTagSize));
+    __ Ret();
+
+    __ bind(&box_int);
+    // Allocate a HeapNumber for the result and perform int-to-double
+    // conversion. Don't touch r0 or r1 as they are needed if allocation
+    // fails.
+    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
+    // Now we can use r0 for the result as key is not needed any more.
+    __ mov(r0, r5);
+
+    if (CpuFeatures::IsSupported(VFP3)) {
+      CpuFeatures::Scope scope(VFP3);
+      __ vmov(s0, value);
+      __ vcvt_f64_s32(d0, s0);
+      __ sub(r3, r0, Operand(kHeapObjectTag));
+      __ vstr(d0, r3, HeapNumber::kValueOffset);
+      __ Ret();
+    } else {
+      Register dst1 = r1;
+      Register dst2 = r3;
+      FloatingPointHelper::Destination dest =
+          FloatingPointHelper::kCoreRegisters;
+      FloatingPointHelper::ConvertIntToDouble(masm,
+                                              value,
+                                              dest,
+                                              d0,
+                                              dst1,
+                                              dst2,
+                                              r9,
+                                              s0);
+      __ str(dst1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
+      __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+      __ Ret();
+    }
+  } else if (array_type == kExternalUnsignedIntArray) {
+    // The test is different for unsigned int values. Since we need
+    // the value to be in the range of a positive smi, we can't
+    // handle either of the top two bits being set in the value.
+    if (CpuFeatures::IsSupported(VFP3)) {
+      CpuFeatures::Scope scope(VFP3);
+      Label box_int, done;
+      __ tst(value, Operand(0xC0000000));
+      __ b(ne, &box_int);
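+      // A non-negative smi holds at most 30 value bits, so the unsigned
+      // value fits only if bits 31 and 30 are both clear; the tst above
+      // checks exactly that mask (0xC0000000).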
+      // Tag integer as smi and return it.
+      __ mov(r0, Operand(value, LSL, kSmiTagSize));
+      __ Ret();
+
+      __ bind(&box_int);
+      __ vmov(s0, value);
+      // Allocate a HeapNumber for the result and perform int-to-double
+      // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
+      // registers - also when jumping due to exhausted young space.
+      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
+
+      __ vcvt_f64_u32(d0, s0);
+      __ sub(r1, r2, Operand(kHeapObjectTag));
+      __ vstr(d0, r1, HeapNumber::kValueOffset);
+
+      __ mov(r0, r2);
+      __ Ret();
+    } else {
+      // Check whether the unsigned integer fits into a smi.
+      Label box_int_0, box_int_1, done;
+      __ tst(value, Operand(0x80000000));
+      __ b(ne, &box_int_0);
+      __ tst(value, Operand(0x40000000));
+      __ b(ne, &box_int_1);
+      // Tag integer as smi and return it.
+      __ mov(r0, Operand(value, LSL, kSmiTagSize));
+      __ Ret();
+
+      Register hiword = value;  // r2.
+      Register loword = r3;
+
+      __ bind(&box_int_0);
+      // The integer has no leading zeros.
+      GenerateUInt2Double(masm, hiword, loword, r4, 0);
+      __ b(&done);
+
+      __ bind(&box_int_1);
+      // The integer has one leading zero.
+      GenerateUInt2Double(masm, hiword, loword, r4, 1);
+
+      __ bind(&done);
+      // Integer was converted to double in registers hiword:loword.
+      // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
+      // clobbers all registers - also when jumping due to exhausted young
+      // space.
+      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+      __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
+
+      __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
+      __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
+
+      __ mov(r0, r4);
+      __ Ret();
+    }
+  } else if (array_type == kExternalFloatArray) {
+    // For the floating-point array type, we need to always allocate a
+    // HeapNumber.
+    if (CpuFeatures::IsSupported(VFP3)) {
+      CpuFeatures::Scope scope(VFP3);
+      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
+      // AllocateHeapNumber clobbers all registers - also when jumping due to
+      // exhausted young space.
+      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
+      __ vcvt_f64_f32(d0, s0);
+      __ sub(r1, r2, Operand(kHeapObjectTag));
+      __ vstr(d0, r1, HeapNumber::kValueOffset);
+
+      __ mov(r0, r2);
+      __ Ret();
+    } else {
+      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
+      // AllocateHeapNumber clobbers all registers - also when jumping due to
+      // exhausted young space.
+      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+      __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
+      // VFP is not available; do a manual single-to-double conversion.
+
+      // r2: floating point value (binary32)
+      // r3: heap number for result
+
+      // Extract the mantissa to r0. OK to clobber r0 now as there are no
+      // jumps to the slow case from here.
+      __ and_(r0, value, Operand(kBinary32MantissaMask));
+
+      // Extract the exponent to r1. OK to clobber r1 now as there are no
+      // jumps to the slow case from here.
+      __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
+      __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
+
+      Label exponent_rebiased;
+      __ teq(r1, Operand(0x00));
+      __ b(eq, &exponent_rebiased);
+
+      __ teq(r1, Operand(0xff));
+      __ mov(r1, Operand(0x7ff), LeaveCC, eq);
+      __ b(eq, &exponent_rebiased);
+
+      // Rebias the exponent.
+      __ add(r1,
+             r1,
+             Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
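+      // Example: binary32 exponents are biased by 127 and binary64
+      // (HeapNumber) exponents by 1023, so rebasing adds 1023 - 127 = 896.
+      // The special encodings are preserved above: 0x00 (zero/denormal)
+      // stays 0x00, and 0xff (infinity/NaN) was already mapped to 0x7ff.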
+
+      __ bind(&exponent_rebiased);
+      __ and_(r2, value, Operand(kBinary32SignMask));
+      value = no_reg;
+      __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
+
+      // Shift the mantissa.
+      static const int kMantissaShiftForHiWord =
+          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
+
+      static const int kMantissaShiftForLoWord =
+          kBitsPerInt - kMantissaShiftForHiWord;
+
+      __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
+      __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
+
+      __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
+      __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
+
+      __ mov(r0, r3);
+      __ Ret();
+    }
+  } else if (array_type == kExternalDoubleArray) {
+    if (CpuFeatures::IsSupported(VFP3)) {
+      CpuFeatures::Scope scope(VFP3);
+      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
+      // AllocateHeapNumber clobbers all registers - also when jumping due to
+      // exhausted young space.
+      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
+      __ sub(r1, r2, Operand(kHeapObjectTag));
+      __ vstr(d0, r1, HeapNumber::kValueOffset);
+
+      __ mov(r0, r2);
+      __ Ret();
+    } else {
+      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
+      // AllocateHeapNumber clobbers all registers - also when jumping due to
+      // exhausted young space.
+      __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);
+      __ AllocateHeapNumber(r4, r5, r6, r7, &slow);
+
+      __ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
+      __ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset));
+      __ mov(r0, r4);
+      __ Ret();
+    }
+  } else {
+    // Tag integer as smi and return it.
+    __ mov(r0, Operand(value, LSL, kSmiTagSize));
+    __ Ret();
+  }
+
+  // Slow case: the key and receiver are still in r0 and r1.
+  __ bind(&slow);
+  __ IncrementCounter(
+      masm->isolate()->counters()->keyed_load_external_array_slow(),
+      1, r2, r3);
+
+  // ---------- S t a t e --------------
+  //  -- lr     : return address
+  //  -- r0     : key
+  //  -- r1     : receiver
+  // -----------------------------------
+
+  __ Push(r1, r0);
+
+  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
+
+  __ bind(&miss_force_generic);
+  Code* stub = masm->isolate()->builtins()->builtin(
+      Builtins::kKeyedLoadIC_MissForceGeneric);
+  __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
+}
+
+
+void KeyedStoreStubCompiler::GenerateStoreExternalArray(
+    MacroAssembler* masm,
+    ExternalArrayType array_type) {
+  // ---------- S t a t e --------------
+  //  -- r0     : value
+  //  -- r1     : key
+  //  -- r2     : receiver
+  //  -- lr     : return address
+  // -----------------------------------
+  Label slow, check_heap_number, miss_force_generic;
+
+  // Register usage.
+  Register value = r0;
+  Register key = r1;
+  Register receiver = r2;
+  // r3 mostly holds the elements array or the destination external array.
+
+  // This stub is meant to be tail-jumped to; the caller must have already
+  // verified that the receiver is not a smi.
+
+  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
+
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(key, &miss_force_generic);
+
+  // Check that the index is in range.
+  __ SmiUntag(r4, key);
+  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
+  __ cmp(r4, ip);
+  // Unsigned comparison catches both negative and too-large values.
+  __ b(hs, &miss_force_generic);
+
+  // Handle both smis and HeapNumbers in the fast path. Go to the
+  // runtime for all other kinds of values.
+  // r3: external array.
+  // r4: key (integer).
+  if (array_type == kExternalPixelArray) {
+    // Double-to-pixel conversion is only implemented in the runtime for now.
+    __ JumpIfNotSmi(value, &slow);
+  } else {
+    __ JumpIfNotSmi(value, &check_heap_number);
+  }
+  __ SmiUntag(r5, value);
+  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
+
+  // r3: base pointer of external storage.
+  // r4: key (integer).
+  // r5: value (integer).
+  switch (array_type) {
+    case kExternalPixelArray:
+      // Clamp the value to [0..255].
+      __ Usat(r5, 8, Operand(r5));
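+      // Usat saturates the signed input to the unsigned 8-bit range,
+      // e.g. -5 becomes 0 and 300 becomes 255.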
+      __ strb(r5, MemOperand(r3, r4, LSL, 0));
+      break;
+    case kExternalByteArray:
+    case kExternalUnsignedByteArray:
+      __ strb(r5, MemOperand(r3, r4, LSL, 0));
+      break;
+    case kExternalShortArray:
+    case kExternalUnsignedShortArray:
+      __ strh(r5, MemOperand(r3, r4, LSL, 1));
+      break;
+    case kExternalIntArray:
+    case kExternalUnsignedIntArray:
+      __ str(r5, MemOperand(r3, r4, LSL, 2));
+      break;
+    case kExternalFloatArray:
+      // Perform int-to-float conversion and store to memory.
+      StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
+      break;
+    case kExternalDoubleArray:
+      __ add(r3, r3, Operand(r4, LSL, 3));
+      // r3: effective address of the double element
+      FloatingPointHelper::Destination destination;
+      if (CpuFeatures::IsSupported(VFP3)) {
+        destination = FloatingPointHelper::kVFPRegisters;
+      } else {
+        destination = FloatingPointHelper::kCoreRegisters;
+      }
+      FloatingPointHelper::ConvertIntToDouble(
+          masm, r5, destination,
+          d0, r6, r7,  // These are: double_dst, dst1, dst2.
+          r4, s2);  // These are: scratch2, single_scratch.
+      if (destination == FloatingPointHelper::kVFPRegisters) {
+        CpuFeatures::Scope scope(VFP3);
+        __ vstr(d0, r3, 0);
+      } else {
+        __ str(r6, MemOperand(r3, 0));
+        __ str(r7, MemOperand(r3, Register::kSizeInBytes));
+      }
+      break;
+    default:
+      UNREACHABLE();
+      break;
+  }
+
+  // Entry registers are intact; r0 holds the value, which is the return
+  // value.
+  __ Ret();
+
+  if (array_type != kExternalPixelArray) {
+    // r3: external array.
+    // r4: index (integer).
+    __ bind(&check_heap_number);
+    __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
+    __ b(ne, &slow);
+
+    __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
+
+    // r3: base pointer of external storage.
+    // r4: key (integer).
+
+    // The WebGL specification leaves the behavior of storing NaN and
+    // +/-Infinity into integer arrays basically undefined. For more
+    // reproducible behavior, convert these to zero.
+    if (CpuFeatures::IsSupported(VFP3)) {
+      CpuFeatures::Scope scope(VFP3);
+
+      if (array_type == kExternalFloatArray) {
+        // vldr requires the offset to be a multiple of 4, so we cannot
+        // fold -kHeapObjectTag into it.
+        __ sub(r5, r0, Operand(kHeapObjectTag));
+        __ vldr(d0, r5, HeapNumber::kValueOffset);
+        __ add(r5, r3, Operand(r4, LSL, 2));
+        __ vcvt_f32_f64(s0, d0);
+        __ vstr(s0, r5, 0);
+      } else if (array_type == kExternalDoubleArray) {
+        __ sub(r5, r0, Operand(kHeapObjectTag));
+        __ vldr(d0, r5, HeapNumber::kValueOffset);
+        __ add(r5, r3, Operand(r4, LSL, 3));
+        __ vstr(d0, r5, 0);
+      } else {
+        // Need to perform a float-to-int conversion.
+        // Test for NaN or infinity (both give zero).
+        __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));
+
+        // Hoisted load. vldr requires the offset to be a multiple of 4,
+        // so we cannot fold -kHeapObjectTag into it.
+        __ sub(r5, value, Operand(kHeapObjectTag));
+        __ vldr(d0, r5, HeapNumber::kValueOffset);
+
+        __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
+        // NaNs and Infinities have all-one exponents so they sign extend
+        // to -1.
+        __ cmp(r6, Operand(-1));
+        __ mov(r5, Operand(0), LeaveCC, eq);
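+        // A binary64 exponent field of all ones (0x7ff) is reserved for
+        // NaN and infinity; Sbfx sign-extends the 11-bit field, so it
+        // reads as -1 in exactly those cases and the stored value is
+        // forced to zero.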
+
+        // Not infinity or NaN: simply convert to int.
+        if (IsElementTypeSigned(array_type)) {
+          __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
+        } else {
+          __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
+        }
+        __ vmov(r5, s0, ne);
+
+        switch (array_type) {
+          case kExternalByteArray:
+          case kExternalUnsignedByteArray:
+            __ strb(r5, MemOperand(r3, r4, LSL, 0));
+            break;
+          case kExternalShortArray:
+          case kExternalUnsignedShortArray:
+            __ strh(r5, MemOperand(r3, r4, LSL, 1));
+            break;
+          case kExternalIntArray:
+          case kExternalUnsignedIntArray:
+            __ str(r5, MemOperand(r3, r4, LSL, 2));
+            break;
+          default:
+            UNREACHABLE();
+            break;
+        }
+      }
+
+      // Entry registers are intact; r0 holds the value, which is the
+      // return value.
+      __ Ret();
+    } else {
+      // VFP3 is not available; do manual conversions.
+      __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
+      __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
+
+      if (array_type == kExternalFloatArray) {
+        Label done, nan_or_infinity_or_zero;
+        static const int kMantissaInHiWordShift =
+            kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
+
+        static const int kMantissaInLoWordShift =
+            kBitsPerInt - kMantissaInHiWordShift;
+
+        // Test for all special exponent values: zeros, subnormal numbers,
+        // NaNs and infinities. All these should be converted to 0.
+        __ mov(r7, Operand(HeapNumber::kExponentMask));
+        __ and_(r9, r5, Operand(r7), SetCC);
+        __ b(eq, &nan_or_infinity_or_zero);
+
+        __ teq(r9, Operand(r7));
+        __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
+        __ b(eq, &nan_or_infinity_or_zero);
+
+        // Rebias the exponent.
+        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
+        __ add(r9,
+               r9,
+               Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
+
+        __ cmp(r9, Operand(kBinary32MaxExponent));
+        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
+        __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
+        __ b(gt, &done);
+
+        __ cmp(r9, Operand(kBinary32MinExponent));
+        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
+        __ b(lt, &done);
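+        // Exponents above kBinary32MaxExponent overflow binary32: only
+        // the sign is kept and the exponent field is forced to all ones,
+        // which encodes +/-Infinity. Exponents below kBinary32MinExponent
+        // are flushed to a signed zero instead.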
+
+        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
+        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
+        __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
+        __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
+        __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
+
+        __ bind(&done);
+        __ str(r5, MemOperand(r3, r4, LSL, 2));
+        // Entry registers are intact; r0 holds the value, which is the
+        // return value.
+        __ Ret();
+
+        __ bind(&nan_or_infinity_or_zero);
+        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
+        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
+        __ orr(r9, r9, r7);
+        __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
+        __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
+        __ b(&done);
+      } else if (array_type == kExternalDoubleArray) {
+        __ add(r7, r3, Operand(r4, LSL, 3));
+        // r7: effective address of destination element.
+        __ str(r6, MemOperand(r7, 0));
+        __ str(r5, MemOperand(r7, Register::kSizeInBytes));
+        __ Ret();
+      } else {
+        bool is_signed_type = IsElementTypeSigned(array_type);
+        int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
+        int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
+
+        Label done, sign;
+
+        // Test for all special exponent values: zeros, subnormal numbers,
+        // NaNs and infinities. All these should be converted to 0.
+        __ mov(r7, Operand(HeapNumber::kExponentMask));
+        __ and_(r9, r5, Operand(r7), SetCC);
+        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+        __ b(eq, &done);
+
+        __ teq(r9, Operand(r7));
+        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+        __ b(eq, &done);
+
+        // Unbias the exponent.
+        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
+        __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
+        // If the exponent is negative then the result is 0.
+        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
+        __ b(mi, &done);
+
+        // If the exponent is too big then the result is the minimal value.
+        __ cmp(r9, Operand(meaningful_bits - 1));
+        __ mov(r5, Operand(min_value), LeaveCC, ge);
+        __ b(ge, &done);
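+        // Example: storing 2^40 (unbiased exponent 40) takes the ge
+        // branch and saturates to min_value, i.e. 0x80000000 for signed
+        // element types and 0 for unsigned ones.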
+
+        __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
+        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
+        __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
+
+        __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
+        __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
+        __ b(pl, &sign);
+
+        __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
+        __ mov(r5, Operand(r5, LSL, r9));
+        __ rsb(r9, r9, Operand(meaningful_bits));
+        __ orr(r5, r5, Operand(r6, LSR, r9));
+
+        __ bind(&sign);
+        __ teq(r7, Operand(0, RelocInfo::NONE));
+        __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+
+        __ bind(&done);
+        switch (array_type) {
+          case kExternalByteArray:
+          case kExternalUnsignedByteArray:
+            __ strb(r5, MemOperand(r3, r4, LSL, 0));
+            break;
+          case kExternalShortArray:
+          case kExternalUnsignedShortArray:
+            __ strh(r5, MemOperand(r3, r4, LSL, 1));
+            break;
+          case kExternalIntArray:
+          case kExternalUnsignedIntArray:
+            __ str(r5, MemOperand(r3, r4, LSL, 2));
+            break;
+          default:
+            UNREACHABLE();
+            break;
+        }
+      }
+    }
+  }
+
+  // Slow case: the value, key and receiver are still in r0, r1 and r2.
+  __ bind(&slow);
+  __ IncrementCounter(
+      masm->isolate()->counters()->keyed_load_external_array_slow(),
+      1, r2, r3);
+
+  // ---------- S t a t e --------------
+  //  -- r0     : value
+  //  -- r1     : key
+  //  -- r2     : receiver
+  //  -- lr     : return address
+  // -----------------------------------
+  Handle<Code> slow_ic =
+      masm->isolate()->builtins()->KeyedStoreIC_Slow();
+  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
+
+  // Miss case: call the runtime.
+  __ bind(&miss_force_generic);
+
+  // ---------- S t a t e --------------
+  //  -- r0     : value
+  //  -- r1     : key
+  //  -- r2     : receiver
+  //  -- lr     : return address
+  // -----------------------------------
+
+  Handle<Code> miss_ic =
+      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
+  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
+}
+
+
+void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- lr     : return address
+  //  -- r0     : key
+  //  -- r1     : receiver
+  // -----------------------------------
+  Label miss_force_generic;
+
+  // This stub is meant to be tail-jumped to; the caller must have already
+  // verified that the receiver is not a smi.
+
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(r0, &miss_force_generic);
+
+  // Get the elements array.
+  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
+  __ AssertFastElements(r2);
+
+  // Check that the key is within bounds.
+  __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
+  __ cmp(r0, Operand(r3));
+  __ b(hs, &miss_force_generic);
+
+  // Load the result and make sure it's not the hole.
+  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  __ ldr(r4,
+         MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+  __ cmp(r4, ip);
+  __ b(eq, &miss_force_generic);
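+  // The hole marks slots that have no own element; returning it would
+  // leak the sentinel, so such loads fall back to the generic IC, which
+  // does the full lookup (prototype chain, undefined, etc.).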
+  __ mov(r0, r4);
+  __ Ret();
+
+  __ bind(&miss_force_generic);
+  Code* stub = masm->isolate()->builtins()->builtin(
+      Builtins::kKeyedLoadIC_MissForceGeneric);
+  __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
+}
+
+
+void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
+                                                      bool is_js_array) {
+  // ----------- S t a t e -------------
+  //  -- r0     : value
+  //  -- r1     : key
+  //  -- r2     : receiver
+  //  -- lr     : return address
+  //  -- r3     : scratch
+  //  -- r4     : scratch (elements)
+  // -----------------------------------
+  Label miss_force_generic;
+
+  Register value_reg = r0;
+  Register key_reg = r1;
+  Register receiver_reg = r2;
+  Register scratch = r3;
+  Register elements_reg = r4;
+
+  // This stub is meant to be tail-jumped to; the caller must have already
+  // verified that the receiver is not a smi.
+
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(r0, &miss_force_generic);
+
+  // Get the elements array and make sure it is a fast element array,
+  // not copy-on-write ('cow').
+  __ ldr(elements_reg,
+         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+  __ CheckMap(elements_reg,
+              scratch,
+              Heap::kFixedArrayMapRootIndex,
+              &miss_force_generic,
+              DONT_DO_SMI_CHECK);
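+  // Copy-on-write backing stores use a distinct map, so the CheckMap
+  // against the plain FixedArray map above rejects them and the store
+  // falls back to the generic stub, which can handle COW arrays.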
+
+  // Check that the key is within bounds.
+  if (is_js_array) {
+    __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+  } else {
+    __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+  }
+  // Compare smis.
+  __ cmp(key_reg, scratch);
+  __ b(hs, &miss_force_generic);
+
+  __ add(scratch,
+         elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  __ str(value_reg,
+         MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
+  __ RecordWrite(scratch,
+                 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
+                 receiver_reg, elements_reg);
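+  // RecordWrite is the GC write barrier: a heap pointer may have been
+  // stored into the elements array, so the collector must be told about
+  // the updated slot.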
+
+  // value_reg (r0) is preserved.
+  // Done.
+  __ Ret();
+
+  __ bind(&miss_force_generic);
+  Handle<Code> ic =
+      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
+  __ Jump(ic, RelocInfo::CODE_TARGET);
+}
+
+
+#undef __
+
+} }  // namespace v8::internal
+
+#endif  // V8_TARGET_ARCH_ARM