libv8 3.11.8.17 → 3.16.14.0

Files changed (754)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -2
  3. data/Gemfile +1 -1
  4. data/Rakefile +6 -7
  5. data/lib/libv8/version.rb +1 -1
  6. data/vendor/v8/.gitignore +24 -3
  7. data/vendor/v8/AUTHORS +7 -0
  8. data/vendor/v8/ChangeLog +839 -0
  9. data/vendor/v8/DEPS +1 -1
  10. data/vendor/v8/Makefile.android +92 -0
  11. data/vendor/v8/OWNERS +11 -0
  12. data/vendor/v8/PRESUBMIT.py +71 -0
  13. data/vendor/v8/SConstruct +34 -39
  14. data/vendor/v8/build/android.gypi +56 -37
  15. data/vendor/v8/build/common.gypi +112 -30
  16. data/vendor/v8/build/gyp_v8 +1 -1
  17. data/vendor/v8/build/standalone.gypi +15 -11
  18. data/vendor/v8/include/v8-debug.h +9 -1
  19. data/vendor/v8/include/v8-preparser.h +4 -3
  20. data/vendor/v8/include/v8-profiler.h +25 -25
  21. data/vendor/v8/include/v8-testing.h +4 -3
  22. data/vendor/v8/include/v8.h +994 -540
  23. data/vendor/v8/preparser/preparser-process.cc +3 -3
  24. data/vendor/v8/samples/lineprocessor.cc +20 -27
  25. data/vendor/v8/samples/process.cc +18 -14
  26. data/vendor/v8/samples/shell.cc +16 -15
  27. data/vendor/v8/src/SConscript +15 -14
  28. data/vendor/v8/src/accessors.cc +169 -77
  29. data/vendor/v8/src/accessors.h +4 -0
  30. data/vendor/v8/src/allocation-inl.h +2 -2
  31. data/vendor/v8/src/allocation.h +7 -7
  32. data/vendor/v8/src/api.cc +810 -497
  33. data/vendor/v8/src/api.h +85 -60
  34. data/vendor/v8/src/arm/assembler-arm-inl.h +179 -22
  35. data/vendor/v8/src/arm/assembler-arm.cc +633 -264
  36. data/vendor/v8/src/arm/assembler-arm.h +264 -197
  37. data/vendor/v8/src/arm/builtins-arm.cc +117 -27
  38. data/vendor/v8/src/arm/code-stubs-arm.cc +1241 -700
  39. data/vendor/v8/src/arm/code-stubs-arm.h +35 -138
  40. data/vendor/v8/src/arm/codegen-arm.cc +285 -16
  41. data/vendor/v8/src/arm/codegen-arm.h +22 -0
  42. data/vendor/v8/src/arm/constants-arm.cc +5 -3
  43. data/vendor/v8/src/arm/constants-arm.h +24 -11
  44. data/vendor/v8/src/arm/debug-arm.cc +3 -3
  45. data/vendor/v8/src/arm/deoptimizer-arm.cc +382 -92
  46. data/vendor/v8/src/arm/disasm-arm.cc +61 -12
  47. data/vendor/v8/src/arm/frames-arm.h +0 -14
  48. data/vendor/v8/src/arm/full-codegen-arm.cc +332 -304
  49. data/vendor/v8/src/arm/ic-arm.cc +180 -259
  50. data/vendor/v8/src/arm/lithium-arm.cc +364 -316
  51. data/vendor/v8/src/arm/lithium-arm.h +512 -275
  52. data/vendor/v8/src/arm/lithium-codegen-arm.cc +1768 -809
  53. data/vendor/v8/src/arm/lithium-codegen-arm.h +97 -35
  54. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +12 -5
  55. data/vendor/v8/src/arm/macro-assembler-arm.cc +439 -228
  56. data/vendor/v8/src/arm/macro-assembler-arm.h +116 -70
  57. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +54 -44
  58. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +3 -10
  59. data/vendor/v8/src/arm/simulator-arm.cc +272 -238
  60. data/vendor/v8/src/arm/simulator-arm.h +38 -8
  61. data/vendor/v8/src/arm/stub-cache-arm.cc +522 -895
  62. data/vendor/v8/src/array.js +101 -70
  63. data/vendor/v8/src/assembler.cc +270 -19
  64. data/vendor/v8/src/assembler.h +110 -15
  65. data/vendor/v8/src/ast.cc +79 -69
  66. data/vendor/v8/src/ast.h +255 -301
  67. data/vendor/v8/src/atomicops.h +7 -1
  68. data/vendor/v8/src/atomicops_internals_tsan.h +335 -0
  69. data/vendor/v8/src/bootstrapper.cc +481 -418
  70. data/vendor/v8/src/bootstrapper.h +4 -4
  71. data/vendor/v8/src/builtins.cc +498 -311
  72. data/vendor/v8/src/builtins.h +75 -47
  73. data/vendor/v8/src/checks.cc +2 -1
  74. data/vendor/v8/src/checks.h +8 -0
  75. data/vendor/v8/src/code-stubs-hydrogen.cc +253 -0
  76. data/vendor/v8/src/code-stubs.cc +249 -84
  77. data/vendor/v8/src/code-stubs.h +501 -169
  78. data/vendor/v8/src/codegen.cc +36 -18
  79. data/vendor/v8/src/codegen.h +25 -3
  80. data/vendor/v8/src/collection.js +54 -17
  81. data/vendor/v8/src/compilation-cache.cc +24 -16
  82. data/vendor/v8/src/compilation-cache.h +15 -6
  83. data/vendor/v8/src/compiler.cc +497 -195
  84. data/vendor/v8/src/compiler.h +246 -38
  85. data/vendor/v8/src/contexts.cc +64 -24
  86. data/vendor/v8/src/contexts.h +60 -29
  87. data/vendor/v8/src/conversions-inl.h +24 -14
  88. data/vendor/v8/src/conversions.h +7 -4
  89. data/vendor/v8/src/counters.cc +21 -12
  90. data/vendor/v8/src/counters.h +44 -16
  91. data/vendor/v8/src/cpu-profiler.h +1 -1
  92. data/vendor/v8/src/d8-debug.cc +2 -2
  93. data/vendor/v8/src/d8-readline.cc +13 -2
  94. data/vendor/v8/src/d8.cc +681 -273
  95. data/vendor/v8/src/d8.gyp +4 -4
  96. data/vendor/v8/src/d8.h +38 -18
  97. data/vendor/v8/src/d8.js +0 -617
  98. data/vendor/v8/src/data-flow.h +55 -0
  99. data/vendor/v8/src/date.js +1 -42
  100. data/vendor/v8/src/dateparser-inl.h +5 -1
  101. data/vendor/v8/src/debug-agent.cc +10 -15
  102. data/vendor/v8/src/debug-debugger.js +147 -149
  103. data/vendor/v8/src/debug.cc +323 -164
  104. data/vendor/v8/src/debug.h +26 -14
  105. data/vendor/v8/src/deoptimizer.cc +765 -290
  106. data/vendor/v8/src/deoptimizer.h +130 -28
  107. data/vendor/v8/src/disassembler.cc +10 -4
  108. data/vendor/v8/src/elements-kind.cc +7 -2
  109. data/vendor/v8/src/elements-kind.h +19 -0
  110. data/vendor/v8/src/elements.cc +607 -285
  111. data/vendor/v8/src/elements.h +36 -13
  112. data/vendor/v8/src/execution.cc +52 -31
  113. data/vendor/v8/src/execution.h +4 -4
  114. data/vendor/v8/src/extensions/externalize-string-extension.cc +5 -4
  115. data/vendor/v8/src/extensions/gc-extension.cc +5 -1
  116. data/vendor/v8/src/extensions/statistics-extension.cc +153 -0
  117. data/vendor/v8/src/{inspector.h → extensions/statistics-extension.h} +12 -23
  118. data/vendor/v8/src/factory.cc +101 -134
  119. data/vendor/v8/src/factory.h +36 -31
  120. data/vendor/v8/src/flag-definitions.h +102 -25
  121. data/vendor/v8/src/flags.cc +9 -5
  122. data/vendor/v8/src/frames-inl.h +10 -0
  123. data/vendor/v8/src/frames.cc +116 -26
  124. data/vendor/v8/src/frames.h +96 -12
  125. data/vendor/v8/src/full-codegen.cc +219 -74
  126. data/vendor/v8/src/full-codegen.h +63 -21
  127. data/vendor/v8/src/func-name-inferrer.cc +8 -7
  128. data/vendor/v8/src/func-name-inferrer.h +5 -3
  129. data/vendor/v8/src/gdb-jit.cc +71 -57
  130. data/vendor/v8/src/global-handles.cc +230 -101
  131. data/vendor/v8/src/global-handles.h +26 -27
  132. data/vendor/v8/src/globals.h +17 -19
  133. data/vendor/v8/src/handles-inl.h +59 -12
  134. data/vendor/v8/src/handles.cc +180 -200
  135. data/vendor/v8/src/handles.h +80 -11
  136. data/vendor/v8/src/hashmap.h +60 -40
  137. data/vendor/v8/src/heap-inl.h +107 -45
  138. data/vendor/v8/src/heap-profiler.cc +38 -19
  139. data/vendor/v8/src/heap-profiler.h +24 -14
  140. data/vendor/v8/src/heap.cc +1123 -738
  141. data/vendor/v8/src/heap.h +385 -146
  142. data/vendor/v8/src/hydrogen-instructions.cc +700 -217
  143. data/vendor/v8/src/hydrogen-instructions.h +1158 -472
  144. data/vendor/v8/src/hydrogen.cc +3319 -1662
  145. data/vendor/v8/src/hydrogen.h +411 -170
  146. data/vendor/v8/src/ia32/assembler-ia32-inl.h +46 -16
  147. data/vendor/v8/src/ia32/assembler-ia32.cc +131 -61
  148. data/vendor/v8/src/ia32/assembler-ia32.h +115 -57
  149. data/vendor/v8/src/ia32/builtins-ia32.cc +99 -5
  150. data/vendor/v8/src/ia32/code-stubs-ia32.cc +787 -495
  151. data/vendor/v8/src/ia32/code-stubs-ia32.h +10 -100
  152. data/vendor/v8/src/ia32/codegen-ia32.cc +227 -23
  153. data/vendor/v8/src/ia32/codegen-ia32.h +14 -0
  154. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +428 -87
  155. data/vendor/v8/src/ia32/disasm-ia32.cc +28 -1
  156. data/vendor/v8/src/ia32/frames-ia32.h +6 -16
  157. data/vendor/v8/src/ia32/full-codegen-ia32.cc +280 -272
  158. data/vendor/v8/src/ia32/ic-ia32.cc +150 -250
  159. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +1600 -517
  160. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +90 -24
  161. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +10 -6
  162. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.h +2 -2
  163. data/vendor/v8/src/ia32/lithium-ia32.cc +405 -302
  164. data/vendor/v8/src/ia32/lithium-ia32.h +526 -271
  165. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +378 -119
  166. data/vendor/v8/src/ia32/macro-assembler-ia32.h +62 -28
  167. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +43 -30
  168. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +2 -10
  169. data/vendor/v8/src/ia32/stub-cache-ia32.cc +492 -678
  170. data/vendor/v8/src/ic-inl.h +9 -4
  171. data/vendor/v8/src/ic.cc +836 -923
  172. data/vendor/v8/src/ic.h +228 -247
  173. data/vendor/v8/src/incremental-marking-inl.h +26 -30
  174. data/vendor/v8/src/incremental-marking.cc +276 -248
  175. data/vendor/v8/src/incremental-marking.h +29 -37
  176. data/vendor/v8/src/interface.cc +34 -25
  177. data/vendor/v8/src/interface.h +69 -25
  178. data/vendor/v8/src/interpreter-irregexp.cc +2 -2
  179. data/vendor/v8/src/isolate.cc +382 -76
  180. data/vendor/v8/src/isolate.h +109 -56
  181. data/vendor/v8/src/json-parser.h +217 -104
  182. data/vendor/v8/src/json-stringifier.h +745 -0
  183. data/vendor/v8/src/json.js +10 -132
  184. data/vendor/v8/src/jsregexp-inl.h +106 -0
  185. data/vendor/v8/src/jsregexp.cc +517 -285
  186. data/vendor/v8/src/jsregexp.h +145 -117
  187. data/vendor/v8/src/list-inl.h +35 -22
  188. data/vendor/v8/src/list.h +46 -19
  189. data/vendor/v8/src/lithium-allocator-inl.h +22 -2
  190. data/vendor/v8/src/lithium-allocator.cc +85 -70
  191. data/vendor/v8/src/lithium-allocator.h +21 -39
  192. data/vendor/v8/src/lithium.cc +259 -5
  193. data/vendor/v8/src/lithium.h +131 -32
  194. data/vendor/v8/src/liveedit-debugger.js +52 -3
  195. data/vendor/v8/src/liveedit.cc +393 -113
  196. data/vendor/v8/src/liveedit.h +7 -3
  197. data/vendor/v8/src/log-utils.cc +4 -2
  198. data/vendor/v8/src/log.cc +170 -140
  199. data/vendor/v8/src/log.h +62 -11
  200. data/vendor/v8/src/macro-assembler.h +17 -0
  201. data/vendor/v8/src/macros.py +2 -0
  202. data/vendor/v8/src/mark-compact-inl.h +3 -23
  203. data/vendor/v8/src/mark-compact.cc +801 -830
  204. data/vendor/v8/src/mark-compact.h +154 -47
  205. data/vendor/v8/src/marking-thread.cc +85 -0
  206. data/vendor/v8/src/{inspector.cc → marking-thread.h} +32 -24
  207. data/vendor/v8/src/math.js +12 -18
  208. data/vendor/v8/src/messages.cc +18 -8
  209. data/vendor/v8/src/messages.js +314 -261
  210. data/vendor/v8/src/mips/assembler-mips-inl.h +58 -6
  211. data/vendor/v8/src/mips/assembler-mips.cc +92 -75
  212. data/vendor/v8/src/mips/assembler-mips.h +54 -60
  213. data/vendor/v8/src/mips/builtins-mips.cc +116 -17
  214. data/vendor/v8/src/mips/code-stubs-mips.cc +919 -556
  215. data/vendor/v8/src/mips/code-stubs-mips.h +22 -131
  216. data/vendor/v8/src/mips/codegen-mips.cc +281 -6
  217. data/vendor/v8/src/mips/codegen-mips.h +22 -0
  218. data/vendor/v8/src/mips/constants-mips.cc +2 -0
  219. data/vendor/v8/src/mips/constants-mips.h +12 -2
  220. data/vendor/v8/src/mips/deoptimizer-mips.cc +286 -50
  221. data/vendor/v8/src/mips/disasm-mips.cc +13 -0
  222. data/vendor/v8/src/mips/full-codegen-mips.cc +297 -284
  223. data/vendor/v8/src/mips/ic-mips.cc +182 -263
  224. data/vendor/v8/src/mips/lithium-codegen-mips.cc +1208 -556
  225. data/vendor/v8/src/mips/lithium-codegen-mips.h +72 -19
  226. data/vendor/v8/src/mips/lithium-gap-resolver-mips.cc +9 -2
  227. data/vendor/v8/src/mips/lithium-mips.cc +290 -302
  228. data/vendor/v8/src/mips/lithium-mips.h +463 -266
  229. data/vendor/v8/src/mips/macro-assembler-mips.cc +208 -115
  230. data/vendor/v8/src/mips/macro-assembler-mips.h +67 -24
  231. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +40 -25
  232. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +3 -9
  233. data/vendor/v8/src/mips/simulator-mips.cc +112 -40
  234. data/vendor/v8/src/mips/simulator-mips.h +5 -0
  235. data/vendor/v8/src/mips/stub-cache-mips.cc +502 -884
  236. data/vendor/v8/src/mirror-debugger.js +157 -30
  237. data/vendor/v8/src/mksnapshot.cc +88 -14
  238. data/vendor/v8/src/object-observe.js +235 -0
  239. data/vendor/v8/src/objects-debug.cc +178 -176
  240. data/vendor/v8/src/objects-inl.h +1333 -486
  241. data/vendor/v8/src/objects-printer.cc +125 -43
  242. data/vendor/v8/src/objects-visiting-inl.h +578 -6
  243. data/vendor/v8/src/objects-visiting.cc +2 -2
  244. data/vendor/v8/src/objects-visiting.h +172 -79
  245. data/vendor/v8/src/objects.cc +3533 -2885
  246. data/vendor/v8/src/objects.h +1352 -1131
  247. data/vendor/v8/src/optimizing-compiler-thread.cc +152 -0
  248. data/vendor/v8/src/optimizing-compiler-thread.h +111 -0
  249. data/vendor/v8/src/parser.cc +390 -500
  250. data/vendor/v8/src/parser.h +45 -33
  251. data/vendor/v8/src/platform-cygwin.cc +10 -21
  252. data/vendor/v8/src/platform-freebsd.cc +36 -41
  253. data/vendor/v8/src/platform-linux.cc +160 -124
  254. data/vendor/v8/src/platform-macos.cc +30 -27
  255. data/vendor/v8/src/platform-nullos.cc +17 -1
  256. data/vendor/v8/src/platform-openbsd.cc +19 -50
  257. data/vendor/v8/src/platform-posix.cc +14 -0
  258. data/vendor/v8/src/platform-solaris.cc +20 -53
  259. data/vendor/v8/src/platform-win32.cc +49 -26
  260. data/vendor/v8/src/platform.h +40 -1
  261. data/vendor/v8/src/preparser.cc +8 -5
  262. data/vendor/v8/src/preparser.h +2 -2
  263. data/vendor/v8/src/prettyprinter.cc +16 -0
  264. data/vendor/v8/src/prettyprinter.h +2 -0
  265. data/vendor/v8/src/profile-generator-inl.h +1 -0
  266. data/vendor/v8/src/profile-generator.cc +209 -147
  267. data/vendor/v8/src/profile-generator.h +15 -12
  268. data/vendor/v8/src/property-details.h +46 -31
  269. data/vendor/v8/src/property.cc +27 -46
  270. data/vendor/v8/src/property.h +163 -83
  271. data/vendor/v8/src/proxy.js +7 -2
  272. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +4 -13
  273. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +1 -2
  274. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +1 -11
  275. data/vendor/v8/src/regexp-macro-assembler-tracer.h +0 -1
  276. data/vendor/v8/src/regexp-macro-assembler.cc +31 -14
  277. data/vendor/v8/src/regexp-macro-assembler.h +14 -11
  278. data/vendor/v8/src/regexp-stack.cc +1 -0
  279. data/vendor/v8/src/regexp.js +9 -8
  280. data/vendor/v8/src/rewriter.cc +18 -7
  281. data/vendor/v8/src/runtime-profiler.cc +52 -43
  282. data/vendor/v8/src/runtime-profiler.h +0 -25
  283. data/vendor/v8/src/runtime.cc +2006 -2023
  284. data/vendor/v8/src/runtime.h +56 -49
  285. data/vendor/v8/src/safepoint-table.cc +12 -18
  286. data/vendor/v8/src/safepoint-table.h +11 -8
  287. data/vendor/v8/src/scanner.cc +1 -0
  288. data/vendor/v8/src/scanner.h +4 -10
  289. data/vendor/v8/src/scopeinfo.cc +35 -9
  290. data/vendor/v8/src/scopeinfo.h +64 -3
  291. data/vendor/v8/src/scopes.cc +251 -156
  292. data/vendor/v8/src/scopes.h +61 -27
  293. data/vendor/v8/src/serialize.cc +348 -396
  294. data/vendor/v8/src/serialize.h +125 -114
  295. data/vendor/v8/src/small-pointer-list.h +11 -11
  296. data/vendor/v8/src/{smart-array-pointer.h → smart-pointers.h} +64 -15
  297. data/vendor/v8/src/snapshot-common.cc +64 -15
  298. data/vendor/v8/src/snapshot-empty.cc +7 -1
  299. data/vendor/v8/src/snapshot.h +9 -2
  300. data/vendor/v8/src/spaces-inl.h +17 -0
  301. data/vendor/v8/src/spaces.cc +477 -183
  302. data/vendor/v8/src/spaces.h +238 -58
  303. data/vendor/v8/src/splay-tree-inl.h +8 -7
  304. data/vendor/v8/src/splay-tree.h +24 -10
  305. data/vendor/v8/src/store-buffer.cc +12 -5
  306. data/vendor/v8/src/store-buffer.h +2 -4
  307. data/vendor/v8/src/string-search.h +22 -6
  308. data/vendor/v8/src/string-stream.cc +11 -8
  309. data/vendor/v8/src/string.js +47 -15
  310. data/vendor/v8/src/stub-cache.cc +461 -224
  311. data/vendor/v8/src/stub-cache.h +164 -102
  312. data/vendor/v8/src/sweeper-thread.cc +105 -0
  313. data/vendor/v8/src/sweeper-thread.h +81 -0
  314. data/vendor/v8/src/token.h +1 -0
  315. data/vendor/v8/src/transitions-inl.h +220 -0
  316. data/vendor/v8/src/transitions.cc +160 -0
  317. data/vendor/v8/src/transitions.h +207 -0
  318. data/vendor/v8/src/type-info.cc +182 -181
  319. data/vendor/v8/src/type-info.h +31 -19
  320. data/vendor/v8/src/unicode-inl.h +62 -106
  321. data/vendor/v8/src/unicode.cc +57 -67
  322. data/vendor/v8/src/unicode.h +45 -91
  323. data/vendor/v8/src/uri.js +57 -29
  324. data/vendor/v8/src/utils.h +105 -5
  325. data/vendor/v8/src/v8-counters.cc +54 -11
  326. data/vendor/v8/src/v8-counters.h +134 -19
  327. data/vendor/v8/src/v8.cc +29 -29
  328. data/vendor/v8/src/v8.h +1 -0
  329. data/vendor/v8/src/v8conversions.cc +26 -22
  330. data/vendor/v8/src/v8globals.h +56 -43
  331. data/vendor/v8/src/v8natives.js +83 -30
  332. data/vendor/v8/src/v8threads.cc +42 -21
  333. data/vendor/v8/src/v8threads.h +4 -1
  334. data/vendor/v8/src/v8utils.cc +9 -93
  335. data/vendor/v8/src/v8utils.h +37 -33
  336. data/vendor/v8/src/variables.cc +6 -3
  337. data/vendor/v8/src/variables.h +6 -13
  338. data/vendor/v8/src/version.cc +2 -2
  339. data/vendor/v8/src/vm-state-inl.h +11 -0
  340. data/vendor/v8/src/x64/assembler-x64-inl.h +39 -8
  341. data/vendor/v8/src/x64/assembler-x64.cc +78 -64
  342. data/vendor/v8/src/x64/assembler-x64.h +38 -33
  343. data/vendor/v8/src/x64/builtins-x64.cc +105 -7
  344. data/vendor/v8/src/x64/code-stubs-x64.cc +790 -413
  345. data/vendor/v8/src/x64/code-stubs-x64.h +10 -106
  346. data/vendor/v8/src/x64/codegen-x64.cc +210 -8
  347. data/vendor/v8/src/x64/codegen-x64.h +20 -1
  348. data/vendor/v8/src/x64/deoptimizer-x64.cc +336 -75
  349. data/vendor/v8/src/x64/disasm-x64.cc +15 -0
  350. data/vendor/v8/src/x64/frames-x64.h +0 -14
  351. data/vendor/v8/src/x64/full-codegen-x64.cc +293 -270
  352. data/vendor/v8/src/x64/ic-x64.cc +153 -251
  353. data/vendor/v8/src/x64/lithium-codegen-x64.cc +1379 -531
  354. data/vendor/v8/src/x64/lithium-codegen-x64.h +67 -23
  355. data/vendor/v8/src/x64/lithium-gap-resolver-x64.cc +2 -2
  356. data/vendor/v8/src/x64/lithium-x64.cc +349 -289
  357. data/vendor/v8/src/x64/lithium-x64.h +460 -250
  358. data/vendor/v8/src/x64/macro-assembler-x64.cc +350 -177
  359. data/vendor/v8/src/x64/macro-assembler-x64.h +67 -49
  360. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +46 -33
  361. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +2 -3
  362. data/vendor/v8/src/x64/stub-cache-x64.cc +484 -653
  363. data/vendor/v8/src/zone-inl.h +9 -27
  364. data/vendor/v8/src/zone.cc +5 -5
  365. data/vendor/v8/src/zone.h +53 -27
  366. data/vendor/v8/test/benchmarks/testcfg.py +5 -0
  367. data/vendor/v8/test/cctest/cctest.cc +4 -0
  368. data/vendor/v8/test/cctest/cctest.gyp +3 -1
  369. data/vendor/v8/test/cctest/cctest.h +57 -9
  370. data/vendor/v8/test/cctest/cctest.status +15 -15
  371. data/vendor/v8/test/cctest/test-accessors.cc +26 -0
  372. data/vendor/v8/test/cctest/test-alloc.cc +22 -30
  373. data/vendor/v8/test/cctest/test-api.cc +1943 -314
  374. data/vendor/v8/test/cctest/test-assembler-arm.cc +133 -13
  375. data/vendor/v8/test/cctest/test-assembler-ia32.cc +1 -1
  376. data/vendor/v8/test/cctest/test-assembler-mips.cc +12 -0
  377. data/vendor/v8/test/cctest/test-ast.cc +4 -2
  378. data/vendor/v8/test/cctest/test-compiler.cc +61 -29
  379. data/vendor/v8/test/cctest/test-dataflow.cc +2 -2
  380. data/vendor/v8/test/cctest/test-debug.cc +212 -33
  381. data/vendor/v8/test/cctest/test-decls.cc +257 -11
  382. data/vendor/v8/test/cctest/test-dictionary.cc +24 -10
  383. data/vendor/v8/test/cctest/test-disasm-arm.cc +118 -1
  384. data/vendor/v8/test/cctest/test-disasm-ia32.cc +3 -2
  385. data/vendor/v8/test/cctest/test-flags.cc +14 -1
  386. data/vendor/v8/test/cctest/test-func-name-inference.cc +7 -4
  387. data/vendor/v8/test/cctest/test-global-object.cc +51 -0
  388. data/vendor/v8/test/cctest/test-hashing.cc +32 -23
  389. data/vendor/v8/test/cctest/test-heap-profiler.cc +131 -77
  390. data/vendor/v8/test/cctest/test-heap.cc +1084 -143
  391. data/vendor/v8/test/cctest/test-list.cc +1 -1
  392. data/vendor/v8/test/cctest/test-liveedit.cc +3 -2
  393. data/vendor/v8/test/cctest/test-lockers.cc +12 -13
  394. data/vendor/v8/test/cctest/test-log.cc +10 -8
  395. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +2 -2
  396. data/vendor/v8/test/cctest/test-mark-compact.cc +44 -22
  397. data/vendor/v8/test/cctest/test-object-observe.cc +434 -0
  398. data/vendor/v8/test/cctest/test-parsing.cc +86 -39
  399. data/vendor/v8/test/cctest/test-platform-linux.cc +6 -0
  400. data/vendor/v8/test/cctest/test-platform-win32.cc +7 -0
  401. data/vendor/v8/test/cctest/test-random.cc +5 -4
  402. data/vendor/v8/test/cctest/test-regexp.cc +137 -101
  403. data/vendor/v8/test/cctest/test-serialize.cc +150 -230
  404. data/vendor/v8/test/cctest/test-sockets.cc +1 -1
  405. data/vendor/v8/test/cctest/test-spaces.cc +139 -0
  406. data/vendor/v8/test/cctest/test-strings.cc +736 -74
  407. data/vendor/v8/test/cctest/test-thread-termination.cc +10 -11
  408. data/vendor/v8/test/cctest/test-threads.cc +4 -4
  409. data/vendor/v8/test/cctest/test-utils.cc +16 -0
  410. data/vendor/v8/test/cctest/test-weakmaps.cc +7 -3
  411. data/vendor/v8/test/cctest/testcfg.py +64 -5
  412. data/vendor/v8/test/es5conform/testcfg.py +5 -0
  413. data/vendor/v8/test/message/message.status +1 -1
  414. data/vendor/v8/test/message/overwritten-builtins.out +3 -0
  415. data/vendor/v8/test/message/testcfg.py +89 -8
  416. data/vendor/v8/test/message/try-catch-finally-no-message.out +26 -26
  417. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +18 -2
  418. data/vendor/v8/test/mjsunit/allocation-site-info.js +126 -0
  419. data/vendor/v8/test/mjsunit/array-bounds-check-removal.js +62 -1
  420. data/vendor/v8/test/mjsunit/array-iteration.js +1 -1
  421. data/vendor/v8/test/mjsunit/array-literal-transitions.js +2 -0
  422. data/vendor/v8/test/mjsunit/array-natives-elements.js +317 -0
  423. data/vendor/v8/test/mjsunit/array-reduce.js +8 -8
  424. data/vendor/v8/test/mjsunit/array-slice.js +12 -0
  425. data/vendor/v8/test/mjsunit/array-store-and-grow.js +4 -1
  426. data/vendor/v8/test/mjsunit/assert-opt-and-deopt.js +1 -1
  427. data/vendor/v8/test/mjsunit/bugs/bug-2337.js +53 -0
  428. data/vendor/v8/test/mjsunit/compare-known-objects-slow.js +69 -0
  429. data/vendor/v8/test/mjsunit/compiler/alloc-object-huge.js +3 -1
  430. data/vendor/v8/test/mjsunit/compiler/inline-accessors.js +368 -0
  431. data/vendor/v8/test/mjsunit/compiler/inline-arguments.js +87 -1
  432. data/vendor/v8/test/mjsunit/compiler/inline-closures.js +49 -0
  433. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +55 -43
  434. data/vendor/v8/test/mjsunit/compiler/inline-literals.js +39 -0
  435. data/vendor/v8/test/mjsunit/compiler/multiply-add.js +69 -0
  436. data/vendor/v8/test/mjsunit/compiler/optimized-closures.js +57 -0
  437. data/vendor/v8/test/mjsunit/compiler/parallel-proto-change.js +44 -0
  438. data/vendor/v8/test/mjsunit/compiler/property-static.js +69 -0
  439. data/vendor/v8/test/mjsunit/compiler/proto-chain-constant.js +55 -0
  440. data/vendor/v8/test/mjsunit/compiler/proto-chain-load.js +44 -0
  441. data/vendor/v8/test/mjsunit/compiler/regress-gvn.js +3 -2
  442. data/vendor/v8/test/mjsunit/compiler/regress-or.js +6 -2
  443. data/vendor/v8/test/mjsunit/compiler/rotate.js +224 -0
  444. data/vendor/v8/test/mjsunit/compiler/uint32.js +173 -0
  445. data/vendor/v8/test/mjsunit/count-based-osr.js +2 -1
  446. data/vendor/v8/test/mjsunit/d8-os.js +3 -3
  447. data/vendor/v8/test/mjsunit/date-parse.js +3 -0
  448. data/vendor/v8/test/mjsunit/date.js +22 -0
  449. data/vendor/v8/test/mjsunit/debug-break-inline.js +1 -0
  450. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js +22 -12
  451. data/vendor/v8/test/mjsunit/debug-evaluate-locals-optimized.js +21 -10
  452. data/vendor/v8/test/mjsunit/debug-liveedit-compile-error.js +60 -0
  453. data/vendor/v8/test/mjsunit/debug-liveedit-double-call.js +142 -0
  454. data/vendor/v8/test/mjsunit/debug-liveedit-literals.js +94 -0
  455. data/vendor/v8/test/mjsunit/debug-liveedit-restart-frame.js +153 -0
  456. data/vendor/v8/test/mjsunit/debug-multiple-breakpoints.js +1 -1
  457. data/vendor/v8/test/mjsunit/debug-script-breakpoints-closure.js +67 -0
  458. data/vendor/v8/test/mjsunit/debug-script-breakpoints-nested.js +82 -0
  459. data/vendor/v8/test/mjsunit/debug-script.js +4 -2
  460. data/vendor/v8/test/mjsunit/debug-set-variable-value.js +308 -0
  461. data/vendor/v8/test/mjsunit/debug-stepout-scope-part1.js +190 -0
  462. data/vendor/v8/test/mjsunit/debug-stepout-scope-part2.js +83 -0
  463. data/vendor/v8/test/mjsunit/debug-stepout-scope-part3.js +80 -0
  464. data/vendor/v8/test/mjsunit/debug-stepout-scope-part4.js +80 -0
  465. data/vendor/v8/test/mjsunit/debug-stepout-scope-part5.js +77 -0
  466. data/vendor/v8/test/mjsunit/debug-stepout-scope-part6.js +79 -0
  467. data/vendor/v8/test/mjsunit/debug-stepout-scope-part7.js +79 -0
  468. data/vendor/v8/test/mjsunit/{debug-stepout-scope.js → debug-stepout-scope-part8.js} +0 -189
  469. data/vendor/v8/test/mjsunit/delete-non-configurable.js +74 -0
  470. data/vendor/v8/test/mjsunit/deopt-minus-zero.js +56 -0
  471. data/vendor/v8/test/mjsunit/elements-kind.js +6 -4
  472. data/vendor/v8/test/mjsunit/elements-length-no-holey.js +33 -0
  473. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +46 -19
  474. data/vendor/v8/test/mjsunit/error-accessors.js +54 -0
  475. data/vendor/v8/test/mjsunit/error-constructors.js +1 -14
  476. data/vendor/v8/test/mjsunit/error-tostring.js +8 -0
  477. data/vendor/v8/test/mjsunit/eval-stack-trace.js +204 -0
  478. data/vendor/v8/test/mjsunit/external-array.js +364 -1
  479. data/vendor/v8/test/mjsunit/fast-array-length.js +37 -0
  480. data/vendor/v8/test/mjsunit/fast-non-keyed.js +113 -0
  481. data/vendor/v8/test/mjsunit/fast-prototype.js +117 -0
  482. data/vendor/v8/test/mjsunit/function-call.js +14 -18
  483. data/vendor/v8/test/mjsunit/fuzz-natives-part1.js +230 -0
  484. data/vendor/v8/test/mjsunit/fuzz-natives-part2.js +229 -0
  485. data/vendor/v8/test/mjsunit/fuzz-natives-part3.js +229 -0
  486. data/vendor/v8/test/mjsunit/{fuzz-natives.js → fuzz-natives-part4.js} +12 -2
  487. data/vendor/v8/test/mjsunit/generated-transition-stub.js +218 -0
  488. data/vendor/v8/test/mjsunit/greedy.js +1 -1
  489. data/vendor/v8/test/mjsunit/harmony/block-conflicts.js +2 -1
  490. data/vendor/v8/test/mjsunit/harmony/block-let-crankshaft.js +1 -1
  491. data/vendor/v8/test/mjsunit/harmony/collections.js +69 -11
  492. data/vendor/v8/test/mjsunit/harmony/debug-blockscopes.js +2 -2
  493. data/vendor/v8/test/mjsunit/harmony/module-linking.js +180 -3
  494. data/vendor/v8/test/mjsunit/harmony/module-parsing.js +31 -0
  495. data/vendor/v8/test/mjsunit/harmony/module-recompile.js +87 -0
  496. data/vendor/v8/test/mjsunit/harmony/module-resolution.js +15 -2
  497. data/vendor/v8/test/mjsunit/harmony/object-observe.js +1056 -0
  498. data/vendor/v8/test/mjsunit/harmony/proxies-json.js +178 -0
  499. data/vendor/v8/test/mjsunit/harmony/proxies.js +25 -10
  500. data/vendor/v8/test/mjsunit/json-parser-recursive.js +33 -0
  501. data/vendor/v8/test/mjsunit/json-stringify-recursive.js +52 -0
  502. data/vendor/v8/test/mjsunit/json.js +38 -2
  503. data/vendor/v8/test/mjsunit/json2.js +153 -0
  504. data/vendor/v8/test/mjsunit/limit-locals.js +5 -4
  505. data/vendor/v8/test/mjsunit/manual-parallel-recompile.js +79 -0
  506. data/vendor/v8/test/mjsunit/math-exp-precision.js +64 -0
  507. data/vendor/v8/test/mjsunit/math-floor-negative.js +59 -0
  508. data/vendor/v8/test/mjsunit/math-floor-of-div-minus-zero.js +41 -0
  509. data/vendor/v8/test/mjsunit/math-floor-of-div-nosudiv.js +288 -0
  510. data/vendor/v8/test/mjsunit/math-floor-of-div.js +81 -9
  511. data/vendor/v8/test/mjsunit/{math-floor.js → math-floor-part1.js} +1 -72
  512. data/vendor/v8/test/mjsunit/math-floor-part2.js +76 -0
  513. data/vendor/v8/test/mjsunit/math-floor-part3.js +78 -0
  514. data/vendor/v8/test/mjsunit/math-floor-part4.js +76 -0
  515. data/vendor/v8/test/mjsunit/mirror-object.js +43 -9
  516. data/vendor/v8/test/mjsunit/mjsunit.js +1 -1
  517. data/vendor/v8/test/mjsunit/mjsunit.status +52 -27
  518. data/vendor/v8/test/mjsunit/mul-exhaustive-part1.js +491 -0
  519. data/vendor/v8/test/mjsunit/mul-exhaustive-part10.js +470 -0
  520. data/vendor/v8/test/mjsunit/mul-exhaustive-part2.js +525 -0
  521. data/vendor/v8/test/mjsunit/mul-exhaustive-part3.js +532 -0
  522. data/vendor/v8/test/mjsunit/mul-exhaustive-part4.js +509 -0
  523. data/vendor/v8/test/mjsunit/mul-exhaustive-part5.js +505 -0
  524. data/vendor/v8/test/mjsunit/mul-exhaustive-part6.js +554 -0
  525. data/vendor/v8/test/mjsunit/mul-exhaustive-part7.js +497 -0
  526. data/vendor/v8/test/mjsunit/mul-exhaustive-part8.js +526 -0
  527. data/vendor/v8/test/mjsunit/mul-exhaustive-part9.js +533 -0
  528. data/vendor/v8/test/mjsunit/new-function.js +34 -0
  529. data/vendor/v8/test/mjsunit/numops-fuzz-part1.js +1172 -0
  530. data/vendor/v8/test/mjsunit/numops-fuzz-part2.js +1178 -0
  531. data/vendor/v8/test/mjsunit/numops-fuzz-part3.js +1178 -0
  532. data/vendor/v8/test/mjsunit/numops-fuzz-part4.js +1177 -0
  533. data/vendor/v8/test/mjsunit/object-define-property.js +107 -2
  534. data/vendor/v8/test/mjsunit/override-read-only-property.js +6 -4
  535. data/vendor/v8/test/mjsunit/packed-elements.js +2 -2
  536. data/vendor/v8/test/mjsunit/parse-int-float.js +4 -4
  537. data/vendor/v8/test/mjsunit/pixel-array-rounding.js +1 -1
  538. data/vendor/v8/test/mjsunit/readonly.js +228 -0
  539. data/vendor/v8/test/mjsunit/regexp-capture-3.js +16 -18
  540. data/vendor/v8/test/mjsunit/regexp-capture.js +2 -0
  541. data/vendor/v8/test/mjsunit/regexp-global.js +122 -0
  542. data/vendor/v8/test/mjsunit/regexp-results-cache.js +78 -0
  543. data/vendor/v8/test/mjsunit/regress/regress-1117.js +12 -3
  544. data/vendor/v8/test/mjsunit/regress/regress-1118.js +1 -1
  545. data/vendor/v8/test/mjsunit/regress/regress-115100.js +36 -0
  546. data/vendor/v8/test/mjsunit/regress/regress-1199637.js +1 -3
  547. data/vendor/v8/test/mjsunit/regress/regress-121407.js +1 -1
  548. data/vendor/v8/test/mjsunit/regress/regress-131923.js +30 -0
  549. data/vendor/v8/test/mjsunit/regress/regress-131994.js +70 -0
  550. data/vendor/v8/test/mjsunit/regress/regress-133211.js +35 -0
  551. data/vendor/v8/test/mjsunit/regress/regress-133211b.js +39 -0
  552. data/vendor/v8/test/mjsunit/regress/regress-136048.js +34 -0
  553. data/vendor/v8/test/mjsunit/regress/regress-137768.js +73 -0
  554. data/vendor/v8/test/mjsunit/regress/regress-143967.js +34 -0
  555. data/vendor/v8/test/mjsunit/regress/regress-145201.js +107 -0
  556. data/vendor/v8/test/mjsunit/regress/regress-147497.js +45 -0
  557. data/vendor/v8/test/mjsunit/regress/regress-148378.js +38 -0
  558. data/vendor/v8/test/mjsunit/regress/regress-1563.js +1 -1
  559. data/vendor/v8/test/mjsunit/regress/regress-1591.js +48 -0
  560. data/vendor/v8/test/mjsunit/regress/regress-164442.js +45 -0
  561. data/vendor/v8/test/mjsunit/regress/regress-165637.js +61 -0
  562. data/vendor/v8/test/mjsunit/regress/regress-166379.js +39 -0
  563. data/vendor/v8/test/mjsunit/regress/regress-166553.js +33 -0
  564. data/vendor/v8/test/mjsunit/regress/regress-1692.js +1 -1
  565. data/vendor/v8/test/mjsunit/regress/regress-171641.js +40 -0
  566. data/vendor/v8/test/mjsunit/regress/regress-1980.js +1 -1
  567. data/vendor/v8/test/mjsunit/regress/regress-2073.js +99 -0
  568. data/vendor/v8/test/mjsunit/regress/regress-2119.js +36 -0
  569. data/vendor/v8/test/mjsunit/regress/regress-2156.js +39 -0
  570. data/vendor/v8/test/mjsunit/regress/regress-2163.js +70 -0
  571. data/vendor/v8/test/mjsunit/regress/regress-2170.js +58 -0
  572. data/vendor/v8/test/mjsunit/regress/regress-2172.js +35 -0
  573. data/vendor/v8/test/mjsunit/regress/regress-2185-2.js +145 -0
  574. data/vendor/v8/test/mjsunit/regress/regress-2185.js +38 -0
  575. data/vendor/v8/test/mjsunit/regress/regress-2186.js +49 -0
  576. data/vendor/v8/test/mjsunit/regress/regress-2193.js +58 -0
  577. data/vendor/v8/test/mjsunit/regress/regress-2219.js +32 -0
  578. data/vendor/v8/test/mjsunit/regress/regress-2225.js +65 -0
  579. data/vendor/v8/test/mjsunit/regress/regress-2226.js +36 -0
  580. data/vendor/v8/test/mjsunit/regress/regress-2234.js +41 -0
  581. data/vendor/v8/test/mjsunit/regress/regress-2243.js +31 -0
  582. data/vendor/v8/test/mjsunit/regress/regress-2249.js +33 -0
  583. data/vendor/v8/test/mjsunit/regress/regress-2250.js +68 -0
  584. data/vendor/v8/test/mjsunit/regress/regress-2261.js +113 -0
  585. data/vendor/v8/test/mjsunit/regress/regress-2263.js +30 -0
  586. data/vendor/v8/test/mjsunit/regress/regress-2284.js +32 -0
  587. data/vendor/v8/test/mjsunit/regress/regress-2285.js +32 -0
  588. data/vendor/v8/test/mjsunit/regress/regress-2286.js +32 -0
  589. data/vendor/v8/test/mjsunit/regress/regress-2289.js +34 -0
  590. data/vendor/v8/test/mjsunit/regress/regress-2291.js +36 -0
  591. data/vendor/v8/test/mjsunit/regress/regress-2294.js +70 -0
  592. data/vendor/v8/test/mjsunit/regress/regress-2296.js +40 -0
  593. data/vendor/v8/test/mjsunit/regress/regress-2315.js +40 -0
  594. data/vendor/v8/test/mjsunit/regress/regress-2318.js +66 -0
  595. data/vendor/v8/test/mjsunit/regress/regress-2322.js +36 -0
  596. data/vendor/v8/test/mjsunit/regress/regress-2326.js +54 -0
  597. data/vendor/v8/test/mjsunit/regress/regress-2336.js +53 -0
  598. data/vendor/v8/test/mjsunit/regress/regress-2339.js +59 -0
  599. data/vendor/v8/test/mjsunit/regress/regress-2346.js +123 -0
  600. data/vendor/v8/test/mjsunit/regress/regress-2373.js +29 -0
  601. data/vendor/v8/test/mjsunit/regress/regress-2374.js +33 -0
  602. data/vendor/v8/test/mjsunit/regress/regress-2398.js +41 -0
  603. data/vendor/v8/test/mjsunit/regress/regress-2410.js +36 -0
  604. data/vendor/v8/test/mjsunit/regress/regress-2416.js +75 -0
  605. data/vendor/v8/test/mjsunit/regress/regress-2419.js +37 -0
  606. data/vendor/v8/test/mjsunit/regress/regress-2433.js +36 -0
  607. data/vendor/v8/test/mjsunit/regress/regress-2437.js +156 -0
  608. data/vendor/v8/test/mjsunit/regress/regress-2438.js +52 -0
  609. data/vendor/v8/test/mjsunit/regress/regress-2443.js +129 -0
  610. data/vendor/v8/test/mjsunit/regress/regress-2444.js +120 -0
  611. data/vendor/v8/test/mjsunit/regress/regress-2489.js +50 -0
  612. data/vendor/v8/test/mjsunit/regress/regress-2499.js +40 -0
  613. data/vendor/v8/test/mjsunit/regress/regress-334.js +1 -1
  614. data/vendor/v8/test/mjsunit/regress/regress-492.js +39 -1
  615. data/vendor/v8/test/mjsunit/regress/regress-builtin-array-op.js +38 -0
  616. data/vendor/v8/test/mjsunit/regress/regress-cnlt-elements.js +43 -0
  617. data/vendor/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js +45 -0
  618. data/vendor/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js +46 -0
  619. data/vendor/v8/test/mjsunit/regress/regress-convert-enum.js +60 -0
  620. data/vendor/v8/test/mjsunit/regress/regress-convert-enum2.js +46 -0
  621. data/vendor/v8/test/mjsunit/regress/regress-convert-transition.js +40 -0
  622. data/vendor/v8/test/mjsunit/regress/regress-crbug-119926.js +3 -1
  623. data/vendor/v8/test/mjsunit/regress/regress-crbug-125148.js +90 -0
  624. data/vendor/v8/test/mjsunit/regress/regress-crbug-134055.js +63 -0
  625. data/vendor/v8/test/mjsunit/regress/regress-crbug-134609.js +59 -0
  626. data/vendor/v8/test/mjsunit/regress/regress-crbug-135008.js +45 -0
  627. data/vendor/v8/test/mjsunit/regress/regress-crbug-135066.js +55 -0
  628. data/vendor/v8/test/mjsunit/regress/regress-crbug-137689.js +47 -0
  629. data/vendor/v8/test/mjsunit/regress/regress-crbug-138887.js +48 -0
  630. data/vendor/v8/test/mjsunit/regress/regress-crbug-140083.js +44 -0
  631. data/vendor/v8/test/mjsunit/regress/regress-crbug-142087.js +38 -0
  632. data/vendor/v8/test/mjsunit/regress/regress-crbug-142218.js +44 -0
  633. data/vendor/v8/test/mjsunit/regress/regress-crbug-145961.js +39 -0
  634. data/vendor/v8/test/mjsunit/regress/regress-crbug-146910.js +33 -0
  635. data/vendor/v8/test/mjsunit/regress/regress-crbug-147475.js +48 -0
  636. data/vendor/v8/test/mjsunit/regress/regress-crbug-148376.js +35 -0
  637. data/vendor/v8/test/mjsunit/regress/regress-crbug-150545.js +53 -0
  638. data/vendor/v8/test/mjsunit/regress/regress-crbug-150729.js +39 -0
  639. data/vendor/v8/test/mjsunit/regress/regress-crbug-157019.js +54 -0
  640. data/vendor/v8/test/mjsunit/regress/regress-crbug-157520.js +38 -0
  641. data/vendor/v8/test/mjsunit/regress/regress-crbug-158185.js +39 -0
  642. data/vendor/v8/test/mjsunit/regress/regress-crbug-160010.js +35 -0
  643. data/vendor/v8/test/mjsunit/regress/regress-crbug-162085.js +71 -0
  644. data/vendor/v8/test/mjsunit/regress/regress-crbug-168545.js +34 -0
  645. data/vendor/v8/test/mjsunit/regress/regress-crbug-170856.js +33 -0
  646. data/vendor/v8/test/mjsunit/regress/regress-crbug-172345.js +34 -0
  647. data/vendor/v8/test/mjsunit/regress/regress-crbug-173974.js +36 -0
  648. data/vendor/v8/test/mjsunit/regress/regress-crbug-18639.js +9 -5
  649. data/vendor/v8/test/mjsunit/regress/regress-debug-code-recompilation.js +2 -1
  650. data/vendor/v8/test/mjsunit/regress/regress-deep-proto.js +45 -0
  651. data/vendor/v8/test/mjsunit/regress/regress-delete-empty-double.js +40 -0
  652. data/vendor/v8/test/mjsunit/regress/regress-iteration-order.js +42 -0
  653. data/vendor/v8/test/mjsunit/regress/regress-json-stringify-gc.js +41 -0
  654. data/vendor/v8/test/mjsunit/regress/regress-latin-1.js +78 -0
  655. data/vendor/v8/test/mjsunit/regress/regress-load-elements.js +49 -0
  656. data/vendor/v8/test/mjsunit/regress/regress-observe-empty-double-array.js +38 -0
  657. data/vendor/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js +37 -0
  658. data/vendor/v8/test/mjsunit/shift-for-integer-div.js +59 -0
  659. data/vendor/v8/test/mjsunit/stack-traces-gc.js +119 -0
  660. data/vendor/v8/test/mjsunit/stack-traces-overflow.js +122 -0
  661. data/vendor/v8/test/mjsunit/stack-traces.js +39 -1
  662. data/vendor/v8/test/mjsunit/str-to-num.js +7 -2
  663. data/vendor/v8/test/mjsunit/strict-mode.js +36 -11
  664. data/vendor/v8/test/mjsunit/string-charcodeat.js +3 -0
  665. data/vendor/v8/test/mjsunit/string-natives.js +72 -0
  666. data/vendor/v8/test/mjsunit/string-split.js +17 -0
  667. data/vendor/v8/test/mjsunit/testcfg.py +76 -6
  668. data/vendor/v8/test/mjsunit/tools/tickprocessor.js +4 -1
  669. data/vendor/v8/test/mjsunit/try-finally-continue.js +72 -0
  670. data/vendor/v8/test/mjsunit/typed-array-slice.js +61 -0
  671. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +2 -0
  672. data/vendor/v8/test/mjsunit/uri.js +12 -0
  673. data/vendor/v8/test/mjsunit/with-readonly.js +4 -2
  674. data/vendor/v8/test/mozilla/mozilla.status +19 -113
  675. data/vendor/v8/test/mozilla/testcfg.py +122 -3
  676. data/vendor/v8/test/preparser/preparser.status +5 -0
  677. data/vendor/v8/test/preparser/strict-identifiers.pyt +1 -1
  678. data/vendor/v8/test/preparser/testcfg.py +101 -5
  679. data/vendor/v8/test/sputnik/sputnik.status +1 -1
  680. data/vendor/v8/test/sputnik/testcfg.py +5 -0
  681. data/vendor/v8/test/test262/README +2 -2
  682. data/vendor/v8/test/test262/test262.status +13 -36
  683. data/vendor/v8/test/test262/testcfg.py +102 -8
  684. data/vendor/v8/tools/android-build.sh +0 -0
  685. data/vendor/v8/tools/android-ll-prof.sh +69 -0
  686. data/vendor/v8/tools/android-run.py +109 -0
  687. data/vendor/v8/tools/android-sync.sh +105 -0
  688. data/vendor/v8/tools/bash-completion.sh +0 -0
  689. data/vendor/v8/tools/check-static-initializers.sh +0 -0
  690. data/vendor/v8/tools/common-includes.sh +15 -22
  691. data/vendor/v8/tools/disasm.py +4 -4
  692. data/vendor/v8/tools/fuzz-harness.sh +0 -0
  693. data/vendor/v8/tools/gen-postmortem-metadata.py +6 -8
  694. data/vendor/v8/tools/grokdump.py +404 -129
  695. data/vendor/v8/tools/gyp/v8.gyp +105 -43
  696. data/vendor/v8/tools/linux-tick-processor +5 -5
  697. data/vendor/v8/tools/ll_prof.py +75 -15
  698. data/vendor/v8/tools/merge-to-branch.sh +2 -2
  699. data/vendor/v8/tools/plot-timer-events +70 -0
  700. data/vendor/v8/tools/plot-timer-events.js +510 -0
  701. data/vendor/v8/tools/presubmit.py +1 -0
  702. data/vendor/v8/tools/push-to-trunk.sh +14 -4
  703. data/vendor/v8/tools/run-llprof.sh +69 -0
  704. data/vendor/v8/tools/run-tests.py +372 -0
  705. data/vendor/v8/tools/run-valgrind.py +1 -1
  706. data/vendor/v8/tools/status-file-converter.py +39 -0
  707. data/vendor/v8/tools/test-server.py +224 -0
  708. data/vendor/v8/tools/test-wrapper-gypbuild.py +13 -16
  709. data/vendor/v8/tools/test.py +10 -19
  710. data/vendor/v8/tools/testrunner/README +174 -0
  711. data/vendor/v8/tools/testrunner/__init__.py +26 -0
  712. data/vendor/v8/tools/testrunner/local/__init__.py +26 -0
  713. data/vendor/v8/tools/testrunner/local/commands.py +153 -0
  714. data/vendor/v8/tools/testrunner/local/execution.py +182 -0
  715. data/vendor/v8/tools/testrunner/local/old_statusfile.py +460 -0
  716. data/vendor/v8/tools/testrunner/local/progress.py +238 -0
  717. data/vendor/v8/tools/testrunner/local/statusfile.py +145 -0
  718. data/vendor/v8/tools/testrunner/local/testsuite.py +187 -0
  719. data/vendor/v8/tools/testrunner/local/utils.py +108 -0
  720. data/vendor/v8/tools/testrunner/local/verbose.py +99 -0
  721. data/vendor/v8/tools/testrunner/network/__init__.py +26 -0
  722. data/vendor/v8/tools/testrunner/network/distro.py +90 -0
  723. data/vendor/v8/tools/testrunner/network/endpoint.py +124 -0
  724. data/vendor/v8/tools/testrunner/network/network_execution.py +253 -0
  725. data/vendor/v8/tools/testrunner/network/perfdata.py +120 -0
  726. data/vendor/v8/tools/testrunner/objects/__init__.py +26 -0
  727. data/vendor/v8/tools/testrunner/objects/context.py +50 -0
  728. data/vendor/v8/tools/testrunner/objects/output.py +60 -0
  729. data/vendor/v8/tools/testrunner/objects/peer.py +80 -0
  730. data/vendor/v8/tools/testrunner/objects/testcase.py +83 -0
  731. data/vendor/v8/tools/testrunner/objects/workpacket.py +90 -0
  732. data/vendor/v8/tools/testrunner/server/__init__.py +26 -0
  733. data/vendor/v8/tools/testrunner/server/compression.py +111 -0
  734. data/vendor/v8/tools/testrunner/server/constants.py +51 -0
  735. data/vendor/v8/tools/testrunner/server/daemon.py +147 -0
  736. data/vendor/v8/tools/testrunner/server/local_handler.py +119 -0
  737. data/vendor/v8/tools/testrunner/server/main.py +245 -0
  738. data/vendor/v8/tools/testrunner/server/presence_handler.py +120 -0
  739. data/vendor/v8/tools/testrunner/server/signatures.py +63 -0
  740. data/vendor/v8/tools/testrunner/server/status_handler.py +112 -0
  741. data/vendor/v8/tools/testrunner/server/work_handler.py +150 -0
  742. data/vendor/v8/tools/tick-processor.html +168 -0
  743. data/vendor/v8/tools/tickprocessor-driver.js +5 -3
  744. data/vendor/v8/tools/tickprocessor.js +58 -15
  745. metadata +534 -30
  746. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +0 -11
  747. data/patches/do-not-imply-vfp3-and-armv7.patch +0 -44
  748. data/patches/fPIC-on-x64.patch +0 -14
  749. data/vendor/v8/src/liveobjectlist-inl.h +0 -126
  750. data/vendor/v8/src/liveobjectlist.cc +0 -2631
  751. data/vendor/v8/src/liveobjectlist.h +0 -319
  752. data/vendor/v8/test/mjsunit/mul-exhaustive.js +0 -4629
  753. data/vendor/v8/test/mjsunit/numops-fuzz.js +0 -4609
  754. data/vendor/v8/test/mjsunit/regress/regress-1969.js +0 -5045
data/vendor/v8/src/log.h CHANGED
@@ -74,8 +74,8 @@ namespace internal {
  class LogMessageBuilder;
  class Profiler;
  class Semaphore;
- class SlidingStateWindow;
  class Ticker;
+ class Isolate;
 
  #undef LOG
  #define LOG(isolate, Call) \
@@ -86,6 +86,15 @@ class Ticker;
      logger->Call; \
  } while (false)
 
+ #define LOG_CODE_EVENT(isolate, Call) \
+   do { \
+     v8::internal::Logger* logger = \
+         (isolate)->logger(); \
+     if (logger->is_logging_code_events()) \
+       logger->Call; \
+   } while (false)
+
+
  #define LOG_EVENTS_AND_TAGS_LIST(V) \
    V(CODE_CREATION_EVENT, "code-creation") \
    V(CODE_MOVE_EVENT, "code-move") \
@@ -118,10 +127,10 @@ class Ticker;
    V(EVAL_TAG, "Eval") \
    V(FUNCTION_TAG, "Function") \
    V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \
-   V(KEYED_LOAD_MEGAMORPHIC_IC_TAG, "KeyedLoadMegamorphicIC") \
+   V(KEYED_LOAD_POLYMORPHIC_IC_TAG, "KeyedLoadPolymorphicIC") \
    V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \
    V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \
-   V(KEYED_STORE_MEGAMORPHIC_IC_TAG, "KeyedStoreMegamorphicIC") \
+   V(KEYED_STORE_POLYMORPHIC_IC_TAG, "KeyedStorePolymorphicIC") \
    V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC") \
    V(LAZY_COMPILE_TAG, "LazyCompile") \
    V(LOAD_IC_TAG, "LoadIC") \
@@ -151,6 +160,10 @@ class Logger {
  // Acquires resources for logging if the right flags are set.
  bool SetUp();
 
+ // Sets the current code event handler.
+ void SetCodeEventHandler(uint32_t options,
+                          JitCodeEventHandler event_handler);
+
  void EnsureTickerStarted();
  void EnsureTickerStopped();
 
@@ -161,9 +174,6 @@ class Logger {
  // leaving the file open.
  FILE* TearDown();
 
- // Enable the computation of a sliding window of states.
- void EnableSlidingStateWindow();
-
  // Emits an event with a string value -> (name, value).
  void StringEvent(const char* name, const char* value);
 
@@ -262,6 +272,38 @@ class Logger {
                        uintptr_t start,
                        uintptr_t end);
 
+ // ==== Events logged by --log-timer-events. ====
+ enum StartEnd { START, END };
+
+ void TimerEvent(StartEnd se, const char* name);
+
+ static void EnterExternal();
+ static void LeaveExternal();
+
+ class TimerEventScope {
+  public:
+   TimerEventScope(Isolate* isolate, const char* name)
+       : isolate_(isolate), name_(name) {
+     if (FLAG_log_internal_timer_events) LogTimerEvent(START);
+   }
+
+   ~TimerEventScope() {
+     if (FLAG_log_internal_timer_events) LogTimerEvent(END);
+   }
+
+   void LogTimerEvent(StartEnd se);
+
+   static const char* v8_recompile_synchronous;
+   static const char* v8_recompile_parallel;
+   static const char* v8_compile_full_code;
+   static const char* v8_execute;
+   static const char* v8_external;
+
+  private:
+   Isolate* isolate_;
+   const char* name_;
+ };
+
  // ==== Events logged by --log-regexp ====
  // Regexp compilation and execution events.
 
@@ -274,6 +316,10 @@ class Logger {
    return logging_nesting_ > 0;
  }
 
+ bool is_logging_code_events() {
+   return is_logging() || code_event_handler_ != NULL;
+ }
+
  // Pause/Resume collection of profiling data.
  // When data collection is paused, CPU Tick events are discarded until
  // data collection is Resumed.
@@ -312,6 +358,11 @@ class Logger {
  Logger();
  ~Logger();
 
+ // Issue code notifications.
+ void IssueCodeAddedEvent(Code* code, const char* name, size_t name_len);
+ void IssueCodeMovedEvent(Address from, Address to);
+ void IssueCodeRemovedEvent(Address from);
+
  // Emits the profiler's first message.
  void ProfilerBeginEvent();
 
@@ -379,10 +430,6 @@ class Logger {
  // of samples.
  Profiler* profiler_;
 
- // SlidingStateWindow instance keeping a sliding window of the most
- // recent VM states.
- SlidingStateWindow* sliding_state_window_;
-
  // An array of log events names.
  const char* const* log_events_;
 
@@ -393,7 +440,6 @@ class Logger {
  friend class LogMessageBuilder;
  friend class TimeLog;
  friend class Profiler;
- friend class SlidingStateWindow;
  friend class StackTracer;
  friend class VMState;
 
@@ -413,6 +459,9 @@ class Logger {
  // 'true' between SetUp() and TearDown().
  bool is_initialized_;
 
+ // The code event handler - if any.
+ JitCodeEventHandler code_event_handler_;
+
  // Support for 'incremental addresses' in compressed logs:
  // LogMessageBuilder::AppendAddress(Address addr)
  Address last_address_;
@@ -424,6 +473,8 @@ class Logger {
  // Logger::FunctionCreateEvent(...)
  Address prev_code_;
 
+ int64_t epoch_;
+
  friend class CpuProfiler;
  };
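Note on the log.h changes above: the old SlidingStateWindow profiling window is dropped in favor of a JIT code-event handler hook (SetCodeEventHandler, is_logging_code_events, the Issue*Event methods) plus RAII timer-event scopes driven by the timer-event flags. The sketch below is illustrative only; the calling function is hypothetical, and only the Logger/TimerEventScope names come from the header above.

// Hypothetical caller, for illustration; not part of the diff.
void RecompileFunction(v8::internal::Isolate* isolate) {
  // Logs a START timer event now and the matching END event when the
  // scope is destroyed, provided --log-internal-timer-events is enabled.
  v8::internal::Logger::TimerEventScope timer(
      isolate,
      v8::internal::Logger::TimerEventScope::v8_recompile_synchronous);
  // ... recompilation work would go here ...
}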
 
data/vendor/v8/src/macro-assembler.h CHANGED
@@ -36,6 +36,23 @@ enum InvokeFlag {
  };
 
 
+ // Flags used for the AllocateInNewSpace functions.
+ enum AllocationFlags {
+   // No special flags.
+   NO_ALLOCATION_FLAGS = 0,
+   // Return the pointer to the allocated already tagged as a heap object.
+   TAG_OBJECT = 1 << 0,
+   // The content of the result register already contains the allocation top in
+   // new space.
+   RESULT_CONTAINS_TOP = 1 << 1,
+   // Specify that the requested size of the space to allocate is specified in
+   // words instead of bytes.
+   SIZE_IN_WORDS = 1 << 2,
+   // Align the allocation to a multiple of kDoubleSize
+   DOUBLE_ALIGNMENT = 1 << 3
+ };
+
+
  // Invalid depth in prototype chain.
  const int kInvalidProtoDepth = -1;
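Note on the AllocationFlags enum above: the values are single-bit flags, so call sites combine them with bitwise OR and cast back to the enum type. A minimal sketch follows; the allocation helper name is taken from the enum's own comment, but its exact signature is not shown in this diff.

// Illustrative only: request a heap-tagged result whose size is given in words.
AllocationFlags flags =
    static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS);
// A MacroAssembler allocation routine such as AllocateInNewSpace() would then
// receive 'flags' alongside its result/scratch registers and a bailout label.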
 
data/vendor/v8/src/macros.py CHANGED
@@ -32,6 +32,8 @@ const NONE = 0;
  const READ_ONLY = 1;
  const DONT_ENUM = 2;
  const DONT_DELETE = 4;
+ const NEW_ONE_BYTE_STRING = true;
+ const NEW_TWO_BYTE_STRING = false;
 
  # Constants used for getter and setter operations.
  const GETTER = 0;
data/vendor/v8/src/mark-compact-inl.h CHANGED
@@ -52,32 +52,15 @@ void MarkCompactCollector::SetFlags(int flags) {
  }
 
 
- bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
-   if (MarkObjectWithoutPush(obj)) {
-     marking_deque_.PushBlack(obj);
-     return true;
-   }
-   return false;
- }
-
-
  void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
    ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
    if (!mark_bit.Get()) {
      mark_bit.Set();
      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-     ProcessNewlyMarkedObject(obj);
-   }
- }
-
-
- bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
-   MarkBit mark_bit = Marking::MarkBitFrom(obj);
-   if (!mark_bit.Get()) {
-     SetMark(obj, mark_bit);
-     return true;
+     ASSERT(IsMarked(obj));
+     ASSERT(HEAP->Contains(obj));
+     marking_deque_.PushBlack(obj);
    }
-   return false;
  }
 
 
@@ -86,9 +69,6 @@ void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
    ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
    mark_bit.Set();
    MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-   if (obj->IsMap()) {
-     heap_->ClearCacheOnMap(Map::cast(obj));
-   }
  }
 
 
data/vendor/v8/src/mark-compact.cc CHANGED
@@ -36,11 +36,12 @@
  #include "heap-profiler.h"
  #include "ic-inl.h"
  #include "incremental-marking.h"
- #include "liveobjectlist-inl.h"
  #include "mark-compact.h"
+ #include "marking-thread.h"
  #include "objects-visiting.h"
  #include "objects-visiting-inl.h"
  #include "stub-cache.h"
+ #include "sweeper-thread.h"
 
  namespace v8 {
  namespace internal {
@@ -62,28 +63,37 @@ MarkCompactCollector::MarkCompactCollector() :  // NOLINT
        sweep_precisely_(false),
        reduce_memory_footprint_(false),
        abort_incremental_marking_(false),
+       marking_parity_(ODD_MARKING_PARITY),
        compacting_(false),
        was_marked_incrementally_(false),
-       flush_monomorphic_ics_(false),
        tracer_(NULL),
        migration_slots_buffer_(NULL),
        heap_(NULL),
        code_flusher_(NULL),
-       encountered_weak_maps_(NULL),
-       marker_(this, this) { }
+       encountered_weak_maps_(NULL) { }
 
 
- #ifdef DEBUG
+ #ifdef VERIFY_HEAP
  class VerifyMarkingVisitor: public ObjectVisitor {
   public:
    void VisitPointers(Object** start, Object** end) {
      for (Object** current = start; current < end; current++) {
        if ((*current)->IsHeapObject()) {
          HeapObject* object = HeapObject::cast(*current);
-         ASSERT(HEAP->mark_compact_collector()->IsMarked(object));
+         CHECK(HEAP->mark_compact_collector()->IsMarked(object));
        }
      }
    }
+
+   void VisitEmbeddedPointer(RelocInfo* rinfo) {
+     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
+     if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
+         rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
+         !rinfo->target_object()->IsMap() ||
+         !Map::cast(rinfo->target_object())->CanTransition()) {
+       VisitPointer(rinfo->target_object_address());
+     }
+   }
  };
 
 
@@ -97,7 +107,7 @@ static void VerifyMarking(Address bottom, Address top) {
        current += kPointerSize) {
    object = HeapObject::FromAddress(current);
    if (MarkCompactCollector::IsMarked(object)) {
-     ASSERT(current >= next_object_must_be_here_or_later);
+     CHECK(current >= next_object_must_be_here_or_later);
      object->Iterate(&visitor);
      next_object_must_be_here_or_later = current + object->Size();
    }
@@ -110,12 +120,12 @@ static void VerifyMarking(NewSpace* space) {
  NewSpacePageIterator it(space->bottom(), end);
  // The bottom position is at the start of its page. Allows us to use
  // page->area_start() as start of range on all pages.
- ASSERT_EQ(space->bottom(),
+ CHECK_EQ(space->bottom(),
            NewSpacePage::FromAddress(space->bottom())->area_start());
  while (it.has_next()) {
    NewSpacePage* page = it.next();
    Address limit = it.has_next() ? page->area_end() : end;
-   ASSERT(limit == end || !page->Contains(end));
+   CHECK(limit == end || !page->Contains(end));
    VerifyMarking(page->area_start(), limit);
  }
  }
@@ -175,7 +185,7 @@ static void VerifyEvacuation(Address bottom, Address top) {
        current += kPointerSize) {
    object = HeapObject::FromAddress(current);
    if (MarkCompactCollector::IsMarked(object)) {
-     ASSERT(current >= next_object_must_be_here_or_later);
+     CHECK(current >= next_object_must_be_here_or_later);
      object->Iterate(&visitor);
      next_object_must_be_here_or_later = current + object->Size();
    }
@@ -191,7 +201,7 @@ static void VerifyEvacuation(NewSpace* space) {
    NewSpacePage* page = it.next();
    Address current = page->area_start();
    Address limit = it.has_next() ? page->area_end() : space->top();
-   ASSERT(limit == space->top() || !page->Contains(space->top()));
+   CHECK(limit == space->top() || !page->Contains(space->top()));
    while (current < limit) {
      HeapObject* object = HeapObject::FromAddress(current);
      object->Iterate(&visitor);
@@ -223,6 +233,101 @@ static void VerifyEvacuation(Heap* heap) {
  VerifyEvacuationVisitor visitor;
  heap->IterateStrongRoots(&visitor, VISIT_ALL);
  }
+ #endif  // VERIFY_HEAP
+
+
+ #ifdef DEBUG
+ class VerifyNativeContextSeparationVisitor: public ObjectVisitor {
+  public:
+   VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {}
+
+   void VisitPointers(Object** start, Object** end) {
+     for (Object** current = start; current < end; current++) {
+       if ((*current)->IsHeapObject()) {
+         HeapObject* object = HeapObject::cast(*current);
+         if (object->IsString()) continue;
+         switch (object->map()->instance_type()) {
+           case JS_FUNCTION_TYPE:
+             CheckContext(JSFunction::cast(object)->context());
+             break;
+           case JS_GLOBAL_PROXY_TYPE:
+             CheckContext(JSGlobalProxy::cast(object)->native_context());
+             break;
+           case JS_GLOBAL_OBJECT_TYPE:
+           case JS_BUILTINS_OBJECT_TYPE:
+             CheckContext(GlobalObject::cast(object)->native_context());
+             break;
+           case JS_ARRAY_TYPE:
+           case JS_DATE_TYPE:
+           case JS_OBJECT_TYPE:
+           case JS_REGEXP_TYPE:
+             VisitPointer(HeapObject::RawField(object, JSObject::kMapOffset));
+             break;
+           case MAP_TYPE:
+             VisitPointer(HeapObject::RawField(object, Map::kPrototypeOffset));
+             VisitPointer(HeapObject::RawField(object, Map::kConstructorOffset));
+             break;
+           case FIXED_ARRAY_TYPE:
+             if (object->IsContext()) {
+               CheckContext(object);
+             } else {
+               FixedArray* array = FixedArray::cast(object);
+               int length = array->length();
+               // Set array length to zero to prevent cycles while iterating
+               // over array bodies, this is easier than intrusive marking.
+               array->set_length(0);
+               array->IterateBody(
+                   FIXED_ARRAY_TYPE, FixedArray::SizeFor(length), this);
+               array->set_length(length);
+             }
+             break;
+           case JS_GLOBAL_PROPERTY_CELL_TYPE:
+           case JS_PROXY_TYPE:
+           case JS_VALUE_TYPE:
+           case TYPE_FEEDBACK_INFO_TYPE:
+             object->Iterate(this);
+             break;
+           case ACCESSOR_INFO_TYPE:
+           case BYTE_ARRAY_TYPE:
+           case CALL_HANDLER_INFO_TYPE:
+           case CODE_TYPE:
+           case FIXED_DOUBLE_ARRAY_TYPE:
+           case HEAP_NUMBER_TYPE:
+           case INTERCEPTOR_INFO_TYPE:
+           case ODDBALL_TYPE:
+           case SCRIPT_TYPE:
+           case SHARED_FUNCTION_INFO_TYPE:
+             break;
+           default:
+             UNREACHABLE();
+         }
+       }
+     }
+   }
+
+  private:
+   void CheckContext(Object* context) {
+     if (!context->IsContext()) return;
+     Context* native_context = Context::cast(context)->native_context();
+     if (current_native_context_ == NULL) {
+       current_native_context_ = native_context;
+     } else {
+       CHECK_EQ(current_native_context_, native_context);
+     }
+   }
+
+   Context* current_native_context_;
+ };
+
+
+ static void VerifyNativeContextSeparation(Heap* heap) {
+   HeapObjectIterator it(heap->code_space());
+
+   for (Object* object = it.Next(); object != NULL; object = it.Next()) {
+     VerifyNativeContextSeparationVisitor visitor;
+     Code::cast(object)->CodeIterateBody(&visitor);
+   }
+ }
  #endif
 
 
@@ -248,10 +353,17 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
248
353
  if (!compacting_) {
249
354
  ASSERT(evacuation_candidates_.length() == 0);
250
355
 
356
+ #ifdef ENABLE_GDB_JIT_INTERFACE
357
+ // If GDBJIT interface is active disable compaction.
358
+ if (FLAG_gdbjit) return false;
359
+ #endif
360
+
251
361
  CollectEvacuationCandidates(heap()->old_pointer_space());
252
362
  CollectEvacuationCandidates(heap()->old_data_space());
253
363
 
254
- if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
364
+ if (FLAG_compact_code_space &&
365
+ (mode == NON_INCREMENTAL_COMPACTION ||
366
+ FLAG_incremental_code_compaction)) {
255
367
  CollectEvacuationCandidates(heap()->code_space());
256
368
  } else if (FLAG_trace_fragmentation) {
257
369
  TraceFragmentation(heap()->code_space());
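Two things change in StartCompaction here: compaction is refused outright while the GDBJIT interface is active, and code space becomes eligible for compaction during incremental cycles when FLAG_incremental_code_compaction is set. A minimal sketch of the resulting predicate, with hypothetical names that mirror the flags in the hunk:

// Sketch only; the real decision lives inside StartCompaction().
static bool ShouldCompactCodeSpace(bool compact_code_space_flag,
                                   bool non_incremental_cycle,
                                   bool incremental_code_compaction_flag) {
  // Previously code space was only compacted on full cycles; with the extra
  // flag it can now also be compacted during incremental cycles.
  return compact_code_space_flag &&
         (non_incremental_cycle || incremental_code_compaction_flag);
}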
@@ -282,11 +394,11 @@ void MarkCompactCollector::CollectGarbage() {
282
394
  MarkLiveObjects();
283
395
  ASSERT(heap_->incremental_marking()->IsStopped());
284
396
 
285
- if (FLAG_collect_maps) ClearNonLiveTransitions();
397
+ if (FLAG_collect_maps) ClearNonLiveReferences();
286
398
 
287
399
  ClearWeakMaps();
288
400
 
289
- #ifdef DEBUG
401
+ #ifdef VERIFY_HEAP
290
402
  if (FLAG_verify_heap) {
291
403
  VerifyMarking(heap_);
292
404
  }
@@ -296,13 +408,33 @@ void MarkCompactCollector::CollectGarbage() {
296
408
 
297
409
  if (!FLAG_collect_maps) ReattachInitialMaps();
298
410
 
411
+ #ifdef DEBUG
412
+ if (FLAG_verify_native_context_separation) {
413
+ VerifyNativeContextSeparation(heap_);
414
+ }
415
+ #endif
416
+
417
+ #ifdef VERIFY_HEAP
418
+ if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
419
+ heap()->weak_embedded_maps_verification_enabled()) {
420
+ VerifyWeakEmbeddedMapsInOptimizedCode();
421
+ }
422
+ #endif
423
+
299
424
  Finish();
300
425
 
426
+ if (marking_parity_ == EVEN_MARKING_PARITY) {
427
+ marking_parity_ = ODD_MARKING_PARITY;
428
+ } else {
429
+ ASSERT(marking_parity_ == ODD_MARKING_PARITY);
430
+ marking_parity_ = EVEN_MARKING_PARITY;
431
+ }
432
+
301
433
  tracer_ = NULL;
302
434
  }
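CollectGarbage now flips a marking parity at the end of every completed cycle. A minimal sketch of that toggle (the enum values mirror the diff; the helper is hypothetical), useful for telling whether some piece of state was last touched in this cycle or the previous one:

enum MarkingParity { EVEN_MARKING_PARITY, ODD_MARKING_PARITY };

static MarkingParity FlipParity(MarkingParity parity) {
  return parity == EVEN_MARKING_PARITY ? ODD_MARKING_PARITY
                                       : EVEN_MARKING_PARITY;
}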
303
435
 
304
436
 
305
- #ifdef DEBUG
437
+ #ifdef VERIFY_HEAP
306
438
  void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
307
439
  PageIterator it(space);
308
440
 
@@ -313,6 +445,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
313
445
  }
314
446
  }
315
447
 
448
+
316
449
  void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
317
450
  NewSpacePageIterator it(space->bottom(), space->top());
318
451
 
@@ -323,6 +456,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
323
456
  }
324
457
  }
325
458
 
459
+
326
460
  void MarkCompactCollector::VerifyMarkbitsAreClean() {
327
461
  VerifyMarkbitsAreClean(heap_->old_pointer_space());
328
462
  VerifyMarkbitsAreClean(heap_->old_data_space());
@@ -334,11 +468,24 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
334
468
  LargeObjectIterator it(heap_->lo_space());
335
469
  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
336
470
  MarkBit mark_bit = Marking::MarkBitFrom(obj);
337
- ASSERT(Marking::IsWhite(mark_bit));
338
- ASSERT_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
471
+ CHECK(Marking::IsWhite(mark_bit));
472
+ CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
339
473
  }
340
474
  }
341
- #endif
475
+
476
+
477
+ void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
478
+ HeapObjectIterator code_iterator(heap()->code_space());
479
+ for (HeapObject* obj = code_iterator.Next();
480
+ obj != NULL;
481
+ obj = code_iterator.Next()) {
482
+ Code* code = Code::cast(obj);
483
+ if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
484
+ if (code->marked_for_deoptimization()) continue;
485
+ code->VerifyEmbeddedMapsDependency();
486
+ }
487
+ }
488
+ #endif // VERIFY_HEAP
342
489
 
343
490
 
344
491
  static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
@@ -372,11 +519,67 @@ void MarkCompactCollector::ClearMarkbits() {
372
519
  MarkBit mark_bit = Marking::MarkBitFrom(obj);
373
520
  mark_bit.Clear();
374
521
  mark_bit.Next().Clear();
522
+ Page::FromAddress(obj->address())->ResetProgressBar();
375
523
  Page::FromAddress(obj->address())->ResetLiveBytes();
376
524
  }
377
525
  }
378
526
 
379
527
 
528
+ void MarkCompactCollector::StartSweeperThreads() {
529
+ SweeperThread::set_sweeping_pending(true);
530
+ for (int i = 0; i < FLAG_sweeper_threads; i++) {
531
+ heap()->isolate()->sweeper_threads()[i]->StartSweeping();
532
+ }
533
+ }
534
+
535
+
536
+ void MarkCompactCollector::WaitUntilSweepingCompleted() {
537
+ if (SweeperThread::sweeping_pending()) {
538
+ for (int i = 0; i < FLAG_sweeper_threads; i++) {
539
+ heap()->isolate()->sweeper_threads()[i]->WaitForSweeperThread();
540
+ }
541
+ SweeperThread::set_sweeping_pending(false);
542
+ StealMemoryFromSweeperThreads(heap()->paged_space(OLD_DATA_SPACE));
543
+ StealMemoryFromSweeperThreads(heap()->paged_space(OLD_POINTER_SPACE));
544
+ heap()->FreeQueuedChunks();
545
+ }
546
+ }
547
+
548
+
549
+ intptr_t MarkCompactCollector::
550
+ StealMemoryFromSweeperThreads(PagedSpace* space) {
551
+ intptr_t freed_bytes = 0;
552
+ for (int i = 0; i < FLAG_sweeper_threads; i++) {
553
+ freed_bytes += heap()->isolate()->sweeper_threads()[i]->StealMemory(space);
554
+ }
555
+ return freed_bytes;
556
+ }
557
+
558
+
559
+ bool MarkCompactCollector::AreSweeperThreadsActivated() {
560
+ return heap()->isolate()->sweeper_threads() != NULL;
561
+ }
562
+
563
+
564
+ bool MarkCompactCollector::IsConcurrentSweepingInProgress() {
565
+ return SweeperThread::sweeping_pending();
566
+ }
567
+
568
+
569
+ void MarkCompactCollector::MarkInParallel() {
570
+ for (int i = 0; i < FLAG_marking_threads; i++) {
571
+ heap()->isolate()->marking_threads()[i]->StartMarking();
572
+ }
573
+ }
574
+
575
+
576
+ void MarkCompactCollector::WaitUntilMarkingCompleted() {
577
+ for (int i = 0; i < FLAG_marking_threads; i++) {
578
+ heap()->isolate()->marking_threads()[i]->WaitForMarkingThread();
579
+ }
580
+ }
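The block above introduces the sweeper/marking thread plumbing: background threads are started, and before their results are needed the main thread waits for them and "steals" the memory they freed back into the owning spaces. A minimal, self-contained sketch of that hand-off, with a hypothetical FakeSweeper standing in for the real threads:

#include <stdint.h>

struct FakeSweeper {
  intptr_t freed_bytes;
  FakeSweeper() : freed_bytes(0) {}
  void StartSweeping() { freed_bytes = 4096; }  // pretend a page was swept
  void WaitUntilDone() {}                       // would join/park the thread
  intptr_t StealFreedMemory() {
    intptr_t bytes = freed_bytes;
    freed_bytes = 0;
    return bytes;                               // hand the free memory back
  }
};

static intptr_t SweepConcurrently(FakeSweeper* sweepers, int count) {
  for (int i = 0; i < count; i++) sweepers[i].StartSweeping();
  // ... the mutator keeps running while sweeping proceeds in the background ...
  intptr_t total = 0;
  for (int i = 0; i < count; i++) {
    sweepers[i].WaitUntilDone();
    total += sweepers[i].StealFreedMemory();
  }
  return total;
}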
581
+
582
+
380
583
  bool Marking::TransferMark(Address old_start, Address new_start) {
381
584
  // This is only used when resizing an object.
382
585
  ASSERT(MemoryChunk::FromAddress(old_start) ==
@@ -500,12 +703,10 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
500
703
  space->identity() == OLD_DATA_SPACE ||
501
704
  space->identity() == CODE_SPACE);
502
705
 
706
+ static const int kMaxMaxEvacuationCandidates = 1000;
503
707
  int number_of_pages = space->CountTotalPages();
504
-
505
- const int kMaxMaxEvacuationCandidates = 1000;
506
- int max_evacuation_candidates = Min(
507
- kMaxMaxEvacuationCandidates,
508
- static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1));
708
+ int max_evacuation_candidates =
709
+ static_cast<int>(sqrt(number_of_pages / 2.0) + 1);
509
710
 
510
711
  if (FLAG_stress_compaction || FLAG_always_compact) {
511
712
  max_evacuation_candidates = kMaxMaxEvacuationCandidates;
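The candidate limit above now grows with the square root of the page count, with the kMaxMaxEvacuationCandidates cap applied a bit later in the function. A minimal sketch that folds the cap into one helper for brevity:

#include <cmath>

static int MaxEvacuationCandidates(int number_of_pages) {
  static const int kMaxMaxEvacuationCandidates = 1000;
  int limit = static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1);
  return limit < kMaxMaxEvacuationCandidates ? limit
                                             : kMaxMaxEvacuationCandidates;
}
// Example: a space with 200 pages gives sqrt(100) + 1 = 11 candidate pages.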
@@ -535,25 +736,37 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
535
736
  intptr_t over_reserved = reserved - space->SizeOfObjects();
536
737
  static const intptr_t kFreenessThreshold = 50;
537
738
 
538
- if (over_reserved >= 2 * space->AreaSize() &&
539
- reduce_memory_footprint_) {
739
+ if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) {
740
+ // If reduction of memory footprint was requested, we are aggressive
741
+ // about choosing pages to free. We expect that half-empty pages
742
+ // are easier to compact so slightly bump the limit.
540
743
  mode = REDUCE_MEMORY_FOOTPRINT;
541
-
542
- // We expect that empty pages are easier to compact so slightly bump the
543
- // limit.
544
744
  max_evacuation_candidates += 2;
745
+ }
545
746
 
546
- if (FLAG_trace_fragmentation) {
547
- PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n",
548
- static_cast<double>(over_reserved) / MB,
549
- static_cast<int>(kFreenessThreshold));
550
- }
747
+
748
+ if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) {
749
+ // If over-usage is very high (more than a third of the space), we
750
+ // try to free all mostly empty pages. We expect that almost empty
751
+ // pages are even easier to compact so bump the limit even more.
752
+ mode = REDUCE_MEMORY_FOOTPRINT;
753
+ max_evacuation_candidates *= 2;
754
+ }
755
+
756
+ if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
757
+ PrintF("Estimated over reserved memory: %.1f / %.1f MB (threshold %d)\n",
758
+ static_cast<double>(over_reserved) / MB,
759
+ static_cast<double>(reserved) / MB,
760
+ static_cast<int>(kFreenessThreshold));
551
761
  }
552
762
 
553
763
  intptr_t estimated_release = 0;
554
764
 
555
765
  Candidate candidates[kMaxMaxEvacuationCandidates];
556
766
 
767
+ max_evacuation_candidates =
768
+ Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);
769
+
557
770
  int count = 0;
558
771
  int fragmentation = 0;
559
772
  Candidate* least = NULL;
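The hunk above splits footprint reduction into two thresholds: a mild one (over-reservation of at least one page area, only when footprint reduction was requested) that bumps the candidate limit by two, and an aggressive one (over-reservation exceeding a third of the reserved memory and at least two page areas) that doubles it. A minimal sketch with hypothetical names; "area_size" stands in for space->AreaSize():

#include <stdint.h>

static int AdjustForFootprintReduction(bool reduce_memory_footprint,
                                       intptr_t reserved,
                                       intptr_t live,
                                       intptr_t area_size,
                                       int candidate_limit) {
  intptr_t over_reserved = reserved - live;
  if (reduce_memory_footprint && over_reserved >= area_size) {
    candidate_limit += 2;   // half-empty pages compact easily: be a bit eager
  }
  if (over_reserved > reserved / 3 && over_reserved >= 2 * area_size) {
    candidate_limit *= 2;   // very high over-use: go after almost-empty pages
  }
  return candidate_limit;
}
// Example: with footprint reduction requested, reserved 30 MB, live 18 MB and
// 1 MB page areas, over_reserved is 12 MB, both thresholds trip, and a limit
// of 11 becomes (11 + 2) * 2 = 26.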
@@ -566,7 +779,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
566
779
  p->ClearEvacuationCandidate();
567
780
 
568
781
  if (FLAG_stress_compaction) {
569
- int counter = space->heap()->ms_count();
782
+ unsigned int counter = space->heap()->ms_count();
570
783
  uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
571
784
  if ((counter & 1) == (page_number & 1)) fragmentation = 1;
572
785
  } else if (mode == REDUCE_MEMORY_FOOTPRINT) {
@@ -658,12 +871,6 @@ void MarkCompactCollector::AbortCompaction() {
658
871
  void MarkCompactCollector::Prepare(GCTracer* tracer) {
659
872
  was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
660
873
 
661
- // Monomorphic ICs are preserved when possible, but need to be flushed
662
- // when they might be keeping a Context alive, or when the heap is about
663
- // to be serialized.
664
- flush_monomorphic_ics_ =
665
- heap()->isolate()->context_exit_happened() || Serializer::enabled();
666
-
667
874
  // Rather than passing the tracer around we stash it in a static member
668
875
  // variable.
669
876
  tracer_ = tracer;
@@ -675,12 +882,10 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
675
882
 
676
883
  ASSERT(!FLAG_never_compact || !FLAG_always_compact);
677
884
 
678
- #ifdef ENABLE_GDB_JIT_INTERFACE
679
- if (FLAG_gdbjit) {
680
- // If GDBJIT interface is active disable compaction.
681
- compacting_collection_ = false;
885
+ if (AreSweeperThreadsActivated() && FLAG_concurrent_sweeping) {
886
+ // Instead of waiting we could also abort the sweeper threads here.
887
+ WaitUntilSweepingCompleted();
682
888
  }
683
- #endif
684
889
 
685
890
  // Clear marking bits if incremental marking is aborted.
686
891
  if (was_marked_incrementally_ && abort_incremental_marking_) {
@@ -703,7 +908,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
703
908
  space->PrepareForMarkCompact();
704
909
  }
705
910
 
706
- #ifdef DEBUG
911
+ #ifdef VERIFY_HEAP
707
912
  if (!was_marked_incrementally_ && FLAG_verify_heap) {
708
913
  VerifyMarkbitsAreClean();
709
914
  }
@@ -711,6 +916,14 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
711
916
  }
712
917
 
713
918
 
919
+ class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
920
+ public:
921
+ virtual bool TakeFunction(JSFunction* function) {
922
+ return function->code()->marked_for_deoptimization();
923
+ }
924
+ };
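Finish() now hands this filter to the deoptimizer so only functions whose code was marked for deoptimization during the cycle are torn down. A minimal sketch of the filter pattern with hypothetical stand-in types:

struct OptimizedFunction { bool marked_for_deoptimization; };

class Filter {
 public:
  virtual ~Filter() {}
  virtual bool TakeFunction(OptimizedFunction* function) = 0;
};

class DeoptimizeMarkedFilter : public Filter {
 public:
  virtual bool TakeFunction(OptimizedFunction* function) {
    return function->marked_for_deoptimization;
  }
};

static int DeoptimizeAllWith(Filter* filter, OptimizedFunction* fns, int count) {
  int taken = 0;
  for (int i = 0; i < count; i++) {
    if (filter->TakeFunction(&fns[i])) taken++;  // real code would deoptimize here
  }
  return taken;
}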
925
+
926
+
714
927
  void MarkCompactCollector::Finish() {
715
928
  #ifdef DEBUG
716
929
  ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
@@ -722,7 +935,8 @@ void MarkCompactCollector::Finish() {
722
935
  // objects (empty string, illegal builtin).
723
936
  heap()->isolate()->stub_cache()->Clear();
724
937
 
725
- heap()->external_string_table_.CleanUp();
938
+ DeoptimizeMarkedCodeFilter filter;
939
+ Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
726
940
  }
727
941
 
728
942
 
@@ -754,133 +968,182 @@ void MarkCompactCollector::Finish() {
754
968
  // and continue with marking. This process repeats until all reachable
755
969
  // objects have been marked.
756
970
 
757
- class CodeFlusher {
758
- public:
759
- explicit CodeFlusher(Isolate* isolate)
760
- : isolate_(isolate),
761
- jsfunction_candidates_head_(NULL),
762
- shared_function_info_candidates_head_(NULL) {}
971
+ void CodeFlusher::ProcessJSFunctionCandidates() {
972
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
973
+ Object* undefined = isolate_->heap()->undefined_value();
974
+
975
+ JSFunction* candidate = jsfunction_candidates_head_;
976
+ JSFunction* next_candidate;
977
+ while (candidate != NULL) {
978
+ next_candidate = GetNextCandidate(candidate);
979
+ ClearNextCandidate(candidate, undefined);
980
+
981
+ SharedFunctionInfo* shared = candidate->shared();
982
+
983
+ Code* code = shared->code();
984
+ MarkBit code_mark = Marking::MarkBitFrom(code);
985
+ if (!code_mark.Get()) {
986
+ shared->set_code(lazy_compile);
987
+ candidate->set_code(lazy_compile);
988
+ } else {
989
+ candidate->set_code(code);
990
+ }
991
+
992
+ // We are in the middle of a GC cycle so the write barrier in the code
993
+ // setter did not record the slot update and we have to do that manually.
994
+ Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
995
+ Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
996
+ isolate_->heap()->mark_compact_collector()->
997
+ RecordCodeEntrySlot(slot, target);
763
998
 
764
- void AddCandidate(SharedFunctionInfo* shared_info) {
765
- SetNextCandidate(shared_info, shared_function_info_candidates_head_);
766
- shared_function_info_candidates_head_ = shared_info;
999
+ Object** shared_code_slot =
1000
+ HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
1001
+ isolate_->heap()->mark_compact_collector()->
1002
+ RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
1003
+
1004
+ candidate = next_candidate;
767
1005
  }
768
1006
 
769
- void AddCandidate(JSFunction* function) {
770
- ASSERT(function->code() == function->shared()->code());
1007
+ jsfunction_candidates_head_ = NULL;
1008
+ }
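The loop above captures the core flushing decision: if a candidate's compiled code was not marked live this cycle, both the JSFunction and its SharedFunctionInfo are pointed back at the lazy-compile builtin so the code object can be reclaimed and regenerated on the next call. A minimal sketch with hypothetical stand-in structs:

struct CodeStub { bool marked; };
struct SharedInfo { CodeStub* code; };
struct Function { SharedInfo* shared; CodeStub* code; };

static void FlushIfCodeIsDead(Function* function, CodeStub* lazy_compile) {
  CodeStub* code = function->shared->code;
  if (!code->marked) {
    // Nothing kept the compiled code alive this cycle.
    function->shared->code = lazy_compile;
    function->code = lazy_compile;
  } else {
    function->code = code;  // the code survived, keep using it
  }
}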
1009
+
1010
+
1011
+ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
1012
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
1013
+
1014
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
1015
+ SharedFunctionInfo* next_candidate;
1016
+ while (candidate != NULL) {
1017
+ next_candidate = GetNextCandidate(candidate);
1018
+ ClearNextCandidate(candidate);
1019
+
1020
+ Code* code = candidate->code();
1021
+ MarkBit code_mark = Marking::MarkBitFrom(code);
1022
+ if (!code_mark.Get()) {
1023
+ candidate->set_code(lazy_compile);
1024
+ }
1025
+
1026
+ Object** code_slot =
1027
+ HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
1028
+ isolate_->heap()->mark_compact_collector()->
1029
+ RecordSlot(code_slot, code_slot, *code_slot);
771
1030
 
772
- SetNextCandidate(function, jsfunction_candidates_head_);
773
- jsfunction_candidates_head_ = function;
1031
+ candidate = next_candidate;
774
1032
  }
775
1033
 
776
- void ProcessCandidates() {
777
- ProcessSharedFunctionInfoCandidates();
778
- ProcessJSFunctionCandidates();
1034
+ shared_function_info_candidates_head_ = NULL;
1035
+ }
1036
+
1037
+
1038
+ bool CodeFlusher::ContainsCandidate(SharedFunctionInfo* shared_info) {
1039
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
1040
+ while (candidate != NULL) {
1041
+ if (candidate == shared_info) return true;
1042
+ candidate = GetNextCandidate(candidate);
779
1043
  }
1044
+ return false;
1045
+ }
780
1046
 
781
- private:
782
- void ProcessJSFunctionCandidates() {
783
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
784
1047
 
785
- JSFunction* candidate = jsfunction_candidates_head_;
786
- JSFunction* next_candidate;
1048
+ void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) {
1049
+ // Make sure previous flushing decisions are revisited.
1050
+ isolate_->heap()->incremental_marking()->RecordWrites(shared_info);
1051
+
1052
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
1053
+ SharedFunctionInfo* next_candidate;
1054
+ if (candidate == shared_info) {
1055
+ next_candidate = GetNextCandidate(shared_info);
1056
+ shared_function_info_candidates_head_ = next_candidate;
1057
+ ClearNextCandidate(shared_info);
1058
+ } else {
787
1059
  while (candidate != NULL) {
788
1060
  next_candidate = GetNextCandidate(candidate);
789
1061
 
790
- SharedFunctionInfo* shared = candidate->shared();
791
-
792
- Code* code = shared->code();
793
- MarkBit code_mark = Marking::MarkBitFrom(code);
794
- if (!code_mark.Get()) {
795
- shared->set_code(lazy_compile);
796
- candidate->set_code(lazy_compile);
797
- } else {
798
- candidate->set_code(shared->code());
1062
+ if (next_candidate == shared_info) {
1063
+ next_candidate = GetNextCandidate(shared_info);
1064
+ SetNextCandidate(candidate, next_candidate);
1065
+ ClearNextCandidate(shared_info);
1066
+ break;
799
1067
  }
800
1068
 
801
- // We are in the middle of a GC cycle so the write barrier in the code
802
- // setter did not record the slot update and we have to do that manually.
803
- Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
804
- Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
805
- isolate_->heap()->mark_compact_collector()->
806
- RecordCodeEntrySlot(slot, target);
807
-
808
- RecordSharedFunctionInfoCodeSlot(shared);
809
-
810
1069
  candidate = next_candidate;
811
1070
  }
812
-
813
- jsfunction_candidates_head_ = NULL;
814
1071
  }
1072
+ }
815
1073
 
816
1074
 
817
- void ProcessSharedFunctionInfoCandidates() {
818
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
1075
+ void CodeFlusher::EvictCandidate(JSFunction* function) {
1076
+ ASSERT(!function->next_function_link()->IsUndefined());
1077
+ Object* undefined = isolate_->heap()->undefined_value();
819
1078
 
820
- SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
821
- SharedFunctionInfo* next_candidate;
1079
+ // Make sure previous flushing decisions are revisited.
1080
+ isolate_->heap()->incremental_marking()->RecordWrites(function);
1081
+ isolate_->heap()->incremental_marking()->RecordWrites(function->shared());
1082
+
1083
+ JSFunction* candidate = jsfunction_candidates_head_;
1084
+ JSFunction* next_candidate;
1085
+ if (candidate == function) {
1086
+ next_candidate = GetNextCandidate(function);
1087
+ jsfunction_candidates_head_ = next_candidate;
1088
+ ClearNextCandidate(function, undefined);
1089
+ } else {
822
1090
  while (candidate != NULL) {
823
1091
  next_candidate = GetNextCandidate(candidate);
824
- SetNextCandidate(candidate, NULL);
825
1092
 
826
- Code* code = candidate->code();
827
- MarkBit code_mark = Marking::MarkBitFrom(code);
828
- if (!code_mark.Get()) {
829
- candidate->set_code(lazy_compile);
1093
+ if (next_candidate == function) {
1094
+ next_candidate = GetNextCandidate(function);
1095
+ SetNextCandidate(candidate, next_candidate);
1096
+ ClearNextCandidate(function, undefined);
1097
+ break;
830
1098
  }
831
1099
 
832
- RecordSharedFunctionInfoCodeSlot(candidate);
833
-
834
1100
  candidate = next_candidate;
835
1101
  }
836
-
837
- shared_function_info_candidates_head_ = NULL;
838
1102
  }
1103
+ }
839
1104
 
840
- void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
841
- Object** slot = HeapObject::RawField(shared,
842
- SharedFunctionInfo::kCodeOffset);
843
- isolate_->heap()->mark_compact_collector()->
844
- RecordSlot(slot, slot, HeapObject::cast(*slot));
845
- }
846
1105
 
847
- static JSFunction** GetNextCandidateField(JSFunction* candidate) {
848
- return reinterpret_cast<JSFunction**>(
849
- candidate->address() + JSFunction::kCodeEntryOffset);
850
- }
1106
+ void CodeFlusher::EvictJSFunctionCandidates() {
1107
+ Object* undefined = isolate_->heap()->undefined_value();
851
1108
 
852
- static JSFunction* GetNextCandidate(JSFunction* candidate) {
853
- return *GetNextCandidateField(candidate);
1109
+ JSFunction* candidate = jsfunction_candidates_head_;
1110
+ JSFunction* next_candidate;
1111
+ while (candidate != NULL) {
1112
+ next_candidate = GetNextCandidate(candidate);
1113
+ ClearNextCandidate(candidate, undefined);
1114
+ candidate = next_candidate;
854
1115
  }
855
1116
 
856
- static void SetNextCandidate(JSFunction* candidate,
857
- JSFunction* next_candidate) {
858
- *GetNextCandidateField(candidate) = next_candidate;
859
- }
1117
+ jsfunction_candidates_head_ = NULL;
1118
+ }
860
1119
 
861
- static SharedFunctionInfo** GetNextCandidateField(
862
- SharedFunctionInfo* candidate) {
863
- Code* code = candidate->code();
864
- return reinterpret_cast<SharedFunctionInfo**>(
865
- code->address() + Code::kGCMetadataOffset);
866
- }
867
1120
 
868
- static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
869
- return reinterpret_cast<SharedFunctionInfo*>(
870
- candidate->code()->gc_metadata());
1121
+ void CodeFlusher::EvictSharedFunctionInfoCandidates() {
1122
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
1123
+ SharedFunctionInfo* next_candidate;
1124
+ while (candidate != NULL) {
1125
+ next_candidate = GetNextCandidate(candidate);
1126
+ ClearNextCandidate(candidate);
1127
+ candidate = next_candidate;
871
1128
  }
872
1129
 
873
- static void SetNextCandidate(SharedFunctionInfo* candidate,
874
- SharedFunctionInfo* next_candidate) {
875
- candidate->code()->set_gc_metadata(next_candidate);
876
- }
1130
+ shared_function_info_candidates_head_ = NULL;
1131
+ }
877
1132
 
878
- Isolate* isolate_;
879
- JSFunction* jsfunction_candidates_head_;
880
- SharedFunctionInfo* shared_function_info_candidates_head_;
881
1133
 
882
- DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
883
- };
1134
+ void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
1135
+ Heap* heap = isolate_->heap();
1136
+
1137
+ JSFunction** slot = &jsfunction_candidates_head_;
1138
+ JSFunction* candidate = jsfunction_candidates_head_;
1139
+ while (candidate != NULL) {
1140
+ if (heap->InFromSpace(candidate)) {
1141
+ v->VisitPointer(reinterpret_cast<Object**>(slot));
1142
+ }
1143
+ candidate = GetNextCandidate(*slot);
1144
+ slot = GetNextCandidateSlot(*slot);
1145
+ }
1146
+ }
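The candidate lists the CodeFlusher maintains above are intrusive: the "next" pointers live in fields of the heap objects themselves (the JSFunction code-entry slot, the code object's gc_metadata), so EvictCandidate() reduces to unlinking one node from a singly linked list. A minimal sketch with a hypothetical Node type:

#include <cstddef>

struct Node { Node* next; };

static Node* Evict(Node* head, Node* victim) {
  if (head == victim) return victim->next;        // victim was the head
  for (Node* node = head; node != NULL; node = node->next) {
    if (node->next == victim) {                   // found the predecessor
      node->next = victim->next;
      break;
    }
  }
  return head;
}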
884
1147
 
885
1148
 
886
1149
  MarkCompactCollector::~MarkCompactCollector() {
@@ -927,81 +1190,24 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
927
1190
  }
928
1191
 
929
1192
 
930
- class StaticMarkingVisitor : public StaticVisitorBase {
1193
+ class MarkCompactMarkingVisitor
1194
+ : public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
931
1195
  public:
932
- static inline void IterateBody(Map* map, HeapObject* obj) {
933
- table_.GetVisitor(map)(map, obj);
934
- }
935
-
936
- static void Initialize() {
937
- table_.Register(kVisitShortcutCandidate,
938
- &FixedBodyVisitor<StaticMarkingVisitor,
939
- ConsString::BodyDescriptor,
940
- void>::Visit);
941
-
942
- table_.Register(kVisitConsString,
943
- &FixedBodyVisitor<StaticMarkingVisitor,
944
- ConsString::BodyDescriptor,
945
- void>::Visit);
946
-
947
- table_.Register(kVisitSlicedString,
948
- &FixedBodyVisitor<StaticMarkingVisitor,
949
- SlicedString::BodyDescriptor,
950
- void>::Visit);
951
-
952
- table_.Register(kVisitFixedArray,
953
- &FlexibleBodyVisitor<StaticMarkingVisitor,
954
- FixedArray::BodyDescriptor,
955
- void>::Visit);
956
-
957
- table_.Register(kVisitGlobalContext, &VisitGlobalContext);
958
-
959
- table_.Register(kVisitFixedDoubleArray, DataObjectVisitor::Visit);
960
-
961
- table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
962
- table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
963
- table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
964
- table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
965
-
966
- table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
967
-
968
- table_.Register(kVisitOddball,
969
- &FixedBodyVisitor<StaticMarkingVisitor,
970
- Oddball::BodyDescriptor,
971
- void>::Visit);
972
- table_.Register(kVisitMap,
973
- &FixedBodyVisitor<StaticMarkingVisitor,
974
- Map::BodyDescriptor,
975
- void>::Visit);
1196
+ static void ObjectStatsVisitBase(StaticVisitorBase::VisitorId id,
1197
+ Map* map, HeapObject* obj);
976
1198
 
977
- table_.Register(kVisitCode, &VisitCode);
1199
+ static void ObjectStatsCountFixedArray(
1200
+ FixedArrayBase* fixed_array,
1201
+ FixedArraySubInstanceType fast_type,
1202
+ FixedArraySubInstanceType dictionary_type);
978
1203
 
979
- table_.Register(kVisitSharedFunctionInfo,
980
- &VisitSharedFunctionInfoAndFlushCode);
981
-
982
- table_.Register(kVisitJSFunction,
983
- &VisitJSFunctionAndFlushCode);
984
-
985
- table_.Register(kVisitJSRegExp,
986
- &VisitRegExpAndFlushCode);
987
-
988
- table_.Register(kVisitPropertyCell,
989
- &FixedBodyVisitor<StaticMarkingVisitor,
990
- JSGlobalPropertyCell::BodyDescriptor,
991
- void>::Visit);
992
-
993
- table_.RegisterSpecializations<DataObjectVisitor,
994
- kVisitDataObject,
995
- kVisitDataObjectGeneric>();
996
-
997
- table_.RegisterSpecializations<JSObjectVisitor,
998
- kVisitJSObject,
999
- kVisitJSObjectGeneric>();
1204
+ template<MarkCompactMarkingVisitor::VisitorId id>
1205
+ class ObjectStatsTracker {
1206
+ public:
1207
+ static inline void Visit(Map* map, HeapObject* obj);
1208
+ };
1000
1209
 
1001
- table_.RegisterSpecializations<StructObjectVisitor,
1002
- kVisitStruct,
1003
- kVisitStructGeneric>();
1004
- }
1210
+ static void Initialize();
1005
1211
 
1006
1212
  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
1007
1213
  MarkObjectByPointer(heap->mark_compact_collector(), p, p);
@@ -1020,48 +1226,21 @@ class StaticMarkingVisitor : public StaticVisitorBase {
1020
1226
  }
1021
1227
  }
1022
1228
 
1023
- static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
1024
- ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
1025
- JSGlobalPropertyCell* cell =
1026
- JSGlobalPropertyCell::cast(rinfo->target_cell());
1027
- MarkBit mark = Marking::MarkBitFrom(cell);
1028
- heap->mark_compact_collector()->MarkObject(cell, mark);
1029
- }
1030
-
1031
- static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) {
1032
- ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
1033
- // TODO(mstarzinger): We do not short-circuit cons strings here, verify
1034
- // that there can be no such embedded pointers and add assertion here.
1035
- HeapObject* object = HeapObject::cast(rinfo->target_object());
1036
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
1229
+ // Marks the object black and pushes it on the marking stack.
1230
+ INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
1037
1231
  MarkBit mark = Marking::MarkBitFrom(object);
1038
1232
  heap->mark_compact_collector()->MarkObject(object, mark);
1039
1233
  }
1040
1234
 
1041
- static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
1042
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
1043
- Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1044
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
1045
- && (target->ic_state() == MEGAMORPHIC ||
1046
- heap->mark_compact_collector()->flush_monomorphic_ics_ ||
1047
- target->ic_age() != heap->global_ic_age())) {
1048
- IC::Clear(rinfo->pc());
1049
- target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1235
+ // Marks the object black without pushing it on the marking stack.
1236
+ // Returns true if object needed marking and false otherwise.
1237
+ INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
1238
+ MarkBit mark_bit = Marking::MarkBitFrom(object);
1239
+ if (!mark_bit.Get()) {
1240
+ heap->mark_compact_collector()->SetMark(object, mark_bit);
1241
+ return true;
1050
1242
  }
1051
- MarkBit code_mark = Marking::MarkBitFrom(target);
1052
- heap->mark_compact_collector()->MarkObject(target, code_mark);
1053
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
1054
- }
1055
-
1056
- static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
1057
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
1058
- rinfo->IsPatchedReturnSequence()) ||
1059
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
1060
- rinfo->IsPatchedDebugBreakSlotSequence()));
1061
- Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
1062
- MarkBit code_mark = Marking::MarkBitFrom(target);
1063
- heap->mark_compact_collector()->MarkObject(target, code_mark);
1064
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
1243
+ return false;
1065
1244
  }
1066
1245
 
1067
1246
  // Mark object pointed to by p.
@@ -1116,28 +1295,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
1116
1295
  return true;
1117
1296
  }
1118
1297
 
1119
- static inline void VisitExternalReference(Address* p) { }
1120
- static inline void VisitExternalReference(RelocInfo* rinfo) { }
1121
- static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
1122
-
1123
- private:
1124
- class DataObjectVisitor {
1125
- public:
1126
- template<int size>
1127
- static void VisitSpecialized(Map* map, HeapObject* object) {
1128
- }
1129
-
1130
- static void Visit(Map* map, HeapObject* object) {
1131
- }
1132
- };
1133
-
1134
- typedef FlexibleBodyVisitor<StaticMarkingVisitor,
1135
- JSObject::BodyDescriptor,
1136
- void> JSObjectVisitor;
1137
-
1138
- typedef FlexibleBodyVisitor<StaticMarkingVisitor,
1139
- StructBodyDescriptor,
1140
- void> StructObjectVisitor;
1298
+ INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) {
1299
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
1300
+ shared->BeforeVisitingPointers();
1301
+ }
1141
1302
 
1142
1303
  static void VisitJSWeakMap(Map* map, HeapObject* object) {
1143
1304
  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
@@ -1151,12 +1312,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
1151
1312
 
1152
1313
  // Skip visiting the backing hash table containing the mappings.
1153
1314
  int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
1154
- BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
1315
+ BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
1155
1316
  map->GetHeap(),
1156
1317
  object,
1157
1318
  JSWeakMap::BodyDescriptor::kStartOffset,
1158
1319
  JSWeakMap::kTableOffset);
1159
- BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
1320
+ BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
1160
1321
  map->GetHeap(),
1161
1322
  object,
1162
1323
  JSWeakMap::kTableOffset + kPointerSize,
@@ -1176,136 +1337,14 @@ class StaticMarkingVisitor : public StaticVisitorBase {
1176
1337
  ASSERT(MarkCompactCollector::IsMarked(table->map()));
1177
1338
  }
1178
1339
 
1179
- static void VisitCode(Map* map, HeapObject* object) {
1180
- Heap* heap = map->GetHeap();
1181
- Code* code = reinterpret_cast<Code*>(object);
1182
- if (FLAG_cleanup_code_caches_at_gc) {
1183
- code->ClearTypeFeedbackCells(heap);
1184
- }
1185
- code->CodeIterateBody<StaticMarkingVisitor>(heap);
1186
- }
1340
+ private:
1341
+ template<int id>
1342
+ static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj);
1187
1343
 
1188
1344
  // Code flushing support.
1189
1345
 
1190
- // How many collections newly compiled code object will survive before being
1191
- // flushed.
1192
- static const int kCodeAgeThreshold = 5;
1193
-
1194
1346
  static const int kRegExpCodeThreshold = 5;
1195
1347
 
1196
- inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
1197
- Object* undefined = heap->undefined_value();
1198
- return (info->script() != undefined) &&
1199
- (reinterpret_cast<Script*>(info->script())->source() != undefined);
1200
- }
1201
-
1202
-
1203
- inline static bool IsCompiled(JSFunction* function) {
1204
- return function->code() !=
1205
- function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
1206
- }
1207
-
1208
- inline static bool IsCompiled(SharedFunctionInfo* function) {
1209
- return function->code() !=
1210
- function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
1211
- }
1212
-
1213
- inline static bool IsFlushable(Heap* heap, JSFunction* function) {
1214
- SharedFunctionInfo* shared_info = function->unchecked_shared();
1215
-
1216
- // Code is either on stack, in compilation cache or referenced
1217
- // by optimized version of function.
1218
- MarkBit code_mark = Marking::MarkBitFrom(function->code());
1219
- if (code_mark.Get()) {
1220
- if (!Marking::MarkBitFrom(shared_info).Get()) {
1221
- shared_info->set_code_age(0);
1222
- }
1223
- return false;
1224
- }
1225
-
1226
- // We do not flush code for optimized functions.
1227
- if (function->code() != shared_info->code()) {
1228
- return false;
1229
- }
1230
-
1231
- return IsFlushable(heap, shared_info);
1232
- }
1233
-
1234
- inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
1235
- // Code is either on stack, in compilation cache or referenced
1236
- // by optimized version of function.
1237
- MarkBit code_mark =
1238
- Marking::MarkBitFrom(shared_info->code());
1239
- if (code_mark.Get()) {
1240
- return false;
1241
- }
1242
-
1243
- // The function must be compiled and have the source code available,
1244
- // to be able to recompile it in case we need the function again.
1245
- if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
1246
- return false;
1247
- }
1248
-
1249
- // We never flush code for Api functions.
1250
- Object* function_data = shared_info->function_data();
1251
- if (function_data->IsFunctionTemplateInfo()) {
1252
- return false;
1253
- }
1254
-
1255
- // Only flush code for functions.
1256
- if (shared_info->code()->kind() != Code::FUNCTION) {
1257
- return false;
1258
- }
1259
-
1260
- // Function must be lazy compilable.
1261
- if (!shared_info->allows_lazy_compilation()) {
1262
- return false;
1263
- }
1264
-
1265
- // If this is a full script wrapped in a function we do no flush the code.
1266
- if (shared_info->is_toplevel()) {
1267
- return false;
1268
- }
1269
-
1270
- // Age this shared function info.
1271
- if (shared_info->code_age() < kCodeAgeThreshold) {
1272
- shared_info->set_code_age(shared_info->code_age() + 1);
1273
- return false;
1274
- }
1275
-
1276
- return true;
1277
- }
1278
-
1279
-
1280
- static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
1281
- if (!IsFlushable(heap, function)) return false;
1282
-
1283
- // This function's code looks flushable. But we have to postpone the
1284
- // decision until we see all functions that point to the same
1285
- // SharedFunctionInfo because some of them might be optimized.
1286
- // That would make the nonoptimized version of the code nonflushable,
1287
- // because it is required for bailing out from optimized code.
1288
- heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
1289
- return true;
1290
- }
1291
-
1292
- static inline bool IsValidNotBuiltinContext(Object* ctx) {
1293
- return ctx->IsContext() &&
1294
- !Context::cast(ctx)->global()->IsJSBuiltinsObject();
1295
- }
1296
-
1297
-
1298
- static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
1299
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
1300
-
1301
- if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
1302
-
1303
- FixedBodyVisitor<StaticMarkingVisitor,
1304
- SharedFunctionInfo::BodyDescriptor,
1305
- void>::Visit(map, object);
1306
- }
1307
-
1308
-
1309
1348
  static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
1310
1349
  JSRegExp* re,
1311
1350
  bool is_ascii) {
@@ -1368,7 +1407,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
1368
1407
  Heap* heap = map->GetHeap();
1369
1408
  MarkCompactCollector* collector = heap->mark_compact_collector();
1370
1409
  if (!collector->is_code_flushing_enabled()) {
1371
- VisitJSRegExpFields(map, object);
1410
+ VisitJSRegExp(map, object);
1372
1411
  return;
1373
1412
  }
1374
1413
  JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
@@ -1376,183 +1415,161 @@ class StaticMarkingVisitor : public StaticVisitorBase {
1376
1415
  UpdateRegExpCodeAgeAndFlush(heap, re, true);
1377
1416
  UpdateRegExpCodeAgeAndFlush(heap, re, false);
1378
1417
  // Visit the fields of the RegExp, including the updated FixedArray.
1379
- VisitJSRegExpFields(map, object);
1418
+ VisitJSRegExp(map, object);
1380
1419
  }
1381
1420
 
1421
+ static VisitorDispatchTable<Callback> non_count_table_;
1422
+ };
1382
1423
 
1383
- static void VisitSharedFunctionInfoAndFlushCode(Map* map,
1384
- HeapObject* object) {
1385
- Heap* heap = map->GetHeap();
1386
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
1387
- if (shared->ic_age() != heap->global_ic_age()) {
1388
- shared->ResetForNewContext(heap->global_ic_age());
1389
- }
1390
1424
 
1391
- MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
1392
- if (!collector->is_code_flushing_enabled()) {
1393
- VisitSharedFunctionInfoGeneric(map, object);
1394
- return;
1425
+ void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray(
1426
+ FixedArrayBase* fixed_array,
1427
+ FixedArraySubInstanceType fast_type,
1428
+ FixedArraySubInstanceType dictionary_type) {
1429
+ Heap* heap = fixed_array->map()->GetHeap();
1430
+ if (fixed_array->map() != heap->fixed_cow_array_map() &&
1431
+ fixed_array->map() != heap->fixed_double_array_map() &&
1432
+ fixed_array != heap->empty_fixed_array()) {
1433
+ if (fixed_array->IsDictionary()) {
1434
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1435
+ dictionary_type,
1436
+ fixed_array->Size());
1437
+ } else {
1438
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1439
+ fast_type,
1440
+ fixed_array->Size());
1395
1441
  }
1396
- VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
1397
1442
  }
1443
+ }
1398
1444
 
1399
1445
 
1400
- static void VisitSharedFunctionInfoAndFlushCodeGeneric(
1401
- Map* map, HeapObject* object, bool known_flush_code_candidate) {
1402
- Heap* heap = map->GetHeap();
1403
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
1404
-
1405
- if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
1406
-
1407
- if (!known_flush_code_candidate) {
1408
- known_flush_code_candidate = IsFlushable(heap, shared);
1409
- if (known_flush_code_candidate) {
1410
- heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
1411
- }
1412
- }
1413
-
1414
- VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
1446
+ void MarkCompactMarkingVisitor::ObjectStatsVisitBase(
1447
+ MarkCompactMarkingVisitor::VisitorId id, Map* map, HeapObject* obj) {
1448
+ Heap* heap = map->GetHeap();
1449
+ int object_size = obj->Size();
1450
+ heap->RecordObjectStats(map->instance_type(), -1, object_size);
1451
+ non_count_table_.GetVisitorById(id)(map, obj);
1452
+ if (obj->IsJSObject()) {
1453
+ JSObject* object = JSObject::cast(obj);
1454
+ ObjectStatsCountFixedArray(object->elements(),
1455
+ DICTIONARY_ELEMENTS_SUB_TYPE,
1456
+ FAST_ELEMENTS_SUB_TYPE);
1457
+ ObjectStatsCountFixedArray(object->properties(),
1458
+ DICTIONARY_PROPERTIES_SUB_TYPE,
1459
+ FAST_PROPERTIES_SUB_TYPE);
1415
1460
  }
1461
+ }
1416
1462
 
1417
1463
 
1418
- static void VisitCodeEntry(Heap* heap, Address entry_address) {
1419
- Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1420
- MarkBit mark = Marking::MarkBitFrom(code);
1421
- heap->mark_compact_collector()->MarkObject(code, mark);
1422
- heap->mark_compact_collector()->
1423
- RecordCodeEntrySlot(entry_address, code);
1424
- }
1425
-
1426
- static void VisitGlobalContext(Map* map, HeapObject* object) {
1427
- FixedBodyVisitor<StaticMarkingVisitor,
1428
- Context::MarkCompactBodyDescriptor,
1429
- void>::Visit(map, object);
1464
+ template<MarkCompactMarkingVisitor::VisitorId id>
1465
+ void MarkCompactMarkingVisitor::ObjectStatsTracker<id>::Visit(
1466
+ Map* map, HeapObject* obj) {
1467
+ ObjectStatsVisitBase(id, map, obj);
1468
+ }
1430
1469
 
1431
- MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
1432
- for (int idx = Context::FIRST_WEAK_SLOT;
1433
- idx < Context::GLOBAL_CONTEXT_SLOTS;
1434
- ++idx) {
1435
- Object** slot =
1436
- HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
1437
- collector->RecordSlot(slot, slot, *slot);
1438
- }
1439
- }
1440
1470
 
1441
- static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
1471
+ template<>
1472
+ class MarkCompactMarkingVisitor::ObjectStatsTracker<
1473
+ MarkCompactMarkingVisitor::kVisitMap> {
1474
+ public:
1475
+ static inline void Visit(Map* map, HeapObject* obj) {
1442
1476
  Heap* heap = map->GetHeap();
1443
- MarkCompactCollector* collector = heap->mark_compact_collector();
1444
- if (!collector->is_code_flushing_enabled()) {
1445
- VisitJSFunction(map, object);
1446
- return;
1447
- }
1448
-
1449
- JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
1450
- // The function must have a valid context and not be a builtin.
1451
- bool flush_code_candidate = false;
1452
- if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
1453
- flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
1454
- }
1455
-
1456
- if (!flush_code_candidate) {
1457
- Code* code = jsfunction->shared()->code();
1458
- MarkBit code_mark = Marking::MarkBitFrom(code);
1459
- collector->MarkObject(code, code_mark);
1460
-
1461
- if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
1462
- collector->MarkInlinedFunctionsCode(jsfunction->code());
1463
- }
1464
- }
1465
-
1466
- VisitJSFunctionFields(map,
1467
- reinterpret_cast<JSFunction*>(object),
1468
- flush_code_candidate);
1477
+ Map* map_obj = Map::cast(obj);
1478
+ ASSERT(map->instance_type() == MAP_TYPE);
1479
+ DescriptorArray* array = map_obj->instance_descriptors();
1480
+ if (map_obj->owns_descriptors() &&
1481
+ array != heap->empty_descriptor_array()) {
1482
+ int fixed_array_size = array->Size();
1483
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1484
+ DESCRIPTOR_ARRAY_SUB_TYPE,
1485
+ fixed_array_size);
1486
+ }
1487
+ if (map_obj->HasTransitionArray()) {
1488
+ int fixed_array_size = map_obj->transitions()->Size();
1489
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1490
+ TRANSITION_ARRAY_SUB_TYPE,
1491
+ fixed_array_size);
1492
+ }
1493
+ if (map_obj->code_cache() != heap->empty_fixed_array()) {
1494
+ heap->RecordObjectStats(
1495
+ FIXED_ARRAY_TYPE,
1496
+ MAP_CODE_CACHE_SUB_TYPE,
1497
+ FixedArray::cast(map_obj->code_cache())->Size());
1498
+ }
1499
+ ObjectStatsVisitBase(kVisitMap, map, obj);
1469
1500
  }
1501
+ };
1470
1502
 
1471
1503
 
1472
- static void VisitJSFunction(Map* map, HeapObject* object) {
1473
- VisitJSFunctionFields(map,
1474
- reinterpret_cast<JSFunction*>(object),
1475
- false);
1504
+ template<>
1505
+ class MarkCompactMarkingVisitor::ObjectStatsTracker<
1506
+ MarkCompactMarkingVisitor::kVisitCode> {
1507
+ public:
1508
+ static inline void Visit(Map* map, HeapObject* obj) {
1509
+ Heap* heap = map->GetHeap();
1510
+ int object_size = obj->Size();
1511
+ ASSERT(map->instance_type() == CODE_TYPE);
1512
+ heap->RecordObjectStats(CODE_TYPE, Code::cast(obj)->kind(), object_size);
1513
+ ObjectStatsVisitBase(kVisitCode, map, obj);
1476
1514
  }
1515
+ };
1477
1516
 
1478
1517
 
1479
- #define SLOT_ADDR(obj, offset) \
1480
- reinterpret_cast<Object**>((obj)->address() + offset)
1481
-
1482
-
1483
- static inline void VisitJSFunctionFields(Map* map,
1484
- JSFunction* object,
1485
- bool flush_code_candidate) {
1518
+ template<>
1519
+ class MarkCompactMarkingVisitor::ObjectStatsTracker<
1520
+ MarkCompactMarkingVisitor::kVisitSharedFunctionInfo> {
1521
+ public:
1522
+ static inline void Visit(Map* map, HeapObject* obj) {
1486
1523
  Heap* heap = map->GetHeap();
1487
-
1488
- VisitPointers(heap,
1489
- HeapObject::RawField(object, JSFunction::kPropertiesOffset),
1490
- HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
1491
-
1492
- if (!flush_code_candidate) {
1493
- VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
1494
- } else {
1495
- // Don't visit code object.
1496
-
1497
- // Visit shared function info to avoid double checking of it's
1498
- // flushability.
1499
- SharedFunctionInfo* shared_info = object->unchecked_shared();
1500
- MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info);
1501
- if (!shared_info_mark.Get()) {
1502
- Map* shared_info_map = shared_info->map();
1503
- MarkBit shared_info_map_mark =
1504
- Marking::MarkBitFrom(shared_info_map);
1505
- heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
1506
- heap->mark_compact_collector()->MarkObject(shared_info_map,
1507
- shared_info_map_mark);
1508
- VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
1509
- shared_info,
1510
- true);
1511
- }
1524
+ SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
1525
+ if (sfi->scope_info() != heap->empty_fixed_array()) {
1526
+ heap->RecordObjectStats(
1527
+ FIXED_ARRAY_TYPE,
1528
+ SCOPE_INFO_SUB_TYPE,
1529
+ FixedArray::cast(sfi->scope_info())->Size());
1512
1530
  }
1513
-
1514
- VisitPointers(
1515
- heap,
1516
- HeapObject::RawField(object,
1517
- JSFunction::kCodeEntryOffset + kPointerSize),
1518
- HeapObject::RawField(object,
1519
- JSFunction::kNonWeakFieldsEndOffset));
1520
- }
1521
-
1522
- static inline void VisitJSRegExpFields(Map* map,
1523
- HeapObject* object) {
1524
- int last_property_offset =
1525
- JSRegExp::kSize + kPointerSize * map->inobject_properties();
1526
- VisitPointers(map->GetHeap(),
1527
- SLOT_ADDR(object, JSRegExp::kPropertiesOffset),
1528
- SLOT_ADDR(object, last_property_offset));
1531
+ ObjectStatsVisitBase(kVisitSharedFunctionInfo, map, obj);
1529
1532
  }
1533
+ };
1530
1534
 
1531
1535
 
1532
- static void VisitSharedFunctionInfoFields(Heap* heap,
1533
- HeapObject* object,
1534
- bool flush_code_candidate) {
1535
- VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
1536
-
1537
- if (!flush_code_candidate) {
1538
- VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
1536
+ template<>
1537
+ class MarkCompactMarkingVisitor::ObjectStatsTracker<
1538
+ MarkCompactMarkingVisitor::kVisitFixedArray> {
1539
+ public:
1540
+ static inline void Visit(Map* map, HeapObject* obj) {
1541
+ Heap* heap = map->GetHeap();
1542
+ FixedArray* fixed_array = FixedArray::cast(obj);
1543
+ if (fixed_array == heap->symbol_table()) {
1544
+ heap->RecordObjectStats(
1545
+ FIXED_ARRAY_TYPE,
1546
+ SYMBOL_TABLE_SUB_TYPE,
1547
+ fixed_array->Size());
1539
1548
  }
1540
-
1541
- VisitPointers(heap,
1542
- SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
1543
- SLOT_ADDR(object, SharedFunctionInfo::kSize));
1549
+ ObjectStatsVisitBase(kVisitFixedArray, map, obj);
1544
1550
  }
1551
+ };
1545
1552
 
1546
- #undef SLOT_ADDR
1547
1553
 
1548
- typedef void (*Callback)(Map* map, HeapObject* object);
1554
+ void MarkCompactMarkingVisitor::Initialize() {
1555
+ StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
1549
1556
 
1550
- static VisitorDispatchTable<Callback> table_;
1551
- };
1557
+ table_.Register(kVisitJSRegExp,
1558
+ &VisitRegExpAndFlushCode);
1552
1559
 
1560
+ if (FLAG_track_gc_object_stats) {
1561
+ // Copy the visitor table to make call-through possible.
1562
+ non_count_table_.CopyFrom(&table_);
1563
+ #define VISITOR_ID_COUNT_FUNCTION(id) \
1564
+ table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit);
1565
+ VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION)
1566
+ #undef VISITOR_ID_COUNT_FUNCTION
1567
+ }
1568
+ }
1553
1569
 
1554
- VisitorDispatchTable<StaticMarkingVisitor::Callback>
1555
- StaticMarkingVisitor::table_;
1570
+
1571
+ VisitorDispatchTable<MarkCompactMarkingVisitor::Callback>
1572
+ MarkCompactMarkingVisitor::non_count_table_;
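When --track-gc-object-stats is on, Initialize() above copies the plain visitor table aside and registers counting wrappers that record per-type statistics before calling through. A minimal sketch of that decoration pattern, with hypothetical names and a fixed-size table:

static const int kNumVisitorIds = 3;
typedef void (*VisitFn)(void* object);

static VisitFn plain_table[kNumVisitorIds];   // copy of the undecorated table
static VisitFn active_table[kNumVisitorIds];  // what the marker dispatches on
static long    visit_counts[kNumVisitorIds];

template <int id>
static void CountingVisit(void* object) {
  visit_counts[id]++;         // record the statistic ...
  plain_table[id](object);    // ... then call through to the original visitor
}

static void InstallCountingWrappers() {
  // Assumes plain_table has already been copied from active_table.
  active_table[0] = &CountingVisit<0>;
  active_table[1] = &CountingVisit<1>;
  active_table[2] = &CountingVisit<2>;
}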
1556
1573
 
1557
1574
 
1558
1575
  class MarkingVisitor : public ObjectVisitor {
@@ -1560,11 +1577,11 @@ class MarkingVisitor : public ObjectVisitor {
1560
1577
  explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
1561
1578
 
1562
1579
  void VisitPointer(Object** p) {
1563
- StaticMarkingVisitor::VisitPointer(heap_, p);
1580
+ MarkCompactMarkingVisitor::VisitPointer(heap_, p);
1564
1581
  }
1565
1582
 
1566
1583
  void VisitPointers(Object** start, Object** end) {
1567
- StaticMarkingVisitor::VisitPointers(heap_, start, end);
1584
+ MarkCompactMarkingVisitor::VisitPointers(heap_, start, end);
1568
1585
  }
1569
1586
 
1570
1587
  private:
@@ -1611,26 +1628,6 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
1611
1628
  };
1612
1629
 
1613
1630
 
1614
- void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
1615
- // For optimized functions we should retain both non-optimized version
1616
- // of it's code and non-optimized version of all inlined functions.
1617
- // This is required to support bailing out from inlined code.
1618
- DeoptimizationInputData* data =
1619
- DeoptimizationInputData::cast(code->deoptimization_data());
1620
-
1621
- FixedArray* literals = data->LiteralArray();
1622
-
1623
- for (int i = 0, count = data->InlinedFunctionCount()->value();
1624
- i < count;
1625
- i++) {
1626
- JSFunction* inlined = JSFunction::cast(literals->get(i));
1627
- Code* inlined_code = inlined->shared()->code();
1628
- MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
1629
- MarkObject(inlined_code, inlined_code_mark);
1630
- }
1631
- }
1632
-
1633
-
1634
1631
  void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
1635
1632
  ThreadLocalTop* top) {
1636
1633
  for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
@@ -1643,7 +1640,8 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
1643
1640
  MarkBit code_mark = Marking::MarkBitFrom(code);
1644
1641
  MarkObject(code, code_mark);
1645
1642
  if (frame->is_optimized()) {
1646
- MarkInlinedFunctionsCode(frame->LookupCode());
1643
+ MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(),
1644
+ frame->LookupCode());
1647
1645
  }
1648
1646
  }
1649
1647
  }
@@ -1652,21 +1650,13 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
1652
1650
  void MarkCompactCollector::PrepareForCodeFlushing() {
1653
1651
  ASSERT(heap() == Isolate::Current()->heap());
1654
1652
 
1655
- // TODO(1609) Currently incremental marker does not support code flushing.
1656
- if (!FLAG_flush_code || was_marked_incrementally_) {
1657
- EnableCodeFlushing(false);
1658
- return;
1653
+ // Enable code flushing for non-incremental cycles.
1654
+ if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
1655
+ EnableCodeFlushing(!was_marked_incrementally_);
1659
1656
  }
1660
1657
 
1661
- #ifdef ENABLE_DEBUGGER_SUPPORT
1662
- if (heap()->isolate()->debug()->IsLoaded() ||
1663
- heap()->isolate()->debug()->has_break_points()) {
1664
- EnableCodeFlushing(false);
1665
- return;
1666
- }
1667
- #endif
1668
-
1669
- EnableCodeFlushing(true);
1658
+ // If code flushing is disabled, there is no need to prepare for it.
1659
+ if (!is_code_flushing_enabled()) return;
1670
1660
 
1671
1661
  // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
1672
1662
  // relies on it being marked before any other descriptor array.
@@ -1723,7 +1713,7 @@ class RootMarkingVisitor : public ObjectVisitor {
1723
1713
  // Mark the map pointer and body, and push them on the marking stack.
1724
1714
  MarkBit map_mark = Marking::MarkBitFrom(map);
1725
1715
  collector_->MarkObject(map, map_mark);
1726
- StaticMarkingVisitor::IterateBody(map, object);
1716
+ MarkCompactMarkingVisitor::IterateBody(map, object);
1727
1717
 
1728
1718
  // Mark all the objects reachable from the map and body. May leave
1729
1719
  // overflowed objects in the heap.
@@ -1785,150 +1775,6 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
  };


- void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
- ASSERT(IsMarked(object));
- ASSERT(HEAP->Contains(object));
- if (object->IsMap()) {
- Map* map = Map::cast(object);
- heap_->ClearCacheOnMap(map);
-
- // When map collection is enabled we have to mark through map's transitions
- // in a special way to make transition links weak. Only maps for subclasses
- // of JSReceiver can have transitions.
- STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- marker_.MarkMapContents(map);
- } else {
- marking_deque_.PushBlack(map);
- }
- } else {
- marking_deque_.PushBlack(object);
- }
- }
-
-
- // Force instantiation of template instances.
- template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
- template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
-
-
- template <class T>
- void Marker<T>::MarkMapContents(Map* map) {
- // Mark prototype transitions array but don't push it into marking stack.
- // This will make references from it weak. We will clean dead prototype
- // transitions in ClearNonLiveTransitions.
- Object** proto_trans_slot =
- HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
- HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
- if (prototype_transitions->IsFixedArray()) {
- mark_compact_collector()->RecordSlot(proto_trans_slot,
- proto_trans_slot,
- prototype_transitions);
- MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
- if (!mark.Get()) {
- mark.Set();
- MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
- prototype_transitions->Size());
- }
- }
-
- // Make sure that the back pointer stored either in the map itself or inside
- // its prototype transitions array is marked. Treat pointers in the descriptor
- // array as weak and also mark that array to prevent visiting it later.
- base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
-
- Object** descriptor_array_slot =
- HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
- Object* descriptor_array = *descriptor_array_slot;
- if (!descriptor_array->IsSmi()) {
- MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
- }
-
- // Mark the Object* fields of the Map. Since the descriptor array has been
- // marked already, it is fine that one of these fields contains a pointer
- // to it. But make sure to skip back pointer and prototype transitions.
- STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
- Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
- Object** start_slot = HeapObject::RawField(
- map, Map::kPointerFieldsBeginOffset);
- Object** end_slot = HeapObject::RawField(
- map, Map::kPrototypeTransitionsOrBackPointerOffset);
- for (Object** slot = start_slot; slot < end_slot; slot++) {
- Object* obj = *slot;
- if (!obj->NonFailureIsHeapObject()) continue;
- mark_compact_collector()->RecordSlot(start_slot, slot, obj);
- base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
- }
- }
-
-
- template <class T>
- void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
- // Empty descriptor array is marked as a root before any maps are marked.
- ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());
-
- // The DescriptorArray contains a pointer to its contents array, but the
- // contents array will be marked black and hence not be visited again.
- if (!base_marker()->MarkObjectAndPush(descriptors)) return;
- FixedArray* contents = FixedArray::cast(
- descriptors->get(DescriptorArray::kContentArrayIndex));
- ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents)));
- base_marker()->MarkObjectWithoutPush(contents);
-
- // If the descriptor contains a transition (value is a Map), we don't mark the
- // value as live. It might be set to the NULL_DESCRIPTOR in
- // ClearNonLiveTransitions later.
- for (int i = 0; i < descriptors->number_of_descriptors(); ++i) {
- PropertyDetails details(descriptors->GetDetails(i));
- Object** slot = descriptors->GetValueSlot(i);
-
- if (!(*slot)->IsHeapObject()) continue;
- HeapObject* value = HeapObject::cast(*slot);
-
- mark_compact_collector()->RecordSlot(slot, slot, *slot);
-
- switch (details.type()) {
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- base_marker()->MarkObjectAndPush(value);
- break;
- case CALLBACKS:
- if (!value->IsAccessorPair()) {
- base_marker()->MarkObjectAndPush(value);
- } else if (base_marker()->MarkObjectWithoutPush(value)) {
- AccessorPair* accessors = AccessorPair::cast(value);
- MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset);
- MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset);
- }
- break;
- case ELEMENTS_TRANSITION:
- // For maps with multiple elements transitions, the transition maps are
- // stored in a FixedArray. Keep the fixed array alive but not the maps
- // that it refers to.
- if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value);
- break;
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
- break;
- }
- }
- }
-
-
- template <class T>
- void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) {
- Object** slot = HeapObject::RawField(accessors, offset);
- HeapObject* accessor = HeapObject::cast(*slot);
- if (accessor->IsMap()) return;
- mark_compact_collector()->RecordSlot(slot, slot, accessor);
- base_marker()->MarkObjectAndPush(accessor);
- }
-
-

  // Fill the marking stack with overflowed objects returned by the given
  // iterator. Stop when the marking stack is filled or the end of the space
@@ -1981,10 +1827,10 @@ static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
  for (;
  cell_index < last_cell_index;
  cell_index++, cell_base += 32 * kPointerSize) {
- ASSERT((unsigned)cell_index ==
- Bitmap::IndexToCell(
- Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(cell_base))));
+ ASSERT(static_cast<unsigned>(cell_index) ==
+ Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(cell_base))));

  const MarkBit::CellType current_cell = cells[cell_index];
  if (current_cell == 0) continue;
@@ -2041,6 +1887,16 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
  }


+ bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
+ Object** p) {
+ Object* o = *p;
+ ASSERT(o->IsHeapObject());
+ HeapObject* heap_object = HeapObject::cast(o);
+ MarkBit mark = Marking::MarkBitFrom(heap_object);
+ return !mark.Get();
+ }
+
+
  void MarkCompactCollector::MarkSymbolTable() {
  SymbolTable* symbol_table = heap()->symbol_table();
  // Mark the symbol table itself.
@@ -2069,54 +1925,6 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
  }


- void MarkCompactCollector::MarkObjectGroups() {
- List<ObjectGroup*>* object_groups =
- heap()->isolate()->global_handles()->object_groups();
-
- int last = 0;
- for (int i = 0; i < object_groups->length(); i++) {
- ObjectGroup* entry = object_groups->at(i);
- ASSERT(entry != NULL);
-
- Object*** objects = entry->objects_;
- bool group_marked = false;
- for (size_t j = 0; j < entry->length_; j++) {
- Object* object = *objects[j];
- if (object->IsHeapObject()) {
- HeapObject* heap_object = HeapObject::cast(object);
- MarkBit mark = Marking::MarkBitFrom(heap_object);
- if (mark.Get()) {
- group_marked = true;
- break;
- }
- }
- }
-
- if (!group_marked) {
- (*object_groups)[last++] = entry;
- continue;
- }
-
- // An object in the group is marked, so mark as grey all white heap
- // objects in the group.
- for (size_t j = 0; j < entry->length_; ++j) {
- Object* object = *objects[j];
- if (object->IsHeapObject()) {
- HeapObject* heap_object = HeapObject::cast(object);
- MarkBit mark = Marking::MarkBitFrom(heap_object);
- MarkObject(heap_object, mark);
- }
- }
-
- // Once the entire group has been colored grey, set the object group
- // to NULL so it won't be processed again.
- entry->Dispose();
- object_groups->at(i) = NULL;
- }
- object_groups->Rewind(last);
- }
-
-
  void MarkCompactCollector::MarkImplicitRefGroups() {
  List<ImplicitRefGroup*>* ref_groups =
  heap()->isolate()->global_handles()->implicit_ref_groups();
@@ -2165,7 +1973,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
  MarkBit map_mark = Marking::MarkBitFrom(map);
  MarkObject(map, map_mark);

- StaticMarkingVisitor::IterateBody(map, object);
+ MarkCompactMarkingVisitor::IterateBody(map, object);
  }

  // Process encountered weak maps, mark objects only reachable by those
@@ -2235,11 +2043,12 @@ void MarkCompactCollector::ProcessMarkingDeque() {
  }


- void MarkCompactCollector::ProcessExternalMarking() {
+ void MarkCompactCollector::ProcessExternalMarking(RootMarkingVisitor* visitor) {
  bool work_to_do = true;
  ASSERT(marking_deque_.IsEmpty());
  while (work_to_do) {
- MarkObjectGroups();
+ heap()->isolate()->global_handles()->IterateObjectGroups(
+ visitor, &IsUnmarkedHeapObjectWithHeap);
  MarkImplicitRefGroups();
  work_to_do = !marking_deque_.IsEmpty();
  ProcessMarkingDeque();
@@ -2262,7 +2071,7 @@ void MarkCompactCollector::MarkLiveObjects() {
  // non-incremental marker can deal with them as if overflow
  // occured during normal marking.
  // But incremental marker uses a separate marking deque
- // so we have to explicitly copy it's overflow state.
+ // so we have to explicitly copy its overflow state.
  incremental_marking->Finalize();
  incremental_marking_overflowed =
  incremental_marking->marking_deque()->overflowed();
@@ -2304,7 +2113,7 @@ void MarkCompactCollector::MarkLiveObjects() {
  ASSERT(cell->IsJSGlobalPropertyCell());
  if (IsMarked(cell)) {
  int offset = JSGlobalPropertyCell::kValueOffset;
- StaticMarkingVisitor::VisitPointer(
+ MarkCompactMarkingVisitor::VisitPointer(
  heap(),
  reinterpret_cast<Object**>(cell->address() + offset));
  }
@@ -2318,7 +2127,7 @@ void MarkCompactCollector::MarkLiveObjects() {
  // The objects reachable from the roots are marked, yet unreachable
  // objects are unmarked. Mark objects reachable due to host
  // application specific logic.
- ProcessExternalMarking();
+ ProcessExternalMarking(&root_visitor);

  // The objects reachable from the roots or object groups are marked,
  // yet unreachable objects are unmarked. Mark objects reachable
@@ -2337,7 +2146,7 @@ void MarkCompactCollector::MarkLiveObjects() {

  // Repeat host application specific marking to mark unmarked objects
  // reachable from the weak roots.
- ProcessExternalMarking();
+ ProcessExternalMarking(&root_visitor);

  AfterMarking();
  }
@@ -2359,6 +2168,7 @@ void MarkCompactCollector::AfterMarking() {
  symbol_table->ElementsRemoved(v.PointersRemoved());
  heap()->external_string_table_.Iterate(&v);
  heap()->external_string_table_.CleanUp();
+ heap()->error_object_list_.RemoveUnmarked(heap());

  // Process the weak references.
  MarkCompactWeakObjectRetainer mark_compact_object_retainer;
@@ -2371,17 +2181,26 @@ void MarkCompactCollector::AfterMarking() {
  // Flush code from collected candidates.
  if (is_code_flushing_enabled()) {
  code_flusher_->ProcessCandidates();
+ // If incremental marker does not support code flushing, we need to
+ // disable it before incremental marking steps for next cycle.
+ if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
+ EnableCodeFlushing(false);
+ }
  }

  if (!FLAG_watch_ic_patching) {
  // Clean up dead objects from the runtime profiler.
  heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
  }
+
+ if (FLAG_track_gc_object_stats) {
+ heap()->CheckpointObjectStats();
+ }
  }


  void MarkCompactCollector::ProcessMapCaches() {
- Object* raw_context = heap()->global_contexts_list_;
+ Object* raw_context = heap()->native_contexts_list_;
  while (raw_context != heap()->undefined_value()) {
  Context* context = reinterpret_cast<Context*>(raw_context);
  if (IsMarked(context)) {
@@ -2449,7 +2268,7 @@ void MarkCompactCollector::ReattachInitialMaps() {
  }


- void MarkCompactCollector::ClearNonLiveTransitions() {
+ void MarkCompactCollector::ClearNonLiveReferences() {
  HeapObjectIterator map_iterator(heap()->map_space());
  // Iterate over the map space, setting map transitions that go from
  // a marked map to an unmarked map to null transitions. This action
@@ -2461,9 +2280,7 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
  if (map->IsFreeSpace()) continue;

  ASSERT(map->IsMap());
- // Only JSObject and subtypes have map transitions and back pointers.
- STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
- if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
+ if (!map->CanTransition()) continue;

  if (map_mark.Get() &&
  map->attached_to_shared_function_info()) {
@@ -2475,13 +2292,19 @@ void MarkCompactCollector::ClearNonLiveTransitions() {

  ClearNonLivePrototypeTransitions(map);
  ClearNonLiveMapTransitions(map, map_mark);
+
+ if (map_mark.Get()) {
+ ClearNonLiveDependentCodes(map);
+ } else {
+ ClearAndDeoptimizeDependentCodes(map);
+ }
  }
  }


  void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
  int number_of_transitions = map->NumberOfProtoTransitions();
- FixedArray* prototype_transitions = map->prototype_transitions();
+ FixedArray* prototype_transitions = map->GetPrototypeTransitions();

  int new_number_of_transitions = 0;
  const int header = Map::kProtoTransitionHeaderSize;
@@ -2543,6 +2366,46 @@ void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
  }


+ void MarkCompactCollector::ClearAndDeoptimizeDependentCodes(Map* map) {
+ AssertNoAllocation no_allocation_scope;
+ DependentCodes* codes = map->dependent_codes();
+ int number_of_codes = codes->number_of_codes();
+ if (number_of_codes == 0) return;
+ for (int i = 0; i < number_of_codes; i++) {
+ Code* code = codes->code_at(i);
+ if (IsMarked(code) && !code->marked_for_deoptimization()) {
+ code->set_marked_for_deoptimization(true);
+ }
+ codes->clear_code_at(i);
+ }
+ map->set_dependent_codes(DependentCodes::cast(heap()->empty_fixed_array()));
+ }
+
+
+ void MarkCompactCollector::ClearNonLiveDependentCodes(Map* map) {
+ AssertNoAllocation no_allocation_scope;
+ DependentCodes* codes = map->dependent_codes();
+ int number_of_codes = codes->number_of_codes();
+ if (number_of_codes == 0) return;
+ int new_number_of_codes = 0;
+ for (int i = 0; i < number_of_codes; i++) {
+ Code* code = codes->code_at(i);
+ if (IsMarked(code) && !code->marked_for_deoptimization()) {
+ if (new_number_of_codes != i) {
+ codes->set_code_at(new_number_of_codes, code);
+ }
+ Object** slot = codes->code_slot_at(new_number_of_codes);
+ RecordSlot(slot, slot, code);
+ new_number_of_codes++;
+ }
+ }
+ for (int i = new_number_of_codes; i < number_of_codes; i++) {
+ codes->clear_code_at(i);
+ }
+ codes->set_number_of_codes(new_number_of_codes);
+ }
+
+
  void MarkCompactCollector::ProcessWeakMaps() {
  Object* weak_map_obj = encountered_weak_maps();
  while (weak_map_obj != Smi::FromInt(0)) {
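The new ClearNonLiveDependentCodes above filters the dependent-code array in place: surviving entries are shifted toward the front, their slots re-recorded, and the unused tail cleared before the length is updated. A minimal standalone sketch of that compact-in-place pattern follows; CompactInPlace, Entry and alive are illustrative names, not V8 API.

#include <cstdio>
#include <vector>

// Keeps only the elements that satisfy `alive`, shifting them left, clearing
// the tail, and returning the new logical length (same shape as the loop in
// ClearNonLiveDependentCodes above).
template <typename Entry, typename Pred>
static int CompactInPlace(std::vector<Entry>* entries, Pred alive) {
  int new_length = 0;
  for (int i = 0; i < static_cast<int>(entries->size()); i++) {
    if (alive((*entries)[i])) {
      if (new_length != i) (*entries)[new_length] = (*entries)[i];
      new_length++;
    }
  }
  for (int i = new_length; i < static_cast<int>(entries->size()); i++) {
    (*entries)[i] = Entry();  // clear the now-unused slots
  }
  return new_length;
}

int main() {
  std::vector<int> codes = {1, 0, 2, 0, 3};  // 0 stands in for a dead entry
  int live = CompactInPlace(&codes, [](int c) { return c != 0; });
  std::printf("%d live entries\n", live);  // prints: 3 live entries
  return 0;
}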
@@ -2559,7 +2422,8 @@ void MarkCompactCollector::ProcessWeakMaps() {
  Object** value_slot =
  HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
  ObjectHashTable::EntryToValueIndex(i)));
- StaticMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot);
+ MarkCompactMarkingVisitor::MarkObjectByPointer(
+ this, anchor, value_slot);
  }
  }
  weak_map_obj = weak_map->next();
@@ -2673,15 +2537,33 @@ class PointersUpdatingVisitor: public ObjectVisitor {
  void VisitEmbeddedPointer(RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  Object* target = rinfo->target_object();
+ Object* old_target = target;
  VisitPointer(&target);
- rinfo->set_target_object(target);
+ // Avoid unnecessary changes that might unnecessary flush the instruction
+ // cache.
+ if (target != old_target) {
+ rinfo->set_target_object(target);
+ }
  }

  void VisitCodeTarget(RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ Object* old_target = target;
  VisitPointer(&target);
- rinfo->set_target_address(Code::cast(target)->instruction_start());
+ if (target != old_target) {
+ rinfo->set_target_address(Code::cast(target)->instruction_start());
+ }
+ }
+
+ void VisitCodeAgeSequence(RelocInfo* rinfo) {
+ ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+ Object* stub = rinfo->code_age_stub();
+ ASSERT(stub != NULL);
+ VisitPointer(&stub);
+ if (stub != rinfo->code_age_stub()) {
+ rinfo->set_code_age_stub(Code::cast(stub));
+ }
  }

  void VisitDebugTarget(RelocInfo* rinfo) {
@@ -2855,9 +2737,6 @@ void MarkCompactCollector::EvacuateNewSpace() {
  size,
  NEW_SPACE);
  } else {
- // Process the dead object before we write a NULL into its header.
- LiveObjectList::ProcessNonLive(object);
-
  // Mark dead objects in the new space with null in their map field.
  Memory::Address_at(object->address()) = NULL;
  }
@@ -2890,10 +2769,10 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
  for (;
  cell_index < last_cell_index;
  cell_index++, cell_base += 32 * kPointerSize) {
- ASSERT((unsigned)cell_index ==
- Bitmap::IndexToCell(
- Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(cell_base))));
+ ASSERT(static_cast<unsigned>(cell_index) ==
+ Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(cell_base))));
  if (cells[cell_index] == 0) continue;

  int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
@@ -3036,6 +2915,11 @@ static void SweepPrecisely(PagedSpace* space,
  space->identity() == CODE_SPACE);
  ASSERT((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));

+ double start_time = 0.0;
+ if (FLAG_print_cumulative_gc_stat) {
+ start_time = OS::TimeCurrentMillis();
+ }
+
  MarkBit::CellType* cells = p->markbits()->cells();
  p->MarkSweptPrecisely();

@@ -3063,10 +2947,10 @@ static void SweepPrecisely(PagedSpace* space,
  for (;
  cell_index < last_cell_index;
  cell_index++, object_address += 32 * kPointerSize) {
- ASSERT((unsigned)cell_index ==
- Bitmap::IndexToCell(
- Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(object_address))));
+ ASSERT(static_cast<unsigned>(cell_index) ==
+ Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(object_address))));
  int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets);
  int live_index = 0;
  for ( ; live_objects != 0; live_objects--) {
@@ -3101,6 +2985,9 @@ static void SweepPrecisely(PagedSpace* space,
  space->Free(free_start, static_cast<int>(p->area_end() - free_start));
  }
  p->ResetLiveBytes();
+ if (FLAG_print_cumulative_gc_stat) {
+ space->heap()->AddSweepingTime(OS::TimeCurrentMillis() - start_time);
+ }
  }


@@ -3221,6 +3108,8 @@ void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {


  void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
+ Heap::RelocationLock relocation_lock(heap());
+
  bool code_slots_filtering_required;
  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
  code_slots_filtering_required = MarkInvalidatedCode();
@@ -3255,7 +3144,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
  GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
  // Update roots.
  heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
- LiveObjectList::IterateElements(&updating_visitor);
  }

  { GCTracer::Scope gc_scope(tracer_,
@@ -3326,7 +3214,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {

  switch (space->identity()) {
  case OLD_DATA_SPACE:
- SweepConservatively(space, p);
+ SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
  break;
  case OLD_POINTER_SPACE:
  SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>(
@@ -3359,8 +3247,8 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
  }
  }

- // Update pointer from the global contexts list.
- updating_visitor.VisitPointer(heap_->global_contexts_list_address());
+ // Update pointer from the native contexts list.
+ updating_visitor.VisitPointer(heap_->native_contexts_list_address());

  heap_->symbol_table()->Iterate(&updating_visitor);

@@ -3368,6 +3256,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
  heap_->UpdateReferencesInExternalStringTable(
  &UpdateReferenceInExternalStringTableEntry);

+ // Update pointers in the new error object list.
+ heap_->error_object_list()->UpdateReferences();
+
  if (!FLAG_watch_ic_patching) {
  // Update JSFunction pointers from the runtime profiler.
  heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
@@ -3383,7 +3274,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {

  heap_->isolate()->inner_pointer_to_code_cache()->Flush();

- #ifdef DEBUG
+ #ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
  VerifyEvacuation(heap_);
  }
@@ -3682,6 +3573,33 @@ static inline Address StartOfLiveObject(Address block_address, uint32_t cell) {
  }


+ template<MarkCompactCollector::SweepingParallelism mode>
+ static intptr_t Free(PagedSpace* space,
+ FreeList* free_list,
+ Address start,
+ int size) {
+ if (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY) {
+ return space->Free(start, size);
+ } else {
+ return size - free_list->Free(start, size);
+ }
+ }
+
+
+ // Force instantiation of templatized SweepConservatively method for
+ // SWEEP_SEQUENTIALLY mode.
+ template intptr_t MarkCompactCollector::
+ SweepConservatively<MarkCompactCollector::SWEEP_SEQUENTIALLY>(
+ PagedSpace*, FreeList*, Page*);
+
+
+ // Force instantiation of templatized SweepConservatively method for
+ // SWEEP_IN_PARALLEL mode.
+ template intptr_t MarkCompactCollector::
+ SweepConservatively<MarkCompactCollector::SWEEP_IN_PARALLEL>(
+ PagedSpace*, FreeList*, Page*);
+
+
  // Sweeps a space conservatively. After this has been done the larger free
  // spaces have been put on the free list and the smaller ones have been
  // ignored and left untouched. A free space is always either ignored or put
3689
3607
  // because it means that any FreeSpace maps left actually describe a region of
3690
3608
  // memory that can be ignored when scanning. Dead objects other than free
3691
3609
  // spaces will not contain the free space map.
3692
- intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
3610
+ template<MarkCompactCollector::SweepingParallelism mode>
3611
+ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space,
3612
+ FreeList* free_list,
3613
+ Page* p) {
3693
3614
  ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept());
3615
+ ASSERT((mode == MarkCompactCollector::SWEEP_IN_PARALLEL &&
3616
+ free_list != NULL) ||
3617
+ (mode == MarkCompactCollector::SWEEP_SEQUENTIALLY &&
3618
+ free_list == NULL));
3619
+
3694
3620
  MarkBit::CellType* cells = p->markbits()->cells();
3695
3621
  p->MarkSweptConservatively();
3696
3622
 
@@ -3717,8 +3643,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
3717
3643
  }
3718
3644
  size_t size = block_address - p->area_start();
3719
3645
  if (cell_index == last_cell_index) {
3720
- freed_bytes += static_cast<int>(space->Free(p->area_start(),
3721
- static_cast<int>(size)));
3646
+ freed_bytes += Free<mode>(space, free_list, p->area_start(),
3647
+ static_cast<int>(size));
3722
3648
  ASSERT_EQ(0, p->LiveBytes());
3723
3649
  return freed_bytes;
3724
3650
  }
@@ -3727,8 +3653,9 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
3727
3653
  Address free_end = StartOfLiveObject(block_address, cells[cell_index]);
3728
3654
  // Free the first free space.
3729
3655
  size = free_end - p->area_start();
3730
- freed_bytes += space->Free(p->area_start(),
3731
- static_cast<int>(size));
3656
+ freed_bytes += Free<mode>(space, free_list, p->area_start(),
3657
+ static_cast<int>(size));
3658
+
3732
3659
  // The start of the current free area is represented in undigested form by
3733
3660
  // the address of the last 32-word section that contained a live object and
3734
3661
  // the marking bitmap for that cell, which describes where the live object
@@ -3757,8 +3684,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
3757
3684
  // so now we need to find the start of the first live object at the
3758
3685
  // end of the free space.
3759
3686
  free_end = StartOfLiveObject(block_address, cell);
3760
- freed_bytes += space->Free(free_start,
3761
- static_cast<int>(free_end - free_start));
3687
+ freed_bytes += Free<mode>(space, free_list, free_start,
3688
+ static_cast<int>(free_end - free_start));
3762
3689
  }
3763
3690
  }
3764
3691
  // Update our undigested record of where the current free area started.
@@ -3772,8 +3699,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
3772
3699
  // Handle the free space at the end of the page.
3773
3700
  if (block_address - free_start > 32 * kPointerSize) {
3774
3701
  free_start = DigestFreeStart(free_start, free_start_cell);
3775
- freed_bytes += space->Free(free_start,
3776
- static_cast<int>(block_address - free_start));
3702
+ freed_bytes += Free<mode>(space, free_list, free_start,
3703
+ static_cast<int>(block_address - free_start));
3777
3704
  }
3778
3705
 
3779
3706
  p->ResetLiveBytes();
@@ -3781,28 +3708,37 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
3781
3708
  }
3782
3709
 
3783
3710
 
3711
+ void MarkCompactCollector::SweepInParallel(PagedSpace* space,
3712
+ FreeList* private_free_list,
3713
+ FreeList* free_list) {
3714
+ PageIterator it(space);
3715
+ while (it.has_next()) {
3716
+ Page* p = it.next();
3717
+
3718
+ if (p->TryParallelSweeping()) {
3719
+ SweepConservatively<SWEEP_IN_PARALLEL>(space, private_free_list, p);
3720
+ free_list->Concatenate(private_free_list);
3721
+ }
3722
+ }
3723
+ }
3724
+
3725
+
3784
3726
  void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
3785
3727
  space->set_was_swept_conservatively(sweeper == CONSERVATIVE ||
3786
3728
  sweeper == LAZY_CONSERVATIVE);
3787
-
3788
3729
  space->ClearStats();
3789
3730
 
3790
3731
  PageIterator it(space);
3791
3732
 
3792
3733
  intptr_t freed_bytes = 0;
3793
3734
  int pages_swept = 0;
3794
- intptr_t newspace_size = space->heap()->new_space()->Size();
3795
3735
  bool lazy_sweeping_active = false;
3796
3736
  bool unused_page_present = false;
3797
3737
 
3798
- intptr_t old_space_size = heap()->PromotedSpaceSizeOfObjects();
3799
- intptr_t space_left =
3800
- Min(heap()->OldGenPromotionLimit(old_space_size),
3801
- heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;
3802
-
3803
3738
  while (it.has_next()) {
3804
3739
  Page* p = it.next();
3805
3740
 
3741
+ ASSERT(p->parallel_sweeping() == 0);
3806
3742
  // Clear sweeping flags indicating that marking bits are still intact.
3807
3743
  p->ClearSweptPrecisely();
3808
3744
  p->ClearSweptConservatively();
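SweepInParallel above lets several sweeper threads walk the same page list: each thread claims a page via TryParallelSweeping, sweeps it into its private free list, and then concatenates that list into the shared one, so no page is swept twice. A rough illustration of the claim step, using a plain C++ atomic flag as a stand-in for the page's parallel_sweeping state:

#include <atomic>
#include <cstdio>
#include <thread>
#include <vector>

// Stand-in for a heap page: 1 means "pending sweep", 0 means "claimed or done".
struct Page {
  std::atomic<int> parallel_sweeping{1};
  bool TryParallelSweeping() {
    int expected = 1;  // only the thread that flips 1 -> 0 may sweep this page
    return parallel_sweeping.compare_exchange_strong(expected, 0);
  }
};

int main() {
  std::vector<Page> pages(8);
  std::atomic<int> swept{0};
  auto sweeper = [&]() {
    for (Page& p : pages) {
      if (p.TryParallelSweeping()) swept++;  // the actual sweep would happen here
    }
  };
  std::thread a(sweeper), b(sweeper);
  a.join();
  b.join();
  std::printf("swept %d pages\n", swept.load());  // always 8, never 16
  return 0;
}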
@@ -3848,7 +3784,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
  PrintF("Sweeping 0x%" V8PRIxPTR " conservatively.\n",
  reinterpret_cast<intptr_t>(p));
  }
- SweepConservatively(space, p);
+ SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
  pages_swept++;
  break;
  }
@@ -3857,17 +3793,18 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
  PrintF("Sweeping 0x%" V8PRIxPTR " conservatively as needed.\n",
  reinterpret_cast<intptr_t>(p));
  }
- freed_bytes += SweepConservatively(space, p);
+ freed_bytes += SweepConservatively<SWEEP_SEQUENTIALLY>(space, NULL, p);
  pages_swept++;
- if (space_left + freed_bytes > newspace_size) {
- space->SetPagesToSweep(p->next_page());
- lazy_sweeping_active = true;
- } else {
- if (FLAG_gc_verbose) {
- PrintF("Only %" V8PRIdPTR " bytes freed. Still sweeping.\n",
- freed_bytes);
- }
+ space->SetPagesToSweep(p->next_page());
+ lazy_sweeping_active = true;
+ break;
+ }
+ case PARALLEL_CONSERVATIVE: {
+ if (FLAG_gc_verbose) {
+ PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n",
+ reinterpret_cast<intptr_t>(p));
  }
+ p->set_parallel_sweeping(1);
  break;
  }
  case PRECISE: {
@@ -3909,11 +3846,13 @@ void MarkCompactCollector::SweepSpaces() {
  FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
  if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE;
  if (sweep_precisely_) how_to_sweep = PRECISE;
+ if (AreSweeperThreadsActivated()) how_to_sweep = PARALLEL_CONSERVATIVE;
  // Noncompacting collections simply sweep the spaces to clear the mark
  // bits and free the nonlive blocks (for old and map spaces). We sweep
  // the map space last because freeing non-live maps overwrites them and
  // the other spaces rely on possibly non-live maps to get the sizes for
  // non-live objects.
+
  SweepSpace(heap()->old_pointer_space(), how_to_sweep);
  SweepSpace(heap()->old_data_space(), how_to_sweep);

@@ -3924,6 +3863,15 @@ void MarkCompactCollector::SweepSpaces() {

  EvacuateNewSpaceAndCandidates();

+ if (AreSweeperThreadsActivated()) {
+ // TODO(hpayer): The starting of the sweeper threads should be after
+ // SweepSpace old data space.
+ StartSweeperThreads();
+ if (FLAG_parallel_sweeping && !FLAG_concurrent_sweeping) {
+ WaitUntilSweepingCompleted();
+ }
+ }
+
  // ClearNonLiveTransitions depends on precise sweeping of map space to
  // detect whether unmarked map became dead in this collection or in one
  // of the previous ones.
@@ -3935,11 +3883,19 @@ void MarkCompactCollector::SweepSpaces() {


  void MarkCompactCollector::EnableCodeFlushing(bool enable) {
+ #ifdef ENABLE_DEBUGGER_SUPPORT
+ if (heap()->isolate()->debug()->IsLoaded() ||
+ heap()->isolate()->debug()->has_break_points()) {
+ enable = false;
+ }
+ #endif
+
  if (enable) {
  if (code_flusher_ != NULL) return;
  code_flusher_ = new CodeFlusher(heap()->isolate());
  } else {
  if (code_flusher_ == NULL) return;
+ code_flusher_->EvictAllCandidates();
  delete code_flusher_;
  code_flusher_ = NULL;
  }
@@ -3963,7 +3919,8 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,


  void MarkCompactCollector::Initialize() {
- StaticMarkingVisitor::Initialize();
+ MarkCompactMarkingVisitor::Initialize();
+ IncrementalMarking::Initialize();
  }


@@ -4039,6 +3996,20 @@ void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
  }


+ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
+ ASSERT(heap()->gc_state() == Heap::MARK_COMPACT);
+ if (is_compacting()) {
+ Code* host = heap()->isolate()->inner_pointer_to_code_cache()->
+ GcSafeFindCodeForInnerPointer(pc);
+ MarkBit mark_bit = Marking::MarkBitFrom(host);
+ if (Marking::IsBlack(mark_bit)) {
+ RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
+ RecordRelocSlot(&rinfo, target);
+ }
+ }
+ }
+
+
  static inline SlotsBuffer::SlotType DecodeSlotType(
  SlotsBuffer::ObjectSlot slot) {
  return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));