diff -Nru libv8-3.4.14.21/build/all.gyp libv8-3.5.10.24/build/all.gyp --- libv8-3.4.14.21/build/all.gyp 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/build/all.gyp 2011-08-10 11:27:35.000000000 +0000 @@ -1,4 +1,4 @@ -# Copyright (c) 2010 The Chromium Authors. All rights reserved. +# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,6 +8,7 @@ 'target_name': 'All', 'type': 'none', 'dependencies': [ + '../preparser/preparser.gyp:*', '../samples/samples.gyp:*', '../src/d8.gyp:d8', ], diff -Nru libv8-3.4.14.21/build/armu.gypi libv8-3.5.10.24/build/armu.gypi --- libv8-3.4.14.21/build/armu.gypi 2011-06-15 10:58:27.000000000 +0000 +++ libv8-3.5.10.24/build/armu.gypi 2011-08-10 11:27:35.000000000 +0000 @@ -32,5 +32,5 @@ 'armv7': 1, 'arm_neon': 0, 'arm_fpu': 'vfpv3', - } + }, } diff -Nru libv8-3.4.14.21/build/common.gypi libv8-3.5.10.24/build/common.gypi --- libv8-3.4.14.21/build/common.gypi 2011-06-15 10:58:27.000000000 +0000 +++ libv8-3.5.10.24/build/common.gypi 2011-08-29 10:41:00.000000000 +0000 @@ -1,4 +1,4 @@ -# Copyright 2010 the V8 project authors. All rights reserved. +# Copyright 2011 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -25,159 +25,266 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# Shared definitions for all V8-related targets. + { 'variables': { - 'library%': 'static_library', - 'component%': 'static_library', - 'visibility%': 'hidden', - 'variables': { - 'conditions': [ - [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', { - # This handles the Linux platforms we generally deal with. Anything - # else gets passed through, which probably won't work very well; such - # hosts should pass an explicit target_arch to gyp. - 'host_arch%': - '. + + Stopped using mprotect on Cygwin. + Avoided uninitialized member warning on gcc 4.3.4 + Both patches by Bert Belder. + + Bug fixes and performance improvements on all platforms. + + +2011-08-01: Version 3.5.2 + + Performance improvements on all platforms. + + +2011-07-28: Version 3.5.1 + + Fixed setting the readonly flag on the prototype property using the + API call FunctionTemplate::SetPrototypeAttributes (issue 1539). + + Changed the tools/test.py script to use d8 instead of shell for + testing. + + Fixed crash in ToBooleanStub when GC happens during invocation. + + Enabled automatic unboxing of double arrays. + + Performance improvements on all platforms. + + +2011-07-25: Version 3.5.0 + + Implemented Object.prototype.{hasOwnProperty, propertyIsEnumerable} for + proxies. + + Removed logging to memory support. + + Bugfixes and performance work. + + 2011-07-20: Version 3.4.14 Fix the debugger for strict-mode functions. (Chromium issue 89236) - Add GetPropertyAttribute method for Object in the API. (Patch by Peter Varga) + Add GetPropertyAttribute method for Object in the API. (Patch by + Peter Varga) Fix -Wunused-but-set-variable for gcc-4.6 on x64. 
(Issue 1291) diff -Nru libv8-3.4.14.21/debian/changelog libv8-3.5.10.24/debian/changelog --- libv8-3.4.14.21/debian/changelog 2011-12-05 09:05:42.000000000 +0000 +++ libv8-3.5.10.24/debian/changelog 2012-01-24 14:13:36.000000000 +0000 @@ -1,10 +1,56 @@ -libv8 (3.4.14.21-5ubuntu1) precise; urgency=low +libv8 (3.5.10.24-4~precise1~ppa1) precise; urgency=low - * Resolve FTBFS on Ubuntu armel (LP: #898558): - - d/rules: Update rules for armel to support armv7+ with VFP and - thumb2 enabled by default for Ubuntu. + * No-change backport to precise - -- James Page Mon, 05 Dec 2011 09:05:31 +0000 + -- James Page Tue, 24 Jan 2012 14:13:36 +0000 + +libv8 (3.5.10.24-4) unstable; urgency=low + + * v8_use_arm_eabi_hardfloat must be passed in GYPFLAGS, + this should fix armhf build. + * Ubuntu vendor needs different armel architecture settings. + + -- Jérémy Lal Mon, 05 Dec 2011 11:36:38 +0100 + +libv8 (3.5.10.24-3) unstable; urgency=low + + * Fix build failures for arm: + + arm_neon=0 for armhf + + vfp3=off for armel + Closes: bug#650548 + + -- Jérémy Lal Fri, 02 Dec 2011 10:17:48 +0100 + +libv8 (3.5.10.24-2) unstable; urgency=low + + * Set -Wno-unused-but-set-variable, i386 build fail otherwise. + Closes: bug#650547 + * Remove mipsel from architectures. Will be re-enabled when + upstream really supports it. + Closes: bug#650549 + * Disable default arm flags (debian/0014_disable_armv7_defaults.patch) and + set them properly for armel and armhf, using GYPFLAGS variable in + debian/rules. Closes: bug#650548 + * Remove 0009_unaligned_access_armel.patch, never proved it was needed. + * Remove -fvisibility=hidden flag, applied upstream. + * CCFLAGS are ignored by the build system, use CXXFLAGS instead. + + -- Jérémy Lal Thu, 01 Dec 2011 14:31:59 +0100 + +libv8 (3.5.10.24-1) unstable; urgency=low + + * New upstream release. + + [ Jérémy Lal ] + * Build using gyp (instead of deprecated scons build): + + removed scons patches + + added gyp patches to achieve the same result. + * Remove mipsel architecture, not yet supported upstream. + * Tests are run using d8 instead of shell. + * Allow parallel builds. + * Update watch file: Track 3.5.x releases. + + -- Jonas Smedegaard Sat, 26 Nov 2011 00:34:40 +0700 libv8 (3.4.14.21-5) unstable; urgency=low diff -Nru libv8-3.4.14.21/debian/control libv8-3.5.10.24/debian/control --- libv8-3.4.14.21/debian/control 2011-12-01 11:38:10.000000000 +0000 +++ libv8-3.5.10.24/debian/control 2011-12-02 13:26:53.000000000 +0000 @@ -1,14 +1,12 @@ Source: libv8 Priority: optional -Maintainer: Ubuntu Developers -XSBC-Original-Maintainer: Debian Javascript Maintainers +Maintainer: Debian Javascript Maintainers Uploaders: Jérémy Lal , Jonas Smedegaard -Build-Depends: devscripts, - cdbs (>= 0.4.73~), - scons, +Build-Depends: cdbs (>= 0.4.72~), + devscripts, debhelper, - dh-buildinfo, libicu-dev + dh-buildinfo, libicu-dev, gyp Standards-Version: 3.9.2 Section: libs Homepage: http://code.google.com/p/v8/ @@ -18,8 +16,8 @@ Package: libv8-dev Section: libdevel -Architecture: i386 amd64 armel armhf mipsel -Depends: libv8-3.4.14.21 (= ${binary:Version}), ${misc:Depends} +Architecture: i386 amd64 armel armhf +Depends: libv8-3.5.10.24 (= ${binary:Version}), ${misc:Depends} Description: v8 JavaScript engine - development files V8 is a high performance JavaScript engine written in C++. It is used in the web browser Chromium. @@ -27,8 +25,8 @@ Install this package if you wish to develop your own programs using the v8 JavaScript engine. 
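
With scons dropped from Build-Depends in favour of the packaged gyp, a rebuild from this source goes through the usual cdbs/debhelper flow. A minimal sketch, assuming the updated source tree is unpacked, deb-src entries are available, and the host architecture is one of the four still listed in the control file:

    # pull in the build dependencies declared in debian/control (now gyp, no longer scons)
    sudo apt-get build-dep libv8
    # build the binary packages: libv8-3.5.10.24, libv8-dev and libv8-dbg
    dpkg-buildpackage -us -uc -b
    # sanity-check the contents of the runtime package
    dpkg -c ../libv8-3.5.10.24_*.deb
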
-Package: libv8-3.4.14.21 -Architecture: i386 amd64 armel armhf mipsel +Package: libv8-3.5.10.24 +Architecture: i386 amd64 armel armhf Depends: ${shlibs:Depends}, ${misc:Depends} Description: v8 JavaScript engine - runtime library V8 is a high performance JavaScript engine written in C++. It is used @@ -40,8 +38,8 @@ Package: libv8-dbg Priority: extra Section: debug -Architecture: i386 amd64 armel armhf mipsel -Depends: libv8-3.4.14.21 (= ${binary:Version}), ${misc:Depends} +Architecture: i386 amd64 armel armhf +Depends: libv8-3.5.10.24 (= ${binary:Version}), ${misc:Depends} Description: v8 JavaScript engine - debugging symbols V8 is a high performance JavaScript engine written in C++. It is used in the web browser Chromium. diff -Nru libv8-3.4.14.21/debian/control.in libv8-3.5.10.24/debian/control.in --- libv8-3.4.14.21/debian/control.in 2011-12-05 09:37:32.000000000 +0000 +++ libv8-3.5.10.24/debian/control.in 2012-01-24 14:13:38.000000000 +0000 @@ -3,7 +3,7 @@ Maintainer: Debian Javascript Maintainers Uploaders: Jérémy Lal , Jonas Smedegaard -Build-Depends: @cdbs@, libicu-dev +Build-Depends: @cdbs@, libicu-dev, gyp Standards-Version: 3.9.2 Section: libs Homepage: http://code.google.com/p/v8/ @@ -13,8 +13,8 @@ Package: libv8-dev Section: libdevel -Architecture: i386 amd64 armel armhf mipsel -Depends: libv8-3.4.14.21 (= ${binary:Version}), ${misc:Depends} +Architecture: i386 amd64 armel armhf +Depends: libv8-3.5.10.24 (= ${binary:Version}), ${misc:Depends} Description: v8 JavaScript engine - development files V8 is a high performance JavaScript engine written in C++. It is used in the web browser Chromium. @@ -22,8 +22,8 @@ Install this package if you wish to develop your own programs using the v8 JavaScript engine. -Package: libv8-3.4.14.21 -Architecture: i386 amd64 armel armhf mipsel +Package: libv8-3.5.10.24 +Architecture: i386 amd64 armel armhf Depends: ${shlibs:Depends}, ${misc:Depends} Description: v8 JavaScript engine - runtime library V8 is a high performance JavaScript engine written in C++. It is used @@ -35,8 +35,8 @@ Package: libv8-dbg Priority: extra Section: debug -Architecture: i386 amd64 armel armhf mipsel -Depends: libv8-3.4.14.21 (= ${binary:Version}), ${misc:Depends} +Architecture: i386 amd64 armel armhf +Depends: libv8-3.5.10.24 (= ${binary:Version}), ${misc:Depends} Description: v8 JavaScript engine - debugging symbols V8 is a high performance JavaScript engine written in C++. It is used in the web browser Chromium. diff -Nru libv8-3.4.14.21/debian/control.in.in libv8-3.5.10.24/debian/control.in.in --- libv8-3.4.14.21/debian/control.in.in 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/control.in.in 2011-12-01 13:54:38.000000000 +0000 @@ -3,7 +3,7 @@ Maintainer: Debian Javascript Maintainers Uploaders: Jérémy Lal , Jonas Smedegaard -Build-Depends: @cdbs@, libicu-dev +Build-Depends: @cdbs@, libicu-dev, gyp Standards-Version: 3.9.2 Section: libs Homepage: http://code.google.com/p/v8/ @@ -13,7 +13,7 @@ Package: libv8-dev Section: libdevel -Architecture: i386 amd64 armel armhf mipsel +Architecture: i386 amd64 armel armhf Depends: __LIBPKGNAME__ (= ${binary:Version}), ${misc:Depends} Description: v8 JavaScript engine - development files V8 is a high performance JavaScript engine written in C++. It is used @@ -23,7 +23,7 @@ v8 JavaScript engine. 
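
The control.in.in template that follows keeps the versioned runtime package name as a __LIBPKGNAME__ placeholder; debian/rules (later in this diff) derives it from the upstream version and substitutes it via sed. Outside the cdbs machinery, that step amounts to roughly the following sketch (the redirection target is an assumption, the substitution command itself is the one from debian/rules):

    # SONAME follows the upstream version, as in debian/rules
    SONAME=3.5.10.24
    LIBPKGNAME="libv8-${SONAME}"
    # expand the template placeholder into a concrete control file
    sed -e "s/__LIBPKGNAME__/${LIBPKGNAME}/g" debian/control.in > debian/control
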
Package: __LIBPKGNAME__ -Architecture: i386 amd64 armel armhf mipsel +Architecture: i386 amd64 armel armhf Depends: ${shlibs:Depends}, ${misc:Depends} Description: v8 JavaScript engine - runtime library V8 is a high performance JavaScript engine written in C++. It is used @@ -35,7 +35,7 @@ Package: libv8-dbg Priority: extra Section: debug -Architecture: i386 amd64 armel armhf mipsel +Architecture: i386 amd64 armel armhf Depends: __LIBPKGNAME__ (= ${binary:Version}), ${misc:Depends} Description: v8 JavaScript engine - debugging symbols V8 is a high performance JavaScript engine written in C++. It is used diff -Nru libv8-3.4.14.21/debian/copyright libv8-3.5.10.24/debian/copyright --- libv8-3.4.14.21/debian/copyright 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/copyright 2011-12-02 13:26:53.000000000 +0000 @@ -22,7 +22,7 @@ License: BSD-3-clause Files: src/third-party/valgrind/valgrind.h -Copyright: 2000-2007, Julian Seward +Copyright: 2000-2010, Julian Seward License: BSD-4-clause Files: test/mjsunit/third_party/regexp-pcre.js diff -Nru libv8-3.4.14.21/debian/copyright_hints libv8-3.5.10.24/debian/copyright_hints --- libv8-3.4.14.21/debian/copyright_hints 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/copyright_hints 2011-12-02 13:26:53.000000000 +0000 @@ -6,6 +6,7 @@ Files: LICENSE LICENSE.v8 + Makefile SConstruct benchmarks/base.js benchmarks/regexp.js @@ -15,7 +16,7 @@ build/armu.gypi build/common.gypi build/gyp_v8 - build/v8-features.gypi + build/standalone.gypi include/v8-debug.h include/v8-preparser.h include/v8-profiler.h @@ -24,6 +25,7 @@ include/v8stdint.h preparser/SConscript preparser/preparser-process.cc + preparser/preparser.gyp samples/SConscript samples/count-hosts.js samples/lineprocessor.cc @@ -147,6 +149,8 @@ src/double.h src/dtoa.cc src/dtoa.h + src/elements.cc + src/elements.h src/execution.cc src/execution.h src/extensions/experimental/break-iterator.cc @@ -362,7 +366,6 @@ src/scopes.h src/serialize.cc src/serialize.h - src/shell.h src/simulator.h src/small-pointer-list.h src/smart-pointer.h @@ -418,6 +421,7 @@ src/version.h src/vm-state-inl.h src/vm-state.h + src/weakmap.js src/win32-headers.h src/x64/assembler-x64-inl.h src/x64/assembler-x64.cc @@ -478,6 +482,7 @@ test/cctest/test-debug.cc test/cctest/test-decls.cc test/cctest/test-deoptimization.cc + test/cctest/test-dictionary.cc test/cctest/test-disasm-arm.cc test/cctest/test-disasm-ia32.cc test/cctest/test-disasm-mips.cc @@ -501,6 +506,7 @@ test/cctest/test-threads.cc test/cctest/test-utils.cc test/cctest/test-version.cc + test/cctest/test-weakmaps.cc test/cctest/testcfg.py test/es5conform/es5conform.status test/es5conform/harness-adapt.js @@ -591,6 +597,7 @@ test/mjsunit/bugs/bug-617.js test/mjsunit/bugs/bug-618.js test/mjsunit/bugs/bug-941049.js + test/mjsunit/bugs/harmony/debug-blockscopes.js test/mjsunit/call-non-function-call.js test/mjsunit/call-non-function.js test/mjsunit/call-stub.js @@ -664,6 +671,7 @@ test/mjsunit/compiler/regress-6.js test/mjsunit/compiler/regress-7.js test/mjsunit/compiler/regress-8.js + test/mjsunit/compiler/regress-96989.js test/mjsunit/compiler/regress-arguments.js test/mjsunit/compiler/regress-arrayliteral.js test/mjsunit/compiler/regress-closures-with-eval.js @@ -832,7 +840,16 @@ test/mjsunit/global-vars-eval.js test/mjsunit/global-vars-with.js test/mjsunit/greedy.js + test/mjsunit/harmony/block-lazy-compile.js + test/mjsunit/harmony/block-let-crankshaft.js + test/mjsunit/harmony/block-let-declaration.js + 
test/mjsunit/harmony/block-let-semantics.js + test/mjsunit/harmony/block-scoping.js + test/mjsunit/harmony/debug-blockscopes.js + test/mjsunit/harmony/debug-evaluate-blockscopes.js test/mjsunit/harmony/proxies.js + test/mjsunit/harmony/typeof.js + test/mjsunit/harmony/weakmaps.js test/mjsunit/has-own-property.js test/mjsunit/hex-parsing.js test/mjsunit/html-comments.js @@ -938,10 +955,12 @@ test/mjsunit/regexp-string-methods.js test/mjsunit/regexp.js test/mjsunit/regress/bitops-register-alias.js + test/mjsunit/regress/regress-100409.js test/mjsunit/regress/regress-1015.js test/mjsunit/regress/regress-1017.js test/mjsunit/regress/regress-1020.js test/mjsunit/regress/regress-1030466.js + test/mjsunit/regress/regress-103259.js test/mjsunit/regress/regress-1036894.js test/mjsunit/regress/regress-1039610.js test/mjsunit/regress/regress-1050043.js @@ -1043,6 +1062,7 @@ test/mjsunit/regress/regress-1401.js test/mjsunit/regress/regress-1403.js test/mjsunit/regress/regress-1412.js + test/mjsunit/regress/regress-1419.js test/mjsunit/regress/regress-1423.js test/mjsunit/regress/regress-1434.js test/mjsunit/regress/regress-1436.js @@ -1059,11 +1079,16 @@ test/mjsunit/regress/regress-1528.js test/mjsunit/regress/regress-1529.js test/mjsunit/regress/regress-1531.js + test/mjsunit/regress/regress-1546.js test/mjsunit/regress/regress-155924.js test/mjsunit/regress/regress-1560.js + test/mjsunit/regress/regress-1563.js test/mjsunit/regress/regress-1582.js test/mjsunit/regress/regress-1583.js + test/mjsunit/regress/regress-1586.js test/mjsunit/regress/regress-1592.js + test/mjsunit/regress/regress-1620.js + test/mjsunit/regress/regress-1625.js test/mjsunit/regress/regress-1647.js test/mjsunit/regress/regress-1650.js test/mjsunit/regress/regress-171.js @@ -1215,6 +1240,9 @@ test/mjsunit/regress/regress-900055.js test/mjsunit/regress/regress-900966.js test/mjsunit/regress/regress-91.js + test/mjsunit/regress/regress-91008.js + test/mjsunit/regress/regress-91010.js + test/mjsunit/regress/regress-91013.js test/mjsunit/regress/regress-91120.js test/mjsunit/regress/regress-91517.js test/mjsunit/regress/regress-91787.js @@ -1225,13 +1253,18 @@ test/mjsunit/regress/regress-937896.js test/mjsunit/regress/regress-944.js test/mjsunit/regress/regress-94425.js + test/mjsunit/regress/regress-95113.js + test/mjsunit/regress/regress-95485.js test/mjsunit/regress/regress-955.js test/mjsunit/regress/regress-95920.js test/mjsunit/regress/regress-962.js + test/mjsunit/regress/regress-96523.js test/mjsunit/regress/regress-969.js test/mjsunit/regress/regress-974.js test/mjsunit/regress/regress-982.js + test/mjsunit/regress/regress-98773.js test/mjsunit/regress/regress-990205.js + test/mjsunit/regress/regress-99167.js test/mjsunit/regress/regress-992.js test/mjsunit/regress/regress-992733.js test/mjsunit/regress/regress-995.js @@ -1259,6 +1292,7 @@ test/mjsunit/regress/splice-missing-wb.js test/mjsunit/samevalue.js test/mjsunit/scanner.js + test/mjsunit/scope-calls-eval.js test/mjsunit/search-string-multiple.js test/mjsunit/setter-on-constructor-prototype.js test/mjsunit/shifts.js @@ -1297,6 +1331,8 @@ test/mjsunit/string-replace-with-empty.js test/mjsunit/string-replace.js test/mjsunit/string-search.js + test/mjsunit/string-slices-regexp.js + test/mjsunit/string-slices.js test/mjsunit/string-split-cache.js test/mjsunit/string-split.js test/mjsunit/substr.js @@ -1394,6 +1430,7 @@ tools/run-valgrind.py tools/splaytree.js tools/stats-viewer.py + tools/test-wrapper-gypbuild.py tools/test.py tools/tickprocessor-driver.js 
tools/tickprocessor.js @@ -1432,11 +1469,14 @@ debian/libv8-dev.install debian/libv8.dirs debian/libv8.install - debian/patches/0003-add-the-install-target.patch - debian/patches/0004-gcc-opt-level-2.patch debian/patches/0005-enable-i18n-extension.patch - debian/patches/0006-gcc46-compat.patch - debian/patches/0007_armhf.patch + debian/patches/0008_mksnapshot_stdout.patch + debian/patches/0009_unaligned_access_armel.patch + debian/patches/0010_fix_arm_bug.patch + debian/patches/0011_use_system_gyp.patch + debian/patches/0012_make_check_testsuites.patch + debian/patches/0013_enable_soname.patch + debian/patches/0014_disable_armv7_defaults.patch debian/patches/series debian/rules debian/source/format @@ -1545,6 +1585,7 @@ Files: LICENSE.valgrind src/third_party/valgrind/valgrind.h Copyright: 2000-2007, Julian Seward. + 2000-2010, Julian Seward. License: BSD (3 clause) FIXME @@ -1655,7 +1696,7 @@ FIXME Files: build/all.gyp -Copyright: 2010, The Chromium Authors. +Copyright: 2011, The Chromium Authors. License: UNKNOWN FIXME diff -Nru libv8-3.4.14.21/debian/gbp.conf libv8-3.5.10.24/debian/gbp.conf --- libv8-3.4.14.21/debian/gbp.conf 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/gbp.conf 2011-12-02 13:26:53.000000000 +0000 @@ -1,10 +1,10 @@ [DEFAULT] # master and upstream branches are uploaded to unstable -upstream-branch = upstream-3.4 -debian-branch = master-3.4 +upstream-branch = upstream +debian-branch = master # there are separate branches for stable and experimental -# upstream-branch = upstream- -# debian-branch = master- +#upstream-branch = upstream-experimental +#debian-branch = master-experimental pristine-tar = True sign-tags = True diff -Nru libv8-3.4.14.21/debian/libv8-dev.install libv8-3.5.10.24/debian/libv8-dev.install --- libv8-3.4.14.21/debian/libv8-dev.install 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/libv8-dev.install 2011-11-16 08:34:30.000000000 +0000 @@ -1,2 +1,3 @@ -debian/tmp/usr/include -debian/tmp/usr/lib/libv8.so +include usr +src/extensions/experimental/i18n-extension.h usr/include +debian/tmp/usr/lib/libv8.so usr/lib diff -Nru libv8-3.4.14.21/debian/libv8.install libv8-3.5.10.24/debian/libv8.install --- libv8-3.4.14.21/debian/libv8.install 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/libv8.install 2011-11-16 08:34:30.000000000 +0000 @@ -1 +1 @@ -usr/lib/lib*.so.* +libv8.so.* usr/lib diff -Nru libv8-3.4.14.21/debian/patches/0003-add-the-install-target.patch libv8-3.5.10.24/debian/patches/0003-add-the-install-target.patch --- libv8-3.4.14.21/debian/patches/0003-add-the-install-target.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0003-add-the-install-target.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ -Description: add the install target - Noteworthy : Here are defined the headers for libv8-dev -Forwarded: not-needed -Author: Antonio Radici -Last-Update: 2011-01-20 - ---- a/SConstruct -+++ b/SConstruct -@@ -1063,6 +1063,7 @@ - result.Add('mode', 'compilation mode (debug, release)', 'release') - result.Add('sample', 'build sample (shell, process, lineprocessor)', '') - result.Add('cache', 'directory to use for scons build cache', '') -+ result.Add('install', 'install result files', '') - result.Add('env', 'override environment settings (NAME0:value0,NAME1:value1,...)', '') - result.Add('importenv', 'import environment settings (NAME0,NAME1,...)', '') - AddOptions(PLATFORM_OPTIONS, result) -@@ -1463,8 +1464,26 @@ - env.Alias('d8', d8s) - env.Alias('preparser', preparsers) 
- -+ install_dirs = [] -+ destdir = ARGUMENTS.get('DESTDIR', '') -+ prefix = ARGUMENTS.get('PREFIX', '/usr') -+ libdir = [ destdir + prefix + '/lib/' ] -+ incdir = [ destdir + prefix + '/include/' ] -+ for lib in libraries: -+ env.Install(libdir, lib[0]) -+ if dir not in install_dirs: -+ install_dirs += libdir -+ env.Install(incdir, [ 'include/v8.h', 'include/v8-debug.h', 'include/v8-profiler.h', 'include/v8-preparser.h', 'include/v8-testing.h', 'include/v8stdint.h' ]); -+ install_dirs += incdir -+ env.Alias('install', install_dirs) -+ -+ # Always build at least the library -+ env.Default('library') -+ - if env['sample']: - env.Default('sample') -+ elif env['install']: -+ env.Default('install') - else: - env.Default('library') - diff -Nru libv8-3.4.14.21/debian/patches/0004-gcc-opt-level-2.patch libv8-3.5.10.24/debian/patches/0004-gcc-opt-level-2.patch --- libv8-3.4.14.21/debian/patches/0004-gcc-opt-level-2.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0004-gcc-opt-level-2.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -Description: Policy asks for gcc -02 -Forwarded: not-needed -Author: Jérémy Lal -Last-Update: 2011-02-16 ---- a/SConstruct -+++ b/SConstruct -@@ -94,7 +94,7 @@ - 'CPPDEFINES': ['ENABLE_DISASSEMBLER', 'DEBUG'], - }, - 'mode:release': { -- 'CCFLAGS': ['-O3', '-fomit-frame-pointer', '-fdata-sections', -+ 'CCFLAGS': ['-O2', '-fomit-frame-pointer', '-fdata-sections', - '-ffunction-sections'], - }, - 'os:linux': { diff -Nru libv8-3.4.14.21/debian/patches/0005-enable-i18n-extension.patch libv8-3.5.10.24/debian/patches/0005-enable-i18n-extension.patch --- libv8-3.4.14.21/debian/patches/0005-enable-i18n-extension.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0005-enable-i18n-extension.patch 2011-12-02 13:26:53.000000000 +0000 @@ -1,97 +1,26 @@ Description: Enable the i18n extension needed by chromium 11 - * src/SConscript is modified to mimic what src/extension/experimental/experimental.gyp does. - * in SConstruct, -I. is added (root_dir to CPPPATH) because the extension/experimental files are relative to it. + * tools/gyp/v8.gyp is modified to mimic what src/extension/experimental/experimental.gyp does. + * internalize src/extension/experimental paths, remove src/ and include/ * silence a gcc warning about comparison of signed and unsigned integers in language-matcher.cc * fix include of "v8.h" in i18n-extension.h (Closes: #639522) - * do not put I18NExtension in v8::internal namespace. 
(Closes: #639596) + * do not put I18NExtension in v8::internal namespace, use v8_i18n (same name as the externalized version) (Closes: #639596) Forwarded: not-needed Author: Giuseppe Iuculano , Jérémy Lal -Last-Update: 2011-09-04 ---- a/src/SConscript -+++ b/src/SConscript -@@ -134,6 +134,15 @@ - zone.cc - extensions/gc-extension.cc - extensions/externalize-string-extension.cc -+ extensions/experimental/i18n-extension.cc -+ extensions/experimental/break-iterator.cc -+ extensions/experimental/collator.cc -+ extensions/experimental/datetime-format.cc -+ extensions/experimental/i18n-locale.cc -+ extensions/experimental/i18n-utils.cc -+ extensions/experimental/language-matcher.cc -+ extensions/experimental/number-format.cc -+ extensions/experimental/i18n-js.cc - """), - 'arch:arm': Split(""" - arm/builtins-arm.cc -@@ -320,6 +329,7 @@ - env.Replace(**context.flags['v8']) - context.ApplyEnvOverrides(env) - env['BUILDERS']['JS2C'] = Builder(action=js2c.JS2C) -+ env['BUILDERS']['I18NJS2C'] = Builder(action='python src/extensions/experimental/i18n-js2c.py $TARGET $SOURCE') - env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions') - - def BuildJS2CEnv(type): -@@ -340,6 +350,8 @@ - d8_objs = [d8_js_obj] - d8_objs.append(context.ConfigureObject(env, [d8_files])) - -+ env.I18NJS2C('extensions/experimental/i18n-js.cc', 'extensions/experimental/i18n.js', **BuildJS2CEnv('I18N')) -+ - # Combine the JavaScript library files into a single C++ file and - # compile it. - library_files = [s for s in LIBRARY_FILES] ---- a/SConstruct -+++ b/SConstruct -@@ -53,7 +53,7 @@ - - LIBRARY_FLAGS = { - 'all': { -- 'CPPPATH': [join(root_dir, 'src')], -+ 'CPPPATH': [join(root_dir, 'src'), root_dir], - 'regexp:interpreted': { - 'CPPDEFINES': ['V8_INTERPRETED_REGEXP'] - }, -@@ -101,7 +101,7 @@ - 'CCFLAGS': ['-ansi'] + GCC_EXTRA_CCFLAGS, - 'library:shared': { - 'CPPDEFINES': ['V8_SHARED'], -- 'LIBS': ['pthread'] -+ 'LIBS': ['pthread', 'icui18n', 'icuuc', 'icudata'] - } - }, - 'os:macos': { -@@ -340,7 +340,7 @@ - MKSNAPSHOT_EXTRA_FLAGS = { - 'gcc': { - 'os:linux': { -- 'LIBS': ['pthread'], -+ 'LIBS': ['pthread', 'icui18n', 'icuuc', 'icudata'], - }, - 'os:macos': { - 'LIBS': ['pthread'], -@@ -457,7 +457,7 @@ - 'LINKFLAGS': ['$CCFLAGS'], - }, - 'os:linux': { -- 'LIBS': ['pthread'], -+ 'LIBS': ['pthread', 'icui18n', 'icuuc', 'icudata'], - }, - 'os:macos': { - 'LIBS': ['pthread'], -@@ -1473,7 +1473,7 @@ - env.Install(libdir, lib[0]) - if dir not in install_dirs: - install_dirs += libdir -- env.Install(incdir, [ 'include/v8.h', 'include/v8-debug.h', 'include/v8-profiler.h', 'include/v8-preparser.h', 'include/v8-testing.h', 'include/v8stdint.h' ]); -+ env.Install(incdir, [ 'include/v8.h', 'include/v8-debug.h', 'include/v8-profiler.h', 'include/v8-preparser.h', 'include/v8-testing.h', 'include/v8stdint.h', 'src/extensions/experimental/i18n-extension.h' ]); - install_dirs += incdir - env.Alias('install', install_dirs) - +Last-Update: 2011-10-25 --- a/src/extensions/experimental/language-matcher.cc +++ b/src/extensions/experimental/language-matcher.cc -@@ -37,13 +37,12 @@ +@@ -28,22 +28,21 @@ + // TODO(cira): Remove LanguageMatcher from v8 when ICU implements + // language matching API. 
+ +-#include "src/extensions/experimental/language-matcher.h" ++#include "extensions/experimental/language-matcher.h" + + #include + +-#include "src/extensions/experimental/i18n-utils.h" ++#include "extensions/experimental/i18n-utils.h" + #include "unicode/datefmt.h" // For getAvailableLocales #include "unicode/locid.h" #include "unicode/uloc.h" @@ -115,7 +44,12 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/language-matcher.h +++ b/src/extensions/experimental/language-matcher.h -@@ -32,8 +32,7 @@ +@@ -28,12 +28,11 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_LANGUAGE_MATCHER_H_ + #define V8_EXTENSIONS_EXPERIMENTAL_LANGUAGE_MATCHER_H_ + +-#include "include/v8.h" ++#include "v8.h" #include "unicode/uloc.h" @@ -185,6 +119,15 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_EXTENSION_H_ --- a/src/extensions/experimental/break-iterator.cc +++ b/src/extensions/experimental/break-iterator.cc +@@ -25,7 +25,7 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#include "src/extensions/experimental/break-iterator.h" ++#include "extensions/experimental/break-iterator.h" + + #include + @@ -33,8 +33,7 @@ #include "unicode/locid.h" #include "unicode/rbbi.h" @@ -203,6 +146,15 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/break-iterator.h +++ b/src/extensions/experimental/break-iterator.h +@@ -28,7 +28,7 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_BREAK_ITERATOR_H_ + #define V8_EXTENSIONS_EXPERIMENTAL_BREAK_ITERATOR_H_ + +-#include "include/v8.h" ++#include "v8.h" + + #include "unicode/uversion.h" + @@ -37,8 +37,7 @@ class UnicodeString; } @@ -223,7 +175,14 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_BREAK_ITERATOR_H_ --- a/src/extensions/experimental/collator.cc +++ b/src/extensions/experimental/collator.cc -@@ -31,8 +31,7 @@ +@@ -25,14 +25,13 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#include "src/extensions/experimental/collator.h" ++#include "extensions/experimental/collator.h" + + #include "unicode/coll.h" #include "unicode/locid.h" #include "unicode/ucol.h" @@ -241,6 +200,15 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/collator.h +++ b/src/extensions/experimental/collator.h +@@ -28,7 +28,7 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H + #define V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H_ + +-#include "include/v8.h" ++#include "v8.h" + + #include "unicode/uversion.h" + @@ -37,8 +37,7 @@ class UnicodeString; } @@ -261,7 +229,19 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_COLLATOR --- a/src/extensions/experimental/datetime-format.cc +++ b/src/extensions/experimental/datetime-format.cc -@@ -35,8 +35,7 @@ +@@ -25,18 +25,17 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-#include "src/extensions/experimental/datetime-format.h" ++#include "extensions/experimental/datetime-format.h" + + #include + +-#include "src/extensions/experimental/i18n-utils.h" ++#include "extensions/experimental/i18n-utils.h" + #include "unicode/dtfmtsym.h" + #include "unicode/dtptngen.h" #include "unicode/locid.h" #include "unicode/smpdtfmt.h" @@ -279,6 +259,15 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/datetime-format.h +++ b/src/extensions/experimental/datetime-format.h +@@ -28,7 +28,7 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_DATETIME_FORMAT_H_ + #define V8_EXTENSIONS_EXPERIMENTAL_DATETIME_FORMAT_H_ + +-#include "include/v8.h" ++#include "v8.h" + + #include "unicode/uversion.h" + @@ -36,8 +36,7 @@ class SimpleDateFormat; } @@ -299,9 +288,25 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_DATETIME_FORMAT_H_ --- a/src/extensions/experimental/i18n-extension.cc +++ b/src/extensions/experimental/i18n-extension.cc -@@ -34,8 +34,7 @@ - #include "src/extensions/experimental/i18n-natives.h" - #include "src/extensions/experimental/number-format.h" +@@ -25,17 +25,16 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#include "src/extensions/experimental/i18n-extension.h" ++#include "extensions/experimental/i18n-extension.h" + +-#include "src/extensions/experimental/break-iterator.h" +-#include "src/extensions/experimental/collator.h" +-#include "src/extensions/experimental/datetime-format.h" +-#include "src/extensions/experimental/i18n-locale.h" +-#include "src/extensions/experimental/i18n-natives.h" +-#include "src/extensions/experimental/number-format.h" ++#include "extensions/experimental/break-iterator.h" ++#include "extensions/experimental/collator.h" ++#include "extensions/experimental/datetime-format.h" ++#include "extensions/experimental/i18n-locale.h" ++#include "extensions/experimental/i18n-natives.h" ++#include "extensions/experimental/number-format.h" -namespace v8 { -namespace internal { @@ -317,7 +322,17 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/i18n-locale.cc +++ b/src/extensions/experimental/i18n-locale.cc -@@ -32,8 +32,7 @@ +@@ -25,15 +25,14 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#include "src/extensions/experimental/i18n-locale.h" ++#include "extensions/experimental/i18n-locale.h" + +-#include "src/extensions/experimental/i18n-utils.h" +-#include "src/extensions/experimental/language-matcher.h" ++#include "extensions/experimental/i18n-utils.h" ++#include "extensions/experimental/language-matcher.h" #include "unicode/locid.h" #include "unicode/uloc.h" @@ -335,9 +350,12 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/i18n-locale.h +++ b/src/extensions/experimental/i18n-locale.h -@@ -30,8 +30,7 @@ +@@ -28,10 +28,9 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_ + #define V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_ - #include "include/v8.h" +-#include "include/v8.h" ++#include "v8.h" -namespace v8 { -namespace internal { @@ -375,7 +393,14 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_NATIVES_H_ --- a/src/extensions/experimental/i18n-utils.cc +++ b/src/extensions/experimental/i18n-utils.cc -@@ -31,8 +31,7 @@ +@@ -25,14 +25,13 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-#include "src/extensions/experimental/i18n-utils.h" ++#include "extensions/experimental/i18n-utils.h" + + #include #include "unicode/unistr.h" @@ -393,6 +418,15 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/i18n-utils.h +++ b/src/extensions/experimental/i18n-utils.h +@@ -28,7 +28,7 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_UTILS_H_ + #define V8_EXTENSIONS_EXPERIMENTAL_I18N_UTILS_H_ + +-#include "include/v8.h" ++#include "v8.h" + + #include "unicode/uversion.h" + @@ -36,8 +36,7 @@ class UnicodeString; } @@ -413,9 +447,23 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_UTILS_H_ --- a/src/extensions/experimental/number-format.cc +++ b/src/extensions/experimental/number-format.cc -@@ -37,8 +37,7 @@ - #include "unicode/uchar.h" - #include "unicode/ucurr.h" +@@ -25,11 +25,11 @@ + // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#include "src/extensions/experimental/number-format.h" ++#include "extensions/experimental/number-format.h" + + #include + +-#include "src/extensions/experimental/i18n-utils.h" ++#include "extensions/experimental/i18n-utils.h" + #include "unicode/dcfmtsym.h" + #include "unicode/decimfmt.h" + #include "unicode/locid.h" +@@ -39,8 +39,7 @@ + #include "unicode/unum.h" + #include "unicode/uversion.h" -namespace v8 { -namespace internal { @@ -423,7 +471,7 @@ const int NumberFormat::kCurrencyCodeLength = 4; -@@ -353,4 +352,4 @@ +@@ -371,4 +370,4 @@ "that is not a NumberFormat."))); } @@ -431,6 +479,15 @@ +} // namespace v8_i18n --- a/src/extensions/experimental/number-format.h +++ b/src/extensions/experimental/number-format.h +@@ -28,7 +28,7 @@ + #ifndef V8_EXTENSIONS_EXPERIMENTAL_NUMBER_FORMAT_H_ + #define V8_EXTENSIONS_EXPERIMENTAL_NUMBER_FORMAT_H_ + +-#include "include/v8.h" ++#include "v8.h" + + #include "unicode/uversion.h" + @@ -36,8 +36,7 @@ class DecimalFormat; } @@ -451,9 +508,12 @@ #endif // V8_EXTENSIONS_EXPERIMENTAL_NUMBER_FORMAT_H_ --- a/src/extensions/experimental/i18n-js2c.py +++ b/src/extensions/experimental/i18n-js2c.py -@@ -87,8 +87,7 @@ +@@ -85,10 +85,9 @@ + // want to make changes to this file you should either change the + // javascript source files or the i18n-js2c.py script. 
- #include "src/extensions/experimental/i18n-natives.h" +-#include "src/extensions/experimental/i18n-natives.h" ++#include "extensions/experimental/i18n-natives.h" -namespace v8 { -namespace internal { @@ -471,3 +531,132 @@ """ +--- a/tools/gyp/v8.gyp ++++ b/tools/gyp/v8.gyp +@@ -89,10 +89,10 @@ + 'conditions': [ + ['want_separate_host_toolset==1', { + 'toolsets': ['host', 'target'], +- 'dependencies': ['mksnapshot#host', 'js2c#host'], ++ 'dependencies': ['mksnapshot#host', 'js2c#host', 'js2c_i18n#host'], + }, { + 'toolsets': ['target'], +- 'dependencies': ['mksnapshot', 'js2c'], ++ 'dependencies': ['mksnapshot', 'js2c', 'js2c_i18n'], + }], + ['component=="shared_library"', { + 'conditions': [ +@@ -127,6 +127,7 @@ + 'sources': [ + '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', + '<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc', ++ '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc', + '<(INTERMEDIATE_DIR)/snapshot.cc', + ], + 'actions': [ +@@ -197,15 +198,16 @@ + 'sources': [ + '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', + '<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc', ++ '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc', + '../../src/snapshot-empty.cc', + ], + 'conditions': [ + ['want_separate_host_toolset==1', { + 'toolsets': ['host', 'target'], +- 'dependencies': ['js2c#host'], ++ 'dependencies': ['js2c#host', 'js2c_i18n#host'], + }, { + 'toolsets': ['target'], +- 'dependencies': ['js2c'], ++ 'dependencies': ['js2c', 'js2c_i18n'], + }], + ['component=="shared_library"', { + 'defines': [ +@@ -221,6 +223,11 @@ + 'include_dirs+': [ + '../../src', + ], ++ 'link_settings': { ++ 'libraries': [ ++ '-licui18n', '-licuuc', ++ ], ++ }, + 'sources': [ + '../../src/accessors.cc', + '../../src/accessors.h', +@@ -470,6 +477,23 @@ + '../../src/extensions/externalize-string-extension.h', + '../../src/extensions/gc-extension.cc', + '../../src/extensions/gc-extension.h', ++ '../../src/extensions/experimental/break-iterator.cc', ++ '../../src/extensions/experimental/break-iterator.h', ++ '../../src/extensions/experimental/collator.cc', ++ '../../src/extensions/experimental/collator.h', ++ '../../src/extensions/experimental/datetime-format.cc', ++ '../../src/extensions/experimental/datetime-format.h', ++ '../../src/extensions/experimental/i18n-extension.cc', ++ '../../src/extensions/experimental/i18n-extension.h', ++ '../../src/extensions/experimental/i18n-locale.cc', ++ '../../src/extensions/experimental/i18n-locale.h', ++ '../../src/extensions/experimental/i18n-natives.h', ++ '../../src/extensions/experimental/i18n-utils.cc', ++ '../../src/extensions/experimental/i18n-utils.h', ++ '../../src/extensions/experimental/language-matcher.cc', ++ '../../src/extensions/experimental/language-matcher.h', ++ '../../src/extensions/experimental/number-format.cc', ++ '../../src/extensions/experimental/number-format.h', + ], + 'conditions': [ + ['want_separate_host_toolset==1', { +@@ -731,6 +755,40 @@ + ], + }, + { ++ 'target_name': 'js2c_i18n', ++ 'type': 'none', ++ 'conditions': [ ++ ['want_separate_host_toolset==1', { ++ 'toolsets': ['host'], ++ }, { ++ 'toolsets': ['target'], ++ }], ++ ], ++ 'variables': { ++ 'js_files': [ ++ '../../src/extensions/experimental/i18n.js' ++ ], ++ }, ++ 'actions': [ ++ { ++ 'action_name': 'js2c_i18n', ++ 'inputs': [ ++ '../../src/extensions/experimental/i18n-js2c.py', ++ '<@(js_files)', ++ ], ++ 'outputs': [ ++ '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc', ++ ], ++ 'action': [ ++ 'python', ++ '../../src/extensions/experimental/i18n-js2c.py', ++ '<@(_outputs)', ++ '<@(js_files)' ++ ], ++ }, ++ ], ++ }, 
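
The js2c_i18n target added above is only a thin wrapper around a single script call; the same generation step can be reproduced by hand from the top of the v8 tree (the output path below is arbitrary):

    # regenerate the i18n natives C++ source from the experimental i18n JavaScript,
    # mirroring the gyp action: python i18n-js2c.py <output> <inputs>
    python src/extensions/experimental/i18n-js2c.py \
        /tmp/i18n-js.cc \
        src/extensions/experimental/i18n.js
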
++ { + 'target_name': 'mksnapshot', + 'type': 'executable', + 'dependencies': [ +@@ -779,6 +837,9 @@ + '-lbz2', + ]}], + ], ++ 'libraries': [ ++ '-licui18n', '-licuuc', '-licudata', ++ ], + }, + { + 'target_name': 'preparser_lib', diff -Nru libv8-3.4.14.21/debian/patches/0006-gcc46-compat.patch libv8-3.5.10.24/debian/patches/0006-gcc46-compat.patch --- libv8-3.4.14.21/debian/patches/0006-gcc46-compat.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0006-gcc46-compat.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -Description: GCC 4.6 warns about unused-but-set-variable -Bug-V8: http://code.google.com/p/v8/issues/detail?id=1291 -Author: Jérémy Lal -Last-Update: 2011-07-16 ---- a/SConstruct -+++ b/SConstruct -@@ -285,6 +285,7 @@ - '-Werror', - '-W', - '-Wno-unused-parameter', -+ '-Wno-unused-but-set-variable', - '-Wnon-virtual-dtor'] - }, - 'os:win32': { diff -Nru libv8-3.4.14.21/debian/patches/0007_armhf.patch libv8-3.5.10.24/debian/patches/0007_armhf.patch --- libv8-3.4.14.21/debian/patches/0007_armhf.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0007_armhf.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -Description: armhf built with armeabi:hard, armel built with armeabi:softfp -Bug-Debian: http://bugs.debian.org/636370 -Author: Konstantinos Margaritis -Last-Update: 2011-09-16 - ---- a/SConstruct -+++ b/SConstruct -@@ -885,6 +885,10 @@ - return 'default' - - -+def GuessARMEABI(env): -+ eabi = env['armeabi']; -+ return eabi -+ - PLATFORM_OPTIONS = { - 'arch': { - 'values': ['arm', 'ia32', 'x64', 'mips'], -@@ -900,7 +904,12 @@ - 'values': ['gcc', 'msvc'], - 'guess': GuessToolchain, - 'help': 'the toolchain to use' -- } -+ }, -+ 'armeabi': { -+ 'values': ['hard', 'softfp', 'soft'], -+ 'default': GuessARMEABI, -+ 'help': 'generate calling conventiont according to selected ARM EABI variant' -+ }, - } - - SIMPLE_OPTIONS = { -@@ -1020,11 +1029,11 @@ - 'default': 'off', - 'help': 'select profile guided optimization variant', - }, -- 'armeabi': { -- 'values': ['hard', 'softfp', 'soft'], -- 'default': 'softfp', -- 'help': 'generate calling conventiont according to selected ARM EABI variant' -- }, -+# 'armeabi': { -+# 'values': ['hard', 'softfp', 'soft'], -+# 'guess': GuessARMEABI, -+# 'help': 'generate calling conventiont according to selected ARM EABI variant' -+# }, - 'mipsabi': { - 'values': ['hardfloat', 'softfloat', 'none'], - 'default': 'hardfloat', -@@ -1165,6 +1174,8 @@ - Abort("Option unalignedaccesses only supported for the ARM architecture.") - if env['os'] != 'linux' and env['compress_startup_data'] != 'off': - Abort("Startup data compression is only available on Linux") -+ if (env['arch'] == 'arm' and not (env['armeabi'] in ('hard', 'softfp', 'soft'))): -+ Abort("Invalid EABI set for arm, use one of hard, softfp, soft."); - for (name, option) in ALL_OPTIONS.iteritems(): - if (not name in env): - message = ("A value for option %s must be specified (%s)." % diff -Nru libv8-3.4.14.21/debian/patches/0008_mksnapshot_stdout.patch libv8-3.5.10.24/debian/patches/0008_mksnapshot_stdout.patch --- libv8-3.4.14.21/debian/patches/0008_mksnapshot_stdout.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0008_mksnapshot_stdout.patch 2011-12-02 13:26:53.000000000 +0000 @@ -2,15 +2,15 @@ armel builds typically fail at mksnapshot, for which it is useful to be able to get the actual log. 
Forwarded: not-needed Author: Jérémy Lal -Last-Update: 2011-09-28 ---- a/src/SConscript -+++ b/src/SConscript -@@ -388,7 +388,7 @@ - mksnapshot = mksnapshot_env.Program('mksnapshot', [mksnapshot_src, libraries_obj, experimental_libraries_obj, non_snapshot_files, empty_snapshot_obj], PDB='mksnapshot.exe.pdb') - if context.use_snapshot: - if context.build_snapshot: -- snapshot_cc = env.Snapshot('snapshot.cc', mksnapshot, LOGFILE=File('snapshot.log').abspath) -+ snapshot_cc = env.Snapshot('snapshot.cc', mksnapshot, LOGFILE='-') - else: - snapshot_cc = 'snapshot.cc' - snapshot_obj = context.ConfigureObject(env, snapshot_cc, CPPPATH=['.']) +Last-Update: 2011-10-25 +--- a/tools/gyp/v8.gyp ++++ b/tools/gyp/v8.gyp +@@ -142,7 +142,7 @@ + 'variables': { + 'mksnapshot_flags': [ + '--log-snapshot-positions', +- '--logfile', '<(INTERMEDIATE_DIR)/snapshot.log', ++ '--logfile', '-', + ], + }, + 'conditions': [ diff -Nru libv8-3.4.14.21/debian/patches/0010_fix_arm_bug.patch libv8-3.5.10.24/debian/patches/0010_fix_arm_bug.patch --- libv8-3.4.14.21/debian/patches/0010_fix_arm_bug.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0010_fix_arm_bug.patch 2011-12-02 13:26:53.000000000 +0000 @@ -5,7 +5,7 @@ Last-Update: 2011-10-21 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc -@@ -2907,8 +2907,8 @@ +@@ -2901,8 +2901,8 @@ #ifdef CAN_USE_ARMV5_INSTRUCTIONS clz(zeros, source); // This instruction is only supported after ARM5. #else diff -Nru libv8-3.4.14.21/debian/patches/0011_fix_icu4.8_compatiblity.patch libv8-3.5.10.24/debian/patches/0011_fix_icu4.8_compatiblity.patch --- libv8-3.4.14.21/debian/patches/0011_fix_icu4.8_compatiblity.patch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0011_fix_icu4.8_compatiblity.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,62 +0,0 @@ -Description: Fix compatiblity with ICU 4.8. - ICU 4.6 NumberFormat::EStyle enum is gone in ICU 4.8. Adding #if #else to - make it work across versions. - . - There was an ICU cleanup (removing all C++ style enums and replacing them - with plain C enums). NumberFormat::EStyle had a draft status (from ICU 4.2) - and it was cut. Confusion came up because DateFormat::EStyle is stable and - won't be removed. - . - This patch fixes compatiblity with ICU 4.8. -Origin: upstream, http://code.google.com/p/v8/source/detail?r=8701 -Bug: http://code.google.com/p/v8/issues/detail?id=1561 -Bug-Debian: http://bugs.debian.org/648506 -Last-Update: 2011-11-15 ---- a/src/extensions/experimental/number-format.cc -+++ b/src/extensions/experimental/number-format.cc -@@ -36,6 +36,8 @@ - #include "unicode/numfmt.h" - #include "unicode/uchar.h" - #include "unicode/ucurr.h" -+#include "unicode/unum.h" -+#include "unicode/uversion.h" - - namespace v8_i18n { - -@@ -230,6 +232,8 @@ - } - - // Generates ICU number format pattern from given skeleton. -+// TODO(cira): Remove once ICU includes equivalent method -+// (see http://bugs.icu-project.org/trac/ticket/8610). - static icu::DecimalFormat* CreateFormatterFromSkeleton( - const icu::Locale& icu_locale, - const icu::UnicodeString& skeleton, -@@ -250,6 +254,7 @@ - // Case of non-consecutive U+00A4 is taken care of in i18n.js. 
- int32_t end_index = skeleton.lastIndexOf(currency_symbol, index); - -+#if (U_ICU_VERSION_MAJOR_NUM == 4) && (U_ICU_VERSION_MINOR_NUM <= 6) - icu::NumberFormat::EStyles style; - switch (end_index - index) { - case 0: -@@ -261,6 +266,19 @@ - default: - style = icu::NumberFormat::kPluralCurrencyStyle; - } -+#else // ICU version is 4.8 or above (we ignore versions below 4.0). -+ UNumberFormatStyle style; -+ switch (end_index - index) { -+ case 0: -+ style = UNUM_CURRENCY; -+ break; -+ case 1: -+ style = UNUM_CURRENCY_ISO; -+ break; -+ default: -+ style = UNUM_CURRENCY_PLURAL; -+ } -+#endif - - base_format = static_cast( - icu::NumberFormat::createInstance(icu_locale, style, *status)); diff -Nru libv8-3.4.14.21/debian/patches/0011_use_system_gyp.patch libv8-3.5.10.24/debian/patches/0011_use_system_gyp.patch --- libv8-3.4.14.21/debian/patches/0011_use_system_gyp.patch 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0011_use_system_gyp.patch 2011-12-02 13:26:53.000000000 +0000 @@ -0,0 +1,27 @@ +Description: Use gyp package, not build/gyp/gyp. +Forwarded: not-needed +Author: Jérémy Lal +Last-Update: 2011-10-22 +--- a/Makefile ++++ b/Makefile +@@ -145,17 +145,17 @@ + + # GYP file generation targets. + $(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE) +- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ ++ gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \ + -S-ia32 $(GYPFLAGS) + + $(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE) +- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ ++ gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \ + -S-x64 $(GYPFLAGS) + + $(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) +- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ ++ gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \ + -S-arm $(GYPFLAGS) + diff -Nru libv8-3.4.14.21/debian/patches/0012_make_check_testsuites.patch libv8-3.5.10.24/debian/patches/0012_make_check_testsuites.patch --- libv8-3.4.14.21/debian/patches/0012_make_check_testsuites.patch 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0012_make_check_testsuites.patch 2011-12-02 13:26:53.000000000 +0000 @@ -0,0 +1,38 @@ +Description: make check runs TESTSUITES space separated suites +Forwarded: not-needed +Author: Jérémy Lal +Last-Update: 2011-10-22 +--- a/Makefile ++++ b/Makefile +@@ -31,6 +31,7 @@ + LINK ?= "g++" + OUTDIR ?= out + TESTJOBS ?= -j16 ++TESTSUITES ?= "" + GYPFLAGS ?= + + # Special build flags. Use them like this: "make library=shared" +@@ -120,19 +121,19 @@ + + # Test targets. + check: all +- @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) ++ @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) $(TESTSUITES) + + $(addsuffix .check,$(MODES)): $$(basename $$@) + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \ +- --mode=$(basename $@) ++ --mode=$(basename $@) $(TESTSUITES) + + $(addsuffix .check,$(ARCHES)): $$(basename $$@) + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \ +- --arch=$(basename $@) ++ --arch=$(basename $@) $(TESTSUITES) + + $(CHECKS): $$(basename $$@) + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \ +- --arch-and-mode=$(basename $@) ++ --arch-and-mode=$(basename $@) $(TESTSUITES) + + # Clean targets. You can clean each architecture individually, or everything. 
+ $(addsuffix .clean,$(ARCHES)): diff -Nru libv8-3.4.14.21/debian/patches/0013_enable_soname.patch libv8-3.5.10.24/debian/patches/0013_enable_soname.patch --- libv8-3.4.14.21/debian/patches/0013_enable_soname.patch 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0013_enable_soname.patch 2011-12-02 13:26:53.000000000 +0000 @@ -0,0 +1,45 @@ +Description: enable soname, done right. +Origin: http://code.google.com/p/v8/source/browse/ +Bug-V8: http://code.google.com/p/v8/issues/detail?id=1786 +Last-Update: 2011-10-25 +--- a/tools/gyp/v8.gyp ++++ b/tools/gyp/v8.gyp +@@ -71,6 +71,13 @@ + ], + }, + }], ++ ['soname_version!=""', { ++ # Ideally, we'd like to specify the full filename for the ++ # library and set it to "libv8.so.<(soname_version)", ++ # but currently the best we can do is use 'product_name' and ++ # get "libv8-<(soname_version).so". ++ 'product_extension': 'so.<(soname_version)', ++ }], + ], + }, + { +--- a/build/common.gypi ++++ b/build/common.gypi +@@ -72,6 +72,9 @@ + 'v8_use_snapshot%': 'true', + 'host_os%': '<(OS)', + 'v8_use_liveobjectlist%': 'false', ++ ++ # For a shared library build, results in "libv8-<(soname_version).so". ++ 'soname_version%': '', + }, + 'target_defaults': { + 'conditions': [ +--- a/Makefile ++++ b/Makefile +@@ -69,6 +69,10 @@ + else + GYPFLAGS += -Dv8_can_use_vfp_instructions=true + endif ++# soname_version=1.2.3 ++ifdef soname_version ++ GYPFLAGS += -Dsoname_version=$(soname_version) ++endif + + # ----------------- available targets: -------------------- + # - any arch listed in ARCHES (see below) diff -Nru libv8-3.4.14.21/debian/patches/0014_disable_armv7_defaults.patch libv8-3.5.10.24/debian/patches/0014_disable_armv7_defaults.patch --- libv8-3.4.14.21/debian/patches/0014_disable_armv7_defaults.patch 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/0014_disable_armv7_defaults.patch 2011-12-01 13:56:16.000000000 +0000 @@ -0,0 +1,15 @@ +Description: arm targets armv7 by default, try to fix this. +Forwarded: not-needed, upstream do not promote armv4t support +Author: Jérémy Lal +Last-Update: 2011-12-01 +--- a/Makefile ++++ b/Makefile +@@ -161,7 +161,7 @@ + + $(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) + gyp --generator-output="$(OUTDIR)" build/all.gyp \ +- -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \ ++ -Ibuild/standalone.gypi --depth=. 
-Dtarget_arch=arm \ + -S-arm $(GYPFLAGS) + + # Replaces the old with the new environment file if they're different, which diff -Nru libv8-3.4.14.21/debian/patches/series libv8-3.5.10.24/debian/patches/series --- libv8-3.4.14.21/debian/patches/series 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/patches/series 2011-12-01 13:56:16.000000000 +0000 @@ -1,9 +1,7 @@ -0003-add-the-install-target.patch -0004-gcc-opt-level-2.patch 0005-enable-i18n-extension.patch -0006-gcc46-compat.patch -0007_armhf.patch 0008_mksnapshot_stdout.patch -0009_unaligned_access_armel.patch 0010_fix_arm_bug.patch -0011_fix_icu4.8_compatiblity.patch +0011_use_system_gyp.patch +0012_make_check_testsuites.patch +0013_enable_soname.patch +0014_disable_armv7_defaults.patch diff -Nru libv8-3.4.14.21/debian/rules libv8-3.5.10.24/debian/rules --- libv8-3.4.14.21/debian/rules 2011-12-05 09:18:15.000000000 +0000 +++ libv8-3.5.10.24/debian/rules 2011-12-05 09:57:26.000000000 +0000 @@ -7,11 +7,15 @@ sed -e 's/__LIBPKGNAME__/$(LIBPKGNAME)/g' debian/control.in include /usr/share/cdbs/1/rules/utils.mk -include /usr/share/cdbs/1/class/scons.mk +include /usr/share/cdbs/1/class/makefile.mk include /usr/share/cdbs/1/rules/debhelper.mk SONAME = $(DEB_UPSTREAM_VERSION) LIBPKGNAME = libv8-$(SONAME) +libSoname = libv8.so.$(SONAME) + +# allow parallel builds +DEB_BUILD_PARALLEL=1 # suppress checking binary files, to not upset dpkg-source DEB_COPYRIGHT_CHECK_IGNORE_REGEX = ^(benchmarks/v8-logo\.png|test/mjsunit/unicode-test\.js|debian/(changelog|copyright(|_hints|_newhints)))$ @@ -29,64 +33,65 @@ v8os := $(or $(v8os),$(if $(filter kfreebsd,$(DEB_HOST_ARCH_OS)),freebsd)) v8os := $(or $(v8os),$(DEB_HOST_ARCH_OS)) -# force aligned access -v8armunaligned := +GYPFLAGS += -Dhost_arch=$(v8arch) + +# still not compatible with GCC 4.6 defaults (at 3.5.10.24) +CXXFLAGS += -Wno-unused-but-set-variable + # armel and armhf arches need flags to work around those issues : # -fno-tree-sink: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=39604 # -Wno-psabi: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=42748 -v8armeabi := soft -# armhf is hardfloat ifeq (armhf, $(DEB_HOST_ARCH)) -v8armeabi := hard -CCFLAGS += -fno-tree-sink +CXXFLAGS += -fno-tree-sink CXXFLAGS += -Wno-psabi +# enable armv7 vfpv3 +GYPFLAGS += -Darmv7=1 -Darm_fpu=vfpv3 -Darm_neon=0 -Dv8_use_arm_eabi_hardfloat=true endif ifeq (armel, $(DEB_HOST_ARCH)) -CCFLAGS += -fno-tree-sink +CXXFLAGS += -fno-tree-sink CXXFLAGS += -Wno-psabi -ifeq ($(shell dpkg-vendor --derives-from ubuntu && echo true),true) -# Ubuntu targets armv7+ with VFP and enables thumb2 support by default -# v8 supports thumb in >= armv5t so this is OK. -v8armeabi := softfp +ifeq ($(shell dpkg-vendor --is ubuntu && echo true),true) +# Ubuntu targets armv7+ with VFP and thumb2 support by default for armel +GYPFLAGS += -Darmv7=1 -Darm_fpu=vfpv3 -Darm_neon=0 -Dv8_use_arm_eabi_hardfloat=false else +DEB_MAKE_EXTRA_ARGS += vfp3=off # Disable thumb-interworking because v8 supports it only on >= armv5t. 
# http://code.google.com/p/v8/issues/detail?id=590 -v8armunaligned := unalignedaccesses=off -CCFLAGS += -mno-thumb-interwork +CXXFLAGS += -mno-thumb-interwork +# disable armv7, use softfloat +GYPFLAGS += -Darmv7=0 -Dv8_can_use_unaligned_accesses=false -Dv8_use_arm_eabi_hardfloat=false endif endif -export CCFLAGS +export GYPFLAGS export CXXFLAGS -DEB_SCONS_OPTIONS = library=shared soname=on snapshot=on shlibtype=hidden arch=$(v8arch) os=$(v8os) armeabi=$(v8armeabi) $(v8armunaligned) -DEB_SCONS_BUILD_TARGET = library +DEB_MAKE_EXTRA_ARGS += library=shared soname_version=$(SONAME) OS=$(v8os) V=1 +DEB_MAKE_CLEAN_TARGET = clean +DEB_MAKE_BUILD_TARGET = $(v8arch).release -# Scons does not always add -soname param, force it. This prevents gold linker from erroring. -DEB_SCONS_ENVVARS = SONAME=libv8.so.$(SONAME) +v8out = $(CURDIR)/out/$(v8arch).release/lib.target/$(libSoname) -# regression tests need to build shell for mjsunit -- will be deprecated in 3.5 branch in favor of d8. shell becoming a simple v8 cli. -DEB_SCONS_CHECK_TARGET = sample sample=shell +# regression tests +# * relax regression tests when targeted experimental suite +# * run only javascript tests, cctests are for development purposes +DEB_MAKE_CHECK_TARGET = $(v8arch).release.check \ + LD_PRELOAD=$(v8out) \ + TESTSUITES="mjsunit message preparser" \ + TESTJOBS="$(DEB_MAKE_PARALLEL) --timeout=120" \ + $(if $(shell dpkg-parsechangelog | grep -Fx 'Distribution: experimental'),|| true) -DEB_SCONS_INSTALL_OPTIONS += DESTDIR="$(cdbs_curdestdir)" -DEB_DH_INSTALL_ARGS_$(LIBPKGNAME) = usr/lib/libv8.so.$(SONAME) +DEB_DH_INSTALL_ARGS_$(LIBPKGNAME) = usr/lib/$(libSoname) clean:: - rm -f tools/*.pyc libv8-$(SONAME).so - rm -rf obj + rm -rf out + rm -f tools/*.pyc rm -f test/*/*.pyc - rm -f shell - -# actually run regression tests -# * relax regression tests when targeted experimental suite -# * run only javascript tests, cctests are for development purposes -exp-relax-check := $(if $(shell dpkg-parsechangelog | grep -Fx 'Distribution: experimental'),-) -common-post-build-impl:: - $(exp-relax-check)$(if $(filter nocheck,$(DEB_BUILD_OPTIONS)),,LD_PRELOAD=$(CURDIR)/libv8-$(SONAME).so $(CURDIR)/tools/test.py --progress=verbose --snapshot --no-build --timeout=120 mjsunit message arch=$(v8arch)) common-install-impl:: - cd debian/tmp/usr/lib/ ; \ - mv libv8-$(SONAME)*.so libv8.so.$(SONAME) ; \ - ln -s -T libv8.so.$(SONAME) libv8.so + mkdir -p debian/tmp/usr/lib ; \ + cd debian/tmp/usr/lib ; \ + cp $(v8out) . 
; \ + ln -s -T $(libSoname) libv8.so diff -Nru libv8-3.4.14.21/debian/watch libv8-3.5.10.24/debian/watch --- libv8-3.4.14.21/debian/watch 2011-12-01 11:35:37.000000000 +0000 +++ libv8-3.5.10.24/debian/watch 2011-12-02 13:26:53.000000000 +0000 @@ -5,4 +5,4 @@ # SVN tag but the watch file will still useful to find # new versions opts="dversionmangle=s/\+dfsg//" \ -http://v8.googlecode.com/svn/tags/ (3\.4\.[\d.]+)/ +http://v8.googlecode.com/svn/tags/ (3\.5\.[\d.]+)/ diff -Nru libv8-3.4.14.21/.gitignore libv8-3.5.10.24/.gitignore --- libv8-3.4.14.21/.gitignore 2011-05-04 12:43:48.000000000 +0000 +++ libv8-3.5.10.24/.gitignore 2011-08-15 13:01:23.000000000 +0000 @@ -21,6 +21,7 @@ shell_g /build/gyp /obj/ +/out/ /test/es5conform/data/ /test/mozilla/data/ /test/sputnik/sputniktests/ @@ -31,5 +32,4 @@ /tools/visual_studio/Release /xcodebuild/ TAGS -Makefile *.Makefile diff -Nru libv8-3.4.14.21/include/v8.h libv8-3.5.10.24/include/v8.h --- libv8-3.4.14.21/include/v8.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/include/v8.h 2011-08-24 12:02:41.000000000 +0000 @@ -171,12 +171,12 @@ /** * Creates an empty handle. */ - inline Handle(); + inline Handle() : val_(0) {} /** * Creates a new handle for the specified value. */ - inline explicit Handle(T* val) : val_(val) { } + inline explicit Handle(T* val) : val_(val) {} /** * Creates a handle for the contents of the specified handle. This @@ -203,14 +203,14 @@ */ inline bool IsEmpty() const { return val_ == 0; } - inline T* operator->() const { return val_; } - - inline T* operator*() const { return val_; } - /** * Sets the handle to be empty. IsEmpty() will then return true. */ - inline void Clear() { this->val_ = 0; } + inline void Clear() { val_ = 0; } + + inline T* operator->() const { return val_; } + + inline T* operator*() const { return val_; } /** * Checks whether two handles are the same. @@ -1039,29 +1039,30 @@ * \param length The number of characters to copy from the string. For * WriteUtf8 the number of bytes in the buffer. * \param nchars_ref The number of characters written, can be NULL. - * \param hints Various hints that might affect performance of this or + * \param options Various options that might affect performance of this or * subsequent operations. * \return The number of characters copied to the buffer excluding the null * terminator. For WriteUtf8: The number of bytes copied to the buffer - * including the null terminator. + * including the null terminator (if written). */ - enum WriteHints { - NO_HINTS = 0, - HINT_MANY_WRITES_EXPECTED = 1 + enum WriteOptions { + NO_OPTIONS = 0, + HINT_MANY_WRITES_EXPECTED = 1, + NO_NULL_TERMINATION = 2 }; V8EXPORT int Write(uint16_t* buffer, int start = 0, int length = -1, - WriteHints hints = NO_HINTS) const; // UTF-16 + int options = NO_OPTIONS) const; // UTF-16 V8EXPORT int WriteAscii(char* buffer, int start = 0, int length = -1, - WriteHints hints = NO_HINTS) const; // ASCII + int options = NO_OPTIONS) const; // ASCII V8EXPORT int WriteUtf8(char* buffer, int length = -1, int* nchars_ref = NULL, - WriteHints hints = NO_HINTS) const; // UTF-8 + int options = NO_OPTIONS) const; // UTF-8 /** * A zero length string. 
@@ -1335,7 +1336,7 @@ static inline Number* Cast(v8::Value* obj); private: V8EXPORT Number(); - static void CheckCast(v8::Value* obj); + V8EXPORT static void CheckCast(v8::Value* obj); }; @@ -1709,7 +1710,7 @@ static inline Array* Cast(Value* obj); private: V8EXPORT Array(); - static void CheckCast(Value* obj); + V8EXPORT static void CheckCast(Value* obj); }; @@ -2231,11 +2232,10 @@ void SetHiddenPrototype(bool value); /** - * Sets the property attributes of the 'prototype' property of functions - * created from this FunctionTemplate. Can be any combination of ReadOnly, - * DontEnum and DontDelete. + * Sets the ReadOnly flag in the attributes of the 'prototype' property + * of functions created from this FunctionTemplate to true. */ - void SetPrototypeAttributes(int attributes); + void ReadOnlyPrototype(); /** * Returns true if the given object is an instance of this function @@ -3607,7 +3607,7 @@ /** * Returns whether v8::Locker is being used by this V8 instance. */ - static bool IsActive() { return active_; } + static bool IsActive(); private: bool has_lock_; @@ -3828,10 +3828,6 @@ template -Handle::Handle() : val_(0) { } - - -template Local::Local() : Handle() { } diff -Nru libv8-3.4.14.21/include/v8-profiler.h libv8-3.5.10.24/include/v8-profiler.h --- libv8-3.4.14.21/include/v8-profiler.h 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/include/v8-profiler.h 2011-10-14 09:24:03.000000000 +0000 @@ -307,6 +307,12 @@ * path from the snapshot root to the current node. */ const HeapGraphNode* GetDominatorNode() const; + + /** + * Finds and returns a value from the heap corresponding to this node, + * if the value is still reachable. + */ + Handle GetHeapValue() const; }; diff -Nru libv8-3.4.14.21/Makefile libv8-3.5.10.24/Makefile --- libv8-3.4.14.21/Makefile 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/Makefile 2011-08-29 10:41:00.000000000 +0000 @@ -0,0 +1,171 @@ +# Copyright 2011 the V8 project authors. All rights reserved. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +# Variable default definitions. Override them by exporting them in your shell. 
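The HeapGraphNode::GetHeapValue() accessor added to v8-profiler.h above lets a profiler client map a snapshot node back to the live JavaScript value it describes. A minimal sketch, assuming a node obtained by walking a HeapSnapshot; the helper name NodeStillAlive is illustrative only:

    #include <v8.h>
    #include <v8-profiler.h>

    // Returns true if the heap object behind 'node' is still reachable.
    // GetHeapValue() returns undefined when the value is no longer reachable
    // (see the api.cc implementation later in this patch).
    static bool NodeStillAlive(const v8::HeapGraphNode* node) {
      v8::HandleScope scope;
      v8::Handle<v8::Value> value = node->GetHeapValue();
      return !value->IsUndefined();
    }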
+CXX ?= "g++" # For distcc: export CXX="distcc g++" +LINK ?= "g++" +OUTDIR ?= out +TESTJOBS ?= -j16 +GYPFLAGS ?= + +# Special build flags. Use them like this: "make library=shared" + +# library=shared || component=shared_library +ifeq ($(library), shared) + GYPFLAGS += -Dcomponent=shared_library +endif +ifdef component + GYPFLAGS += -Dcomponent=$(component) +endif +# console=readline +ifdef console + GYPFLAGS += -Dconsole=$(console) +endif +# disassembler=on +ifeq ($(disassembler), on) + GYPFLAGS += -Dv8_enable_disassembler=1 +endif +# snapshot=off +ifeq ($(snapshot), off) + GYPFLAGS += -Dv8_use_snapshot='false' +endif +# gdbjit=on +ifeq ($(gdbjit), on) + GYPFLAGS += -Dv8_enable_gdbjit=1 +endif +# liveobjectlist=on +ifeq ($(liveobjectlist), on) + GYPFLAGS += -Dv8_use_liveobjectlist=true +endif +# vfp3=off +ifeq ($(vfp3), off) + GYPFLAGS += -Dv8_can_use_vfp_instructions=false +else + GYPFLAGS += -Dv8_can_use_vfp_instructions=true +endif + +# ----------------- available targets: -------------------- +# - any arch listed in ARCHES (see below) +# - any mode listed in MODES +# - every combination ., e.g. "ia32.release" +# - any of the above with .check appended, e.g. "ia32.release.check" +# - default (no target specified): build all ARCHES and MODES +# - "check": build all targets and run all tests +# - ".clean" for any in ARCHES +# - "clean": clean all ARCHES + +# ----------------- internal stuff ------------------------ + +# Architectures and modes to be compiled. Consider these to be internal +# variables, don't override them (use the targets instead). +ARCHES = ia32 x64 arm +MODES = release debug + +# List of files that trigger Makefile regeneration: +GYPFILES = build/all.gyp build/common.gypi build/standalone.gypi \ + preparser/preparser.gyp samples/samples.gyp src/d8.gyp \ + test/cctest/cctest.gyp tools/gyp/v8.gyp + +# Generates all combinations of ARCHES and MODES, e.g. "ia32.release". +BUILDS = $(foreach mode,$(MODES),$(addsuffix .$(mode),$(ARCHES))) +# Generates corresponding test targets, e.g. "ia32.release.check". +CHECKS = $(addsuffix .check,$(BUILDS)) +# File where previously used GYPFLAGS are stored. +ENVFILE = $(OUTDIR)/environment + +.PHONY: all clean $(ENVFILE).new \ + $(ARCHES) $(MODES) $(BUILDS) $(addsuffix .clean,$(ARCHES)) + +# Target definitions. "all" is the default. +all: $(MODES) + +# Compile targets. MODES and ARCHES are convenience targets. +.SECONDEXPANSION: +$(MODES): $(addsuffix .$$@,$(ARCHES)) + +$(ARCHES): $(addprefix $$@.,$(MODES)) + +# Defines how to build a particular target (e.g. ia32.release). +$(BUILDS): $(OUTDIR)/Makefile-$$(basename $$@) + @$(MAKE) -C "$(OUTDIR)" -f Makefile-$(basename $@) \ + CXX="$(CXX)" LINK="$(LINK)" \ + BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \ + python -c "print raw_input().capitalize()") \ + builddir="$(shell pwd)/$(OUTDIR)/$@" + +# Test targets. +check: all + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) + +$(addsuffix .check,$(MODES)): $$(basename $$@) + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \ + --mode=$(basename $@) + +$(addsuffix .check,$(ARCHES)): $$(basename $$@) + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \ + --arch=$(basename $@) + +$(CHECKS): $$(basename $$@) + @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \ + --arch-and-mode=$(basename $@) + +# Clean targets. You can clean each architecture individually, or everything. 
+$(addsuffix .clean,$(ARCHES)): + rm -f $(OUTDIR)/Makefile-$(basename $@) + rm -rf $(OUTDIR)/$(basename $@).release + rm -rf $(OUTDIR)/$(basename $@).debug + find $(OUTDIR) -regex '.*\(host\|target\)-$(basename $@)\.mk' -delete + +clean: $(addsuffix .clean,$(ARCHES)) + +# GYP file generation targets. +$(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE) + build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \ + -S-ia32 $(GYPFLAGS) + +$(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE) + build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \ + -S-x64 $(GYPFLAGS) + +$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) + build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \ + -S-arm $(GYPFLAGS) + +# Replaces the old with the new environment file if they're different, which +# will trigger GYP to regenerate Makefiles. +$(ENVFILE): $(ENVFILE).new + @if test -r $(ENVFILE) && cmp $(ENVFILE).new $(ENVFILE) >/dev/null; \ + then rm $(ENVFILE).new; \ + else mv $(ENVFILE).new $(ENVFILE); fi + +# Stores current GYPFLAGS in a file. +$(ENVFILE).new: + @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; diff -Nru libv8-3.4.14.21/preparser/preparser.gyp libv8-3.5.10.24/preparser/preparser.gyp --- libv8-3.4.14.21/preparser/preparser.gyp 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/preparser/preparser.gyp 2011-08-29 10:41:00.000000000 +0000 @@ -0,0 +1,42 @@ +# Copyright 2011 the V8 project authors. All rights reserved. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +{ + 'includes': ['../build/common.gypi'], + 'targets': [ + { + 'target_name': 'preparser', + 'type': 'executable', + 'dependencies': [ + '../tools/gyp/v8.gyp:preparser_lib', + ], + 'sources': [ + 'preparser-process.cc', + ], + }, + ], +} diff -Nru libv8-3.4.14.21/samples/samples.gyp libv8-3.5.10.24/samples/samples.gyp --- libv8-3.4.14.21/samples/samples.gyp 2010-12-17 08:51:21.000000000 +0000 +++ libv8-3.5.10.24/samples/samples.gyp 2011-08-29 10:41:00.000000000 +0000 @@ -1,4 +1,4 @@ -# Copyright 2010 the V8 project authors. All rights reserved. +# Copyright 2011 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -26,23 +26,25 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. { + 'includes': ['../build/common.gypi'], + 'target_defaults': { + 'type': 'executable', + 'dependencies': [ + '../tools/gyp/v8.gyp:v8', + ], + 'include_dirs': [ + '../include', + ], + }, 'targets': [ { 'target_name': 'shell', - 'type': 'executable', - 'dependencies': [ - '../tools/gyp/v8.gyp:v8', - ], 'sources': [ 'shell.cc', ], }, { 'target_name': 'process', - 'type': 'executable', - 'dependencies': [ - '../tools/gyp/v8.gyp:v8', - ], 'sources': [ 'process.cc', ], diff -Nru libv8-3.4.14.21/samples/shell.cc libv8-3.5.10.24/samples/shell.cc --- libv8-3.4.14.21/samples/shell.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/samples/shell.cc 2011-08-22 11:03:23.000000000 +0000 @@ -26,39 +26,28 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include -#include #include -#ifdef COMPRESS_STARTUP_DATA_BZ2 -#include -#endif #include #include #include #include -// When building with V8 in a shared library we cannot use functions which -// is not explicitly a part of the public V8 API. This extensive use of -// #ifndef USING_V8_SHARED/#endif is a hack until we can resolve whether to -// still use the shell sample for testing or change to use the developer -// shell d8 TODO(1272). -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) -#include "../src/v8.h" -#endif // USING_V8_SHARED - -#if !defined(_WIN32) && !defined(_WIN64) -#include // NOLINT +#ifdef COMPRESS_STARTUP_DATA_BZ2 +#error Using compressed startup data is not supported for this sample #endif -static void ExitShell(int exit_code) { - // Use _exit instead of exit to avoid races between isolate - // threads and static destructors. - fflush(stdout); - fflush(stderr); - _exit(exit_code); -} +/** + * This sample program shows how to implement a simple javascript shell + * based on V8. This includes initializing V8 with command line options, + * creating global functions, compiling and executing strings. + * + * For a more sophisticated shell, consider using the debug shell D8. 
+ */ + v8::Persistent CreateShellContext(); void RunShell(v8::Handle context); +int RunMain(int argc, char* argv[]); bool ExecuteString(v8::Handle source, v8::Handle name, bool print_result, @@ -68,305 +57,28 @@ v8::Handle Load(const v8::Arguments& args); v8::Handle Quit(const v8::Arguments& args); v8::Handle Version(const v8::Arguments& args); -v8::Handle Int8Array(const v8::Arguments& args); -v8::Handle Uint8Array(const v8::Arguments& args); -v8::Handle Int16Array(const v8::Arguments& args); -v8::Handle Uint16Array(const v8::Arguments& args); -v8::Handle Int32Array(const v8::Arguments& args); -v8::Handle Uint32Array(const v8::Arguments& args); -v8::Handle Float32Array(const v8::Arguments& args); -v8::Handle Float64Array(const v8::Arguments& args); -v8::Handle PixelArray(const v8::Arguments& args); v8::Handle ReadFile(const char* name); void ReportException(v8::TryCatch* handler); -static bool last_run = true; - -class SourceGroup { - public: - SourceGroup() : -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - next_semaphore_(v8::internal::OS::CreateSemaphore(0)), - done_semaphore_(v8::internal::OS::CreateSemaphore(0)), - thread_(NULL), -#endif // USING_V8_SHARED - argv_(NULL), - begin_offset_(0), - end_offset_(0) { } - -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - ~SourceGroup() { - delete next_semaphore_; - delete done_semaphore_; - } -#endif // USING_V8_SHARED - - void Begin(char** argv, int offset) { - argv_ = const_cast(argv); - begin_offset_ = offset; - } - - void End(int offset) { end_offset_ = offset; } - - void Execute() { - for (int i = begin_offset_; i < end_offset_; ++i) { - const char* arg = argv_[i]; - if (strcmp(arg, "-e") == 0 && i + 1 < end_offset_) { - // Execute argument given to -e option directly. - v8::HandleScope handle_scope; - v8::Handle file_name = v8::String::New("unnamed"); - v8::Handle source = v8::String::New(argv_[i + 1]); - if (!ExecuteString(source, file_name, false, true)) { - ExitShell(1); - return; - } - ++i; - } else if (arg[0] == '-') { - // Ignore other options. They have been parsed already. - } else { - // Use all other arguments as names of files to load and run. - v8::HandleScope handle_scope; - v8::Handle file_name = v8::String::New(arg); - v8::Handle source = ReadFile(arg); - if (source.IsEmpty()) { - printf("Error reading '%s'\n", arg); - continue; - } - if (!ExecuteString(source, file_name, false, true)) { - ExitShell(1); - return; - } - } - } - } - -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - void StartExecuteInThread() { - if (thread_ == NULL) { - thread_ = new IsolateThread(this); - thread_->Start(); - } - next_semaphore_->Signal(); - } - - void WaitForThread() { - if (thread_ == NULL) return; - if (last_run) { - thread_->Join(); - thread_ = NULL; - } else { - done_semaphore_->Wait(); - } - } -#endif // USING_V8_SHARED - - private: -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - static v8::internal::Thread::Options GetThreadOptions() { - v8::internal::Thread::Options options; - options.name = "IsolateThread"; - // On some systems (OSX 10.6) the stack size default is 0.5Mb or less - // which is not enough to parse the big literal expressions used in tests. - // The stack size should be at least StackGuard::kLimitSize + some - // OS-specific padding for thread startup code. 
- options.stack_size = 2 << 20; // 2 Mb seems to be enough - return options; - } - - class IsolateThread : public v8::internal::Thread { - public: - explicit IsolateThread(SourceGroup* group) - : v8::internal::Thread(GetThreadOptions()), group_(group) {} - - virtual void Run() { - group_->ExecuteInThread(); - } - - private: - SourceGroup* group_; - }; - - void ExecuteInThread() { - v8::Isolate* isolate = v8::Isolate::New(); - do { - if (next_semaphore_ != NULL) next_semaphore_->Wait(); - { - v8::Isolate::Scope iscope(isolate); - v8::HandleScope scope; - v8::Persistent context = CreateShellContext(); - { - v8::Context::Scope cscope(context); - Execute(); - } - context.Dispose(); - } - if (done_semaphore_ != NULL) done_semaphore_->Signal(); - } while (!last_run); - isolate->Dispose(); - } - - v8::internal::Semaphore* next_semaphore_; - v8::internal::Semaphore* done_semaphore_; - v8::internal::Thread* thread_; -#endif // USING_V8_SHARED - - const char** argv_; - int begin_offset_; - int end_offset_; -}; - - -static SourceGroup* isolate_sources = NULL; - +static bool run_shell; -#ifdef COMPRESS_STARTUP_DATA_BZ2 -class BZip2Decompressor : public v8::StartupDataDecompressor { - public: - virtual ~BZip2Decompressor() { } - - protected: - virtual int DecompressData(char* raw_data, - int* raw_data_size, - const char* compressed_data, - int compressed_data_size) { - ASSERT_EQ(v8::StartupData::kBZip2, - v8::V8::GetCompressedStartupDataAlgorithm()); - unsigned int decompressed_size = *raw_data_size; - int result = - BZ2_bzBuffToBuffDecompress(raw_data, - &decompressed_size, - const_cast(compressed_data), - compressed_data_size, - 0, 1); - if (result == BZ_OK) { - *raw_data_size = decompressed_size; - } - return result; - } -}; -#endif - -int RunMain(int argc, char* argv[]) { +int main(int argc, char* argv[]) { v8::V8::SetFlagsFromCommandLine(&argc, argv, true); + run_shell = (argc == 1); v8::HandleScope handle_scope; v8::Persistent context = CreateShellContext(); - // Enter the newly created execution environment. - context->Enter(); if (context.IsEmpty()) { printf("Error creating context\n"); return 1; } - - bool run_shell = (argc == 1); - int num_isolates = 1; - for (int i = 1; i < argc; i++) { - if (strcmp(argv[i], "--isolate") == 0) { -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - ++num_isolates; -#else // USING_V8_SHARED - printf("Error: --isolate not supported when linked with shared " - "library\n"); - ExitShell(1); -#endif // USING_V8_SHARED - } - } - if (isolate_sources == NULL) { - isolate_sources = new SourceGroup[num_isolates]; - SourceGroup* current = isolate_sources; - current->Begin(argv, 1); - for (int i = 1; i < argc; i++) { - const char* str = argv[i]; - if (strcmp(str, "--isolate") == 0) { - current->End(i); - current++; - current->Begin(argv, i + 1); - } else if (strcmp(str, "--shell") == 0) { - run_shell = true; - } else if (strcmp(str, "-f") == 0) { - // Ignore any -f flags for compatibility with the other stand- - // alone JavaScript engines. 
- continue; - } else if (strncmp(str, "--", 2) == 0) { - printf("Warning: unknown flag %s.\nTry --help for options\n", str); - } - } - current->End(argc); - } -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - for (int i = 1; i < num_isolates; ++i) { - isolate_sources[i].StartExecuteInThread(); - } -#endif // USING_V8_SHARED - isolate_sources[0].Execute(); + context->Enter(); + int result = RunMain(argc, argv); if (run_shell) RunShell(context); -#if !(defined(USING_V8_SHARED) || defined(V8_SHARED)) - for (int i = 1; i < num_isolates; ++i) { - isolate_sources[i].WaitForThread(); - } -#endif // USING_V8_SHARED - if (last_run) { - delete[] isolate_sources; - isolate_sources = NULL; - } context->Exit(); context.Dispose(); - return 0; -} - - -int main(int argc, char* argv[]) { - // Figure out if we're requested to stress the optimization - // infrastructure by running tests multiple times and forcing - // optimization in the last run. - bool FLAG_stress_opt = false; - bool FLAG_stress_deopt = false; - for (int i = 0; i < argc; i++) { - if (strcmp(argv[i], "--stress-opt") == 0) { - FLAG_stress_opt = true; - argv[i] = NULL; - } else if (strcmp(argv[i], "--stress-deopt") == 0) { - FLAG_stress_deopt = true; - argv[i] = NULL; - } else if (strcmp(argv[i], "--noalways-opt") == 0) { - // No support for stressing if we can't use --always-opt. - FLAG_stress_opt = false; - FLAG_stress_deopt = false; - break; - } - } - -#ifdef COMPRESS_STARTUP_DATA_BZ2 - BZip2Decompressor startup_data_decompressor; - int bz2_result = startup_data_decompressor.Decompress(); - if (bz2_result != BZ_OK) { - fprintf(stderr, "bzip error code: %d\n", bz2_result); - exit(1); - } -#endif - - v8::V8::SetFlagsFromCommandLine(&argc, argv, true); - int result = 0; - if (FLAG_stress_opt || FLAG_stress_deopt) { - v8::Testing::SetStressRunType(FLAG_stress_opt - ? v8::Testing::kStressTypeOpt - : v8::Testing::kStressTypeDeopt); - int stress_runs = v8::Testing::GetStressRuns(); - for (int i = 0; i < stress_runs && result == 0; i++) { - printf("============ Stress %d/%d ============\n", - i + 1, stress_runs); - v8::Testing::PrepareStressRun(i); - last_run = (i == stress_runs - 1); - result = RunMain(argc, argv); - } - printf("======== Full Deoptimization =======\n"); - v8::Testing::DeoptimizeAll(); - } else { - result = RunMain(argc, argv); - } v8::V8::Dispose(); - return result; } @@ -393,26 +105,6 @@ // Bind the 'version' function global->Set(v8::String::New("version"), v8::FunctionTemplate::New(Version)); - // Bind the handlers for external arrays. - global->Set(v8::String::New("Int8Array"), - v8::FunctionTemplate::New(Int8Array)); - global->Set(v8::String::New("Uint8Array"), - v8::FunctionTemplate::New(Uint8Array)); - global->Set(v8::String::New("Int16Array"), - v8::FunctionTemplate::New(Int16Array)); - global->Set(v8::String::New("Uint16Array"), - v8::FunctionTemplate::New(Uint16Array)); - global->Set(v8::String::New("Int32Array"), - v8::FunctionTemplate::New(Int32Array)); - global->Set(v8::String::New("Uint32Array"), - v8::FunctionTemplate::New(Uint32Array)); - global->Set(v8::String::New("Float32Array"), - v8::FunctionTemplate::New(Float32Array)); - global->Set(v8::String::New("Float64Array"), - v8::FunctionTemplate::New(Float64Array)); - global->Set(v8::String::New("PixelArray"), - v8::FunctionTemplate::New(PixelArray)); - return v8::Context::New(NULL, global); } @@ -486,7 +178,9 @@ // If not arguments are given args[0] will yield undefined which // converts to the integer value 0. 
int exit_code = args[0]->Int32Value(); - ExitShell(exit_code); + fflush(stdout); + fflush(stderr); + exit(exit_code); return v8::Undefined(); } @@ -496,113 +190,6 @@ } -void ExternalArrayWeakCallback(v8::Persistent object, void* data) { - free(data); - object.Dispose(); -} - - -v8::Handle CreateExternalArray(const v8::Arguments& args, - v8::ExternalArrayType type, - size_t element_size) { - assert(element_size == 1 || - element_size == 2 || - element_size == 4 || - element_size == 8); - if (args.Length() != 1) { - return v8::ThrowException( - v8::String::New("Array constructor needs one parameter.")); - } - static const int kMaxLength = 0x3fffffff; - size_t length = 0; - if (args[0]->IsUint32()) { - length = args[0]->Uint32Value(); - } else if (args[0]->IsNumber()) { - double raw_length = args[0]->NumberValue(); - if (raw_length < 0) { - return v8::ThrowException( - v8::String::New("Array length must not be negative.")); - } - if (raw_length > kMaxLength) { - return v8::ThrowException( - v8::String::New("Array length exceeds maximum length.")); - } - length = static_cast(raw_length); - } else { - return v8::ThrowException( - v8::String::New("Array length must be a number.")); - } - if (length > static_cast(kMaxLength)) { - return v8::ThrowException( - v8::String::New("Array length exceeds maximum length.")); - } - void* data = calloc(length, element_size); - if (data == NULL) { - return v8::ThrowException(v8::String::New("Memory allocation failed.")); - } - v8::Handle array = v8::Object::New(); - v8::Persistent persistent_array = - v8::Persistent::New(array); - persistent_array.MakeWeak(data, ExternalArrayWeakCallback); - persistent_array.MarkIndependent(); - array->SetIndexedPropertiesToExternalArrayData(data, type, length); - array->Set(v8::String::New("length"), v8::Int32::New(length), - v8::ReadOnly); - array->Set(v8::String::New("BYTES_PER_ELEMENT"), - v8::Int32::New(element_size)); - return array; -} - - -v8::Handle Int8Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalByteArray, sizeof(int8_t)); -} - - -v8::Handle Uint8Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalUnsignedByteArray, - sizeof(uint8_t)); -} - - -v8::Handle Int16Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalShortArray, sizeof(int16_t)); -} - - -v8::Handle Uint16Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalUnsignedShortArray, - sizeof(uint16_t)); -} - -v8::Handle Int32Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalIntArray, sizeof(int32_t)); -} - - -v8::Handle Uint32Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalUnsignedIntArray, - sizeof(uint32_t)); -} - - -v8::Handle Float32Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalFloatArray, - sizeof(float)); // NOLINT -} - - -v8::Handle Float64Array(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalDoubleArray, - sizeof(double)); // NOLINT -} - - -v8::Handle PixelArray(const v8::Arguments& args) { - return CreateExternalArray(args, v8::kExternalPixelArray, sizeof(uint8_t)); -} - - // Reads a file into a v8 string. 
v8::Handle ReadFile(const char* name) { FILE* file = fopen(name, "rb"); @@ -625,9 +212,41 @@ } +// Process remaining command line arguments and execute files +int RunMain(int argc, char* argv[]) { + for (int i = 1; i < argc; i++) { + const char* str = argv[i]; + if (strcmp(str, "--shell") == 0) { + run_shell = true; + } else if (strcmp(str, "-f") == 0) { + // Ignore any -f flags for compatibility with the other stand- + // alone JavaScript engines. + continue; + } else if (strncmp(str, "--", 2) == 0) { + printf("Warning: unknown flag %s.\nTry --help for options\n", str); + } else if (strcmp(str, "-e") == 0 && i + 1 < argc) { + // Execute argument given to -e option directly. + v8::Handle file_name = v8::String::New("unnamed"); + v8::Handle source = v8::String::New(argv[++i]); + if (!ExecuteString(source, file_name, false, true)) return 1; + } else { + // Use all other arguments as names of files to load and run. + v8::Handle file_name = v8::String::New(str); + v8::Handle source = ReadFile(str); + if (source.IsEmpty()) { + printf("Error reading '%s'\n", str); + continue; + } + if (!ExecuteString(source, file_name, false, true)) return 1; + } + } + return 0; +} + + // The read-eval-execute loop of the shell. void RunShell(v8::Handle context) { - printf("V8 version %s\n", v8::V8::GetVersion()); + printf("V8 version %s [sample shell]\n", v8::V8::GetVersion()); static const int kBufferSize = 256; // Enter the execution environment before evaluating any code. v8::Context::Scope context_scope(context); diff -Nru libv8-3.4.14.21/SConstruct libv8-3.5.10.24/SConstruct --- libv8-3.4.14.21/SConstruct 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/SConstruct 2011-08-10 11:27:35.000000000 +0000 @@ -153,13 +153,19 @@ } }, 'armeabi:softfp' : { - 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=0', 'CAN_USE_VFP_INSTRUCTIONS'], + 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=0'], + 'vfp3:on': { + 'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS'] + }, 'simulator:none': { 'CCFLAGS': ['-mfloat-abi=softfp'], } }, 'armeabi:hard' : { - 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS'], + 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1'], + 'vfp3:on': { + 'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS'] + }, 'simulator:none': { 'CCFLAGS': ['-mfloat-abi=hard'], } @@ -436,7 +442,7 @@ }, 'arch:x64': { 'CPPDEFINES': ['V8_TARGET_ARCH_X64'], - 'LINKFLAGS': ['/STACK:2091752'] + 'LINKFLAGS': ['/STACK:2097152'] }, } } @@ -496,7 +502,10 @@ } }, 'armeabi:hard' : { - 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS'], + 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1'], + 'vfp3:on': { + 'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS'] + }, 'simulator:none': { 'CCFLAGS': ['-mfloat-abi=hard'], } @@ -601,7 +610,7 @@ }, 'arch:x64': { 'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'], - 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752'] + 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152'] }, 'mode:debug': { 'CCFLAGS': ['/Od'], @@ -756,7 +765,7 @@ }, 'arch:x64': { 'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'], - 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752'] + 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152'] }, 'mode:debug': { 'CCFLAGS': ['/Od'], @@ -822,6 +831,57 @@ 'msvc': { 'all': { 'LIBS': ['winmm', 'ws2_32'] + }, + 'verbose:off': { + 'CCFLAGS': ['/nologo'], + 'LINKFLAGS': ['/NOLOGO'] + }, + 'verbose:on': { + 'LINKFLAGS': ['/VERBOSE'] + }, + 'prof:on': { + 'LINKFLAGS': ['/MAP'] + }, + 'mode:release': { + 'CCFLAGS': ['/O2'], + 'LINKFLAGS': ['/OPT:REF', '/OPT:ICF'], + 'msvcrt:static': { + 'CCFLAGS': ['/MT'] + }, + 'msvcrt:shared': { + 'CCFLAGS': ['/MD'] 
+ }, + 'msvcltcg:on': { + 'CCFLAGS': ['/GL'], + 'pgo:off': { + 'LINKFLAGS': ['/LTCG'], + }, + }, + 'pgo:instrument': { + 'LINKFLAGS': ['/LTCG:PGI'] + }, + 'pgo:optimize': { + 'LINKFLAGS': ['/LTCG:PGO'] + } + }, + 'arch:ia32': { + 'CPPDEFINES': ['V8_TARGET_ARCH_IA32', 'WIN32'], + 'LINKFLAGS': ['/MACHINE:X86'] + }, + 'arch:x64': { + 'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'], + 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152'] + }, + 'mode:debug': { + 'CCFLAGS': ['/Od'], + 'LINKFLAGS': ['/DEBUG'], + 'CPPDEFINES': ['DEBUG'], + 'msvcrt:static': { + 'CCFLAGS': ['/MTd'] + }, + 'msvcrt:shared': { + 'CCFLAGS': ['/MDd'] + } } } } @@ -1039,6 +1099,12 @@ 'default': 'off', 'help': 'compress startup data (snapshot) [Linux only]' }, + 'vfp3': { + 'values': ['on', 'off'], + 'default': 'on', + 'help': 'use vfp3 instructions when building the snapshot [Arm only]' + }, + } ALL_OPTIONS = dict(PLATFORM_OPTIONS, **SIMPLE_OPTIONS) @@ -1343,10 +1409,12 @@ env['SONAME'] = soname # Build the object files by invoking SCons recursively. + d8_env = Environment(tools=tools) + d8_env.Replace(**context.flags['d8']) (object_files, shell_files, mksnapshot, preparser_files) = env.SConscript( join('src', 'SConscript'), build_dir=join('obj', target_id), - exports='context tools', + exports='context tools d8_env', duplicate=False ) @@ -1375,8 +1443,6 @@ context.library_targets.append(library) context.library_targets.append(preparser_library) - d8_env = Environment(tools=tools) - d8_env.Replace(**context.flags['d8']) context.ApplyEnvOverrides(d8_env) if context.options['library'] == 'static': shell = d8_env.Program('d8' + suffix, object_files + shell_files) diff -Nru libv8-3.4.14.21/src/api.cc libv8-3.5.10.24/src/api.cc --- libv8-3.4.14.21/src/api.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/api.cc 2011-10-14 09:24:03.000000000 +0000 @@ -35,6 +35,7 @@ #include "debug.h" #include "deoptimizer.h" #include "execution.h" +#include "flags.h" #include "global-handles.h" #include "heap-profiler.h" #include "messages.h" @@ -84,7 +85,7 @@ if (has_pending_exception) { \ if (handle_scope_implementer->CallDepthIsZero() && \ (isolate)->is_out_of_memory()) { \ - if (!handle_scope_implementer->ignore_out_of_memory()) \ + if (!(isolate)->ignore_out_of_memory()) \ i::V8::FatalProcessOutOfMemory(NULL); \ } \ bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero(); \ @@ -877,7 +878,6 @@ i::Handle info) { info->set_tag(i::Smi::FromInt(Consts::FUNCTION_TEMPLATE)); info->set_flag(0); - info->set_prototype_attributes(i::Smi::FromInt(v8::None)); } @@ -1100,14 +1100,13 @@ } -void FunctionTemplate::SetPrototypeAttributes(int attributes) { +void FunctionTemplate::ReadOnlyPrototype() { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetPrototypeAttributes()")) { return; } ENTER_V8(isolate); - Utils::OpenHandle(this)->set_prototype_attributes( - i::Smi::FromInt(attributes)); + Utils::OpenHandle(this)->set_read_only_prototype(true); } @@ -1407,7 +1406,7 @@ ScriptData* ScriptData::PreCompile(const char* input, int length) { i::Utf8ToUC16CharacterStream stream( reinterpret_cast(input), length); - return i::ParserApi::PreParse(&stream, NULL); + return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_block_scoping); } @@ -1416,10 +1415,10 @@ if (str->IsExternalTwoByteString()) { i::ExternalTwoByteStringUC16CharacterStream stream( i::Handle::cast(str), 0, str->length()); - return i::ParserApi::PreParse(&stream, NULL); + return i::ParserApi::PreParse(&stream, NULL, 
i::FLAG_harmony_block_scoping); } else { i::GenericStringUC16CharacterStream stream(str, 0, str->length()); - return i::ParserApi::PreParse(&stream, NULL); + return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_block_scoping); } } @@ -3165,10 +3164,9 @@ i::Object* constructor = object->map()->constructor(); i::JSFunction* function; if (!constructor->IsJSFunction()) { - // API functions have null as a constructor, + // Functions have null as a constructor, // but any JSFunction knows its context immediately. - ASSERT(object->IsJSFunction() && - i::JSFunction::cast(object)->shared()->IsApiFunction()); + ASSERT(object->IsJSFunction()); function = i::JSFunction::cast(object); } else { function = i::JSFunction::cast(constructor); @@ -3194,39 +3192,7 @@ ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle self = Utils::OpenHandle(this); - i::Handle hidden_props_obj(i::GetHiddenProperties(self, true)); - if (!hidden_props_obj->IsJSObject()) { - // We failed to create hidden properties. That's a detached - // global proxy. - ASSERT(hidden_props_obj->IsUndefined()); - return 0; - } - i::Handle hidden_props = - i::Handle::cast(hidden_props_obj); - i::Handle hash_symbol = isolate->factory()->identity_hash_symbol(); - if (hidden_props->HasLocalProperty(*hash_symbol)) { - i::Handle hash = i::GetProperty(hidden_props, hash_symbol); - CHECK(!hash.is_null()); - CHECK(hash->IsSmi()); - return i::Smi::cast(*hash)->value(); - } - - int hash_value; - int attempts = 0; - do { - // Generate a random 32-bit hash value but limit range to fit - // within a smi. - hash_value = i::V8::Random(self->GetIsolate()) & i::Smi::kMaxValue; - attempts++; - } while (hash_value == 0 && attempts < 30); - hash_value = hash_value != 0 ? hash_value : 1; // never return 0 - CHECK(!i::SetLocalPropertyIgnoreAttributes( - hidden_props, - hash_symbol, - i::Handle(i::Smi::FromInt(hash_value)), - static_cast(None)).is_null()); - - return hash_value; + return i::GetIdentityHash(self); } @@ -3237,7 +3203,9 @@ ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle self = Utils::OpenHandle(this); - i::Handle hidden_props(i::GetHiddenProperties(self, true)); + i::Handle hidden_props(i::GetHiddenProperties( + self, + i::JSObject::ALLOW_CREATION)); i::Handle key_obj = Utils::OpenHandle(*key); i::Handle value_obj = Utils::OpenHandle(*value); EXCEPTION_PREAMBLE(isolate); @@ -3259,7 +3227,9 @@ return Local()); ENTER_V8(isolate); i::Handle self = Utils::OpenHandle(this); - i::Handle hidden_props(i::GetHiddenProperties(self, false)); + i::Handle hidden_props(i::GetHiddenProperties( + self, + i::JSObject::OMIT_CREATION)); if (hidden_props->IsUndefined()) { return v8::Local(); } @@ -3281,7 +3251,9 @@ ENTER_V8(isolate); i::HandleScope scope(isolate); i::Handle self = Utils::OpenHandle(this); - i::Handle hidden_props(i::GetHiddenProperties(self, false)); + i::Handle hidden_props(i::GetHiddenProperties( + self, + i::JSObject::OMIT_CREATION)); if (hidden_props->IsUndefined()) { return true; } @@ -3649,7 +3621,7 @@ int String::WriteUtf8(char* buffer, int capacity, int* nchars_ref, - WriteHints hints) const { + int options) const { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); if (IsDeadCheck(isolate, "v8::String::WriteUtf8()")) return 0; LOG_API(isolate, "String::WriteUtf8"); @@ -3657,7 +3629,7 @@ i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer(); i::Handle str = Utils::OpenHandle(this); isolate->string_tracker()->RecordWrite(str); - if (hints & HINT_MANY_WRITES_EXPECTED) { + if (options & 
HINT_MANY_WRITES_EXPECTED) { // Flatten the string for efficiency. This applies whether we are // using StringInputBuffer or Get(i) to access the characters. str->TryFlatten(); @@ -3697,7 +3669,8 @@ } } if (nchars_ref != NULL) *nchars_ref = nchars; - if (i == len && (capacity == -1 || pos < capacity)) + if (!(options & NO_NULL_TERMINATION) && + (i == len && (capacity == -1 || pos < capacity))) buffer[pos++] = '\0'; return pos; } @@ -3706,7 +3679,7 @@ int String::WriteAscii(char* buffer, int start, int length, - WriteHints hints) const { + int options) const { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); if (IsDeadCheck(isolate, "v8::String::WriteAscii()")) return 0; LOG_API(isolate, "String::WriteAscii"); @@ -3715,7 +3688,7 @@ ASSERT(start >= 0 && length >= -1); i::Handle str = Utils::OpenHandle(this); isolate->string_tracker()->RecordWrite(str); - if (hints & HINT_MANY_WRITES_EXPECTED) { + if (options & HINT_MANY_WRITES_EXPECTED) { // Flatten the string for efficiency. This applies whether we are // using StringInputBuffer or Get(i) to access the characters. str->TryFlatten(); @@ -3731,7 +3704,7 @@ if (c == '\0') c = ' '; buffer[i] = c; } - if (length == -1 || i < length) + if (!(options & NO_NULL_TERMINATION) && (length == -1 || i < length)) buffer[i] = '\0'; return i; } @@ -3740,7 +3713,7 @@ int String::Write(uint16_t* buffer, int start, int length, - WriteHints hints) const { + int options) const { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); if (IsDeadCheck(isolate, "v8::String::Write()")) return 0; LOG_API(isolate, "String::Write"); @@ -3748,7 +3721,7 @@ ASSERT(start >= 0 && length >= -1); i::Handle str = Utils::OpenHandle(this); isolate->string_tracker()->RecordWrite(str); - if (hints & HINT_MANY_WRITES_EXPECTED) { + if (options & HINT_MANY_WRITES_EXPECTED) { // Flatten the string for efficiency. This applies whether we are // using StringInputBuffer or Get(i) to access the characters. 
str->TryFlatten(); @@ -3758,7 +3731,8 @@ end = str->length(); if (end < 0) return 0; i::String::WriteToFlat(*str, buffer, start, end); - if (length == -1 || end - start < length) { + if (!(options & NO_NULL_TERMINATION) && + (length == -1 || end - start < length)) { buffer[end - start] = '\0'; } return end - start; @@ -4146,7 +4120,7 @@ v8::Local Context::GetEntered() { i::Isolate* isolate = i::Isolate::Current(); - if (IsDeadCheck(isolate, "v8::Context::GetEntered()")) { + if (!EnsureInitializedForIsolate(isolate, "v8::Context::GetEntered()")) { return Local(); } i::Handle last = @@ -4287,8 +4261,8 @@ Local v8::External::Wrap(void* data) { i::Isolate* isolate = i::Isolate::Current(); STATIC_ASSERT(sizeof(data) == sizeof(i::Address)); - LOG_API(isolate, "External::Wrap"); EnsureInitializedForIsolate(isolate, "v8::External::Wrap()"); + LOG_API(isolate, "External::Wrap"); ENTER_V8(isolate); v8::Local result = CanBeEncodedAsSmi(data) @@ -4332,8 +4306,8 @@ Local v8::External::New(void* data) { STATIC_ASSERT(sizeof(data) == sizeof(i::Address)); i::Isolate* isolate = i::Isolate::Current(); - LOG_API(isolate, "External::New"); EnsureInitializedForIsolate(isolate, "v8::External::New()"); + LOG_API(isolate, "External::New"); ENTER_V8(isolate); return ExternalNewImpl(data); } @@ -4825,8 +4799,7 @@ void V8::IgnoreOutOfMemoryException() { - EnterIsolateIfNeeded()->handle_scope_implementer()->set_ignore_out_of_memory( - true); + EnterIsolateIfNeeded()->set_ignore_out_of_memory(true); } @@ -5793,6 +5766,16 @@ } +v8::Handle HeapGraphNode::GetHeapValue() const { + i::Isolate* isolate = i::Isolate::Current(); + IsDeadCheck(isolate, "v8::HeapGraphNode::GetHeapValue"); + i::Handle object = ToInternal(this)->GetHeapObject(); + return v8::Handle(!object.is_null() ? + ToApi(object) : ToApi( + isolate->factory()->undefined_value())); +} + + static i::HeapSnapshot* ToInternal(const HeapSnapshot* snapshot) { return const_cast( reinterpret_cast(snapshot)); diff -Nru libv8-3.4.14.21/src/api.h libv8-3.5.10.24/src/api.h --- libv8-3.4.14.21/src/api.h 2011-06-08 10:05:15.000000000 +0000 +++ libv8-3.5.10.24/src/api.h 2011-08-22 11:03:23.000000000 +0000 @@ -404,9 +404,12 @@ entered_contexts_(0), saved_contexts_(0), spare_(NULL), - ignore_out_of_memory_(false), call_depth_(0) { } + ~HandleScopeImplementer() { + DeleteArray(spare_); + } + // Threading support for handle data. static int ArchiveSpacePerThread(); char* RestoreThread(char* from); @@ -437,10 +440,6 @@ inline bool HasSavedContexts(); inline List* blocks() { return &blocks_; } - inline bool ignore_out_of_memory() { return ignore_out_of_memory_; } - inline void set_ignore_out_of_memory(bool value) { - ignore_out_of_memory_ = value; - } private: void ResetAfterArchive() { @@ -448,7 +447,6 @@ entered_contexts_.Initialize(0); saved_contexts_.Initialize(0); spare_ = NULL; - ignore_out_of_memory_ = false; call_depth_ = 0; } @@ -473,7 +471,6 @@ // Used as a stack to keep track of saved contexts. List saved_contexts_; Object** spare_; - bool ignore_out_of_memory_; int call_depth_; // This is only used for threading support. 
v8::ImplementationUtilities::HandleScopeData handle_scope_data_; diff -Nru libv8-3.4.14.21/src/apinatives.js libv8-3.5.10.24/src/apinatives.js --- libv8-3.4.14.21/src/apinatives.js 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/src/apinatives.js 2011-08-15 13:01:23.000000000 +0000 @@ -49,7 +49,10 @@ return InstantiateFunction(data, name); case kNewObjectTag: var Constructor = %GetTemplateField(data, kApiConstructorOffset); - var result = Constructor ? new (Instantiate(Constructor))() : {}; + // Note: Do not directly use a function template as a condition, our + // internal ToBoolean doesn't handle that! + var result = typeof Constructor === 'undefined' ? + {} : new (Instantiate(Constructor))(); ConfigureTemplateInstance(result, data); result = %ToFastProperties(result); return result; @@ -73,18 +76,19 @@ if (name) %FunctionSetName(fun, name); cache[serialNumber] = fun; var prototype = %GetTemplateField(data, kApiPrototypeTemplateOffset); - var attributes = %GetTemplateField(data, kApiPrototypeAttributesOffset); - if (attributes != NONE) { - %IgnoreAttributesAndSetProperty( - fun, "prototype", - prototype ? Instantiate(prototype) : {}, - attributes); - } else { - fun.prototype = prototype ? Instantiate(prototype) : {}; + var flags = %GetTemplateField(data, kApiFlagOffset); + // Note: Do not directly use an object template as a condition, our + // internal ToBoolean doesn't handle that! + fun.prototype = typeof prototype === 'undefined' ? + {} : Instantiate(prototype); + if (flags & (1 << kReadOnlyPrototypeBit)) { + %FunctionSetReadOnlyPrototype(fun); } %SetProperty(fun.prototype, "constructor", fun, DONT_ENUM); var parent = %GetTemplateField(data, kApiParentTemplateOffset); - if (parent) { + // Note: Do not directly use a function template as a condition, our + // internal ToBoolean doesn't handle that! + if (!(typeof parent === 'undefined')) { var parent_fun = Instantiate(parent); fun.prototype.__proto__ = parent_fun.prototype; } diff -Nru libv8-3.4.14.21/src/arm/assembler-arm.cc libv8-3.5.10.24/src/arm/assembler-arm.cc --- libv8-3.4.14.21/src/arm/assembler-arm.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/arm/assembler-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -692,11 +692,11 @@ void Assembler::next(Label* L) { ASSERT(L->is_linked()); int link = target_at(L->pos()); - if (link > 0) { - L->link_to(link); - } else { - ASSERT(link == kEndOfChain); + if (link == kEndOfChain) { L->Unuse(); + } else { + ASSERT(link >= 0); + L->link_to(link); } } diff -Nru libv8-3.4.14.21/src/arm/assembler-arm.h libv8-3.5.10.24/src/arm/assembler-arm.h --- libv8-3.4.14.21/src/arm/assembler-arm.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/arm/assembler-arm.h 2011-08-24 12:02:41.000000000 +0000 @@ -377,6 +377,9 @@ // immediate INLINE(explicit Operand(int32_t immediate, RelocInfo::Mode rmode = RelocInfo::NONE)); + INLINE(static Operand Zero()) { + return Operand(static_cast(0)); + } INLINE(explicit Operand(const ExternalReference& f)); explicit Operand(Handle handle); INLINE(explicit Operand(Smi* value)); diff -Nru libv8-3.4.14.21/src/arm/builtins-arm.cc libv8-3.5.10.24/src/arm/builtins-arm.cc --- libv8-3.4.14.21/src/arm/builtins-arm.cc 2011-07-04 14:01:31.000000000 +0000 +++ libv8-3.5.10.24/src/arm/builtins-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -138,7 +138,7 @@ __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset)); // Clear the heap tag on the elements array. 
- ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ sub(scratch1, scratch1, Operand(kHeapObjectTag)); // Initialize the FixedArray and fill it with holes. FixedArray length is @@ -207,7 +207,7 @@ // Allocate the JSArray object together with space for a FixedArray with the // requested number of elements. __ bind(¬_empty); - ASSERT(kSmiTagSize == 1 && kSmiTag == 0); + STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); __ mov(elements_array_end, Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize)); __ add(elements_array_end, @@ -243,7 +243,7 @@ FieldMemOperand(result, JSArray::kElementsOffset)); // Clear the heap tag on the elements array. - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ sub(elements_array_storage, elements_array_storage, Operand(kHeapObjectTag)); @@ -255,7 +255,7 @@ __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex); ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset); __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex)); - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ tst(array_size, array_size); // Length of the FixedArray is the number of pre-allocated elements if // the actual JSArray has length 0 and the size of the JSArray for non-empty @@ -272,7 +272,7 @@ // result: JSObject // elements_array_storage: elements array element storage // array_size: smi-tagged size of elements array - ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); + STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); __ add(elements_array_end, elements_array_storage, Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize)); @@ -337,14 +337,14 @@ __ bind(&argc_one_or_more); __ cmp(r0, Operand(1)); __ b(ne, &argc_two_or_more); - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ ldr(r2, MemOperand(sp)); // Get the argument from the stack. __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC); __ b(ne, call_generic_code); // Handle construction of an empty array of a certain size. Bail out if size // is too large to actually allocate an elements array. - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize)); __ b(ge, call_generic_code); @@ -571,7 +571,7 @@ // Is it a String? __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset)); - ASSERT(kNotStringTag != 0); + STATIC_ASSERT(kNotStringTag != 0); __ tst(r3, Operand(kIsNotStringMask)); __ b(ne, &convert_argument); __ mov(argument, r0); diff -Nru libv8-3.4.14.21/src/arm/code-stubs-arm.cc libv8-3.5.10.24/src/arm/code-stubs-arm.cc --- libv8-3.4.14.21/src/arm/code-stubs-arm.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/arm/code-stubs-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -549,7 +549,7 @@ // | s | exp | mantissa | // Check for zero. - __ cmp(int_scratch, Operand(0)); + __ cmp(int_scratch, Operand::Zero()); __ mov(dst2, int_scratch); __ mov(dst1, int_scratch); __ b(eq, &done); @@ -557,7 +557,7 @@ // Preload the sign of the value. __ and_(dst2, int_scratch, Operand(HeapNumber::kSignMask), SetCC); // Get the absolute value of the object (as an unsigned integer). - __ rsb(int_scratch, int_scratch, Operand(0), SetCC, mi); + __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi); // Get mantisssa[51:20]. @@ -589,7 +589,7 @@ __ mov(scratch2, Operand(int_scratch, LSL, scratch2)); __ orr(dst2, dst2, scratch2); // Set dst1 to 0. 
- __ mov(dst1, Operand(0)); + __ mov(dst1, Operand::Zero()); } __ bind(&done); } @@ -657,7 +657,7 @@ // Check for 0 and -0. __ bic(scratch1, dst1, Operand(HeapNumber::kSignMask)); __ orr(scratch1, scratch1, Operand(dst2)); - __ cmp(scratch1, Operand(0)); + __ cmp(scratch1, Operand::Zero()); __ b(eq, &done); // Check that the value can be exactly represented by a 32-bit integer. @@ -730,7 +730,7 @@ // Check for 0 and -0. __ bic(dst, scratch1, Operand(HeapNumber::kSignMask)); __ orr(dst, scratch2, Operand(dst)); - __ cmp(dst, Operand(0)); + __ cmp(dst, Operand::Zero()); __ b(eq, &done); DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32); @@ -747,7 +747,7 @@ // Set the sign. __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); __ tst(scratch1, Operand(HeapNumber::kSignMask)); - __ rsb(dst, dst, Operand(0), LeaveCC, mi); + __ rsb(dst, dst, Operand::Zero(), LeaveCC, mi); } __ bind(&done); @@ -1603,83 +1603,113 @@ } -// The stub returns zero for false, and a non-zero value for true. +// The stub expects its argument in the tos_ register and returns its result in +// it, too: zero for false, and a non-zero value for true. void ToBooleanStub::Generate(MacroAssembler* masm) { // This stub uses VFP3 instructions. CpuFeatures::Scope scope(VFP3); - Label false_result, true_result, not_string; + Label patch; const Register map = r9.is(tos_) ? r7 : r9; - // undefined -> false - __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); - __ cmp(tos_, ip); - __ b(eq, &false_result); + // undefined -> false. + CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); - // Boolean -> its value - __ LoadRoot(ip, Heap::kFalseValueRootIndex); - __ cmp(tos_, ip); - __ b(eq, &false_result); - __ LoadRoot(ip, Heap::kTrueValueRootIndex); - __ cmp(tos_, ip); - // "tos_" is a register and contains a non-zero value. Hence we implicitly - // return true if the equal condition is satisfied. - __ Ret(eq); + // Boolean -> its value. + CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false); + CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true); + + // 'null' -> false. + CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false); + + if (types_.Contains(SMI)) { + // Smis: 0 -> false, all other -> true + __ tst(tos_, Operand(kSmiTagMask)); + // tos_ contains the correct return value already + __ Ret(eq); + } else if (types_.NeedsMap()) { + // If we need a map later and have a Smi -> patch. + __ JumpIfSmi(tos_, &patch); + } - // Smis: 0 -> false, all other -> true - __ tst(tos_, tos_); - __ b(eq, &false_result); - __ tst(tos_, Operand(kSmiTagMask)); - // "tos_" is a register and contains a non-zero value. Hence we implicitly - // return true if the not equal condition is satisfied. - __ Ret(eq); + if (types_.NeedsMap()) { + __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset)); + + if (types_.CanBeUndetectable()) { + __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); + __ tst(ip, Operand(1 << Map::kIsUndetectable)); + // Undetectable -> false. + __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne); + __ Ret(ne); + } + } - // 'null' -> false - __ LoadRoot(ip, Heap::kNullValueRootIndex); - __ cmp(tos_, ip); - __ b(eq, &false_result); - - // Get the map of the heap object. - __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset)); - - // Undetectable -> false. - __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); - __ tst(ip, Operand(1 << Map::kIsUndetectable)); - __ b(&false_result, ne); - - // JavaScript object -> true. 
- __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); - // "tos_" is a register and contains a non-zero value. Hence we implicitly - // return true if the greater than condition is satisfied. - __ Ret(ge); + if (types_.Contains(SPEC_OBJECT)) { + // Spec object -> true. + __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); + // tos_ contains the correct non-zero return value already. + __ Ret(ge); + } - // String value -> false iff empty. + if (types_.Contains(STRING)) { + // String value -> false iff empty. __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); - __ b(¬_string, ge); - __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset)); - // Return string length as boolean value, i.e. return false iff length is 0. - __ Ret(); + __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt); + __ Ret(lt); // the string length is OK as the return value + } - __ bind(¬_string); - // HeapNumber -> false iff +0, -0, or NaN. - __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); - __ b(&true_result, ne); - __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset)); - __ VFPCompareAndSetFlags(d1, 0.0); - // "tos_" is a register, and contains a non zero value by default. - // Hence we only need to overwrite "tos_" with zero to return false for - // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. - __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO - __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN - __ Ret(); + if (types_.Contains(HEAP_NUMBER)) { + // Heap number -> false iff +0, -0, or NaN. + Label not_heap_number; + __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); + __ b(ne, ¬_heap_number); + __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset)); + __ VFPCompareAndSetFlags(d1, 0.0); + // "tos_" is a register, and contains a non zero value by default. + // Hence we only need to overwrite "tos_" with zero to return false for + // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. + __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO + __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN + __ Ret(); + __ bind(¬_heap_number); + } - // Return 1/0 for true/false in tos_. - __ bind(&true_result); - __ mov(tos_, Operand(1, RelocInfo::NONE)); - __ Ret(); - __ bind(&false_result); - __ mov(tos_, Operand(0, RelocInfo::NONE)); - __ Ret(); + __ bind(&patch); + GenerateTypeTransition(masm); +} + + +void ToBooleanStub::CheckOddball(MacroAssembler* masm, + Type type, + Heap::RootListIndex value, + bool result) { + if (types_.Contains(type)) { + // If we see an expected oddball, return its ToBoolean value tos_. + __ LoadRoot(ip, value); + __ cmp(tos_, ip); + // The value of a root is never NULL, so we can avoid loading a non-null + // value into tos_ when we want to return 'true'. + if (!result) { + __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); + } + __ Ret(eq); + } +} + + +void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { + if (!tos_.is(r3)) { + __ mov(r3, Operand(tos_)); + } + __ mov(r2, Operand(Smi::FromInt(tos_.code()))); + __ mov(r1, Operand(Smi::FromInt(types_.ToByte()))); + __ Push(r3, r2, r1); + // Patch the caller to an appropriate specialized stub and return the + // operation result to the caller of the stub. 
+ __ TailCallExternalReference( + ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), + 3, + 1); } @@ -2394,7 +2424,6 @@ Register left = r1; Register right = r0; Register scratch1 = r7; - Register scratch2 = r9; // Perform combined smi check on both operands. __ orr(scratch1, left, Operand(right)); @@ -2588,7 +2617,7 @@ __ b(mi, &return_heap_number); // Check for minus zero. Return heap number for minus zero. Label not_zero; - __ cmp(scratch1, Operand(0)); + __ cmp(scratch1, Operand::Zero()); __ b(ne, ¬_zero); __ vmov(scratch2, d5.high()); __ tst(scratch2, Operand(HeapNumber::kSignMask)); @@ -3080,7 +3109,6 @@ Label no_update; Label skip_cache; - const Register heap_number_map = r5; // Call C function to calculate the result and update the cache. // Register r0 holds precalculated cache entry address; preserve @@ -3551,7 +3579,7 @@ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); __ mov(r5, Operand(ExternalReference(js_entry_sp))); __ ldr(r6, MemOperand(r5)); - __ cmp(r6, Operand(0)); + __ cmp(r6, Operand::Zero()); __ b(ne, &non_outermost_js); __ str(fp, MemOperand(r5)); __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); @@ -3626,7 +3654,7 @@ __ pop(r5); __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); __ b(ne, &non_outermost_js_2); - __ mov(r6, Operand(0)); + __ mov(r6, Operand::Zero()); __ mov(r5, Operand(ExternalReference(js_entry_sp))); __ str(r6, MemOperand(r5)); __ bind(&non_outermost_js_2); @@ -3827,7 +3855,7 @@ __ Push(r0, r1); __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); __ LeaveInternalFrame(); - __ cmp(r0, Operand(0)); + __ cmp(r0, Operand::Zero()); __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); __ Ret(HasArgsInRegisters() ? 0 : 2); @@ -3961,7 +3989,7 @@ FixedArray::kHeaderSize + 2 * kPointerSize; // If there are no mapped parameters, we do not need the parameter_map. __ cmp(r1, Operand(Smi::FromInt(0))); - __ mov(r9, Operand(0), LeaveCC, eq); + __ mov(r9, Operand::Zero(), LeaveCC, eq); __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne); __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne); @@ -3985,7 +4013,7 @@ __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX))); __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset)); - __ cmp(r1, Operand(0)); + __ cmp(r1, Operand::Zero()); __ ldr(r4, MemOperand(r4, kNormalOffset), eq); __ ldr(r4, MemOperand(r4, kAliasedOffset), ne); @@ -4339,6 +4367,8 @@ __ cmp(r2, Operand(r0, ASR, kSmiTagSize)); __ b(gt, &runtime); + // Reset offset for possibly sliced string. + __ mov(r9, Operand(0)); // subject: Subject string // regexp_data: RegExp data (FixedArray) // Check the representation and encoding of the subject string. @@ -4346,33 +4376,45 @@ __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); // First check for flat string. - __ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask)); + __ and_(r1, r0, Operand(kIsNotStringMask | kStringRepresentationMask), SetCC); STATIC_ASSERT((kStringTag | kSeqStringTag) == 0); __ b(eq, &seq_string); // subject: Subject string // regexp_data: RegExp data (FixedArray) - // Check for flat cons string. + // Check for flat cons string or sliced string. // A flat cons string is a cons string where the second part is the empty // string. In that case the subject string is just the first part of the cons // string. 
Also in this case the first part of the cons string is known to be // a sequential string or an external string. - STATIC_ASSERT(kExternalStringTag !=0); - STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); - __ tst(r0, Operand(kIsNotStringMask | kExternalStringTag)); - __ b(ne, &runtime); + // In the case of a sliced string its offset has to be taken into account. + Label cons_string, check_encoding; + STATIC_ASSERT(kConsStringTag < kExternalStringTag); + STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); + __ cmp(r1, Operand(kExternalStringTag)); + __ b(lt, &cons_string); + __ b(eq, &runtime); + + // String is sliced. + __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset)); + __ mov(r9, Operand(r9, ASR, kSmiTagSize)); + __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); + // r9: offset of sliced string, smi-tagged. + __ jmp(&check_encoding); + // String is a cons string, check whether it is flat. + __ bind(&cons_string); __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset)); __ LoadRoot(r1, Heap::kEmptyStringRootIndex); __ cmp(r0, r1); __ b(ne, &runtime); __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); + // Is first part of cons or parent of slice a flat string? + __ bind(&check_encoding); __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); - // Is first part a flat string? STATIC_ASSERT(kSeqStringTag == 0); __ tst(r0, Operand(kStringRepresentationMask)); __ b(ne, &runtime); - __ bind(&seq_string); // subject: Subject string // regexp_data: RegExp data (FixedArray) @@ -4438,21 +4480,30 @@ // For arguments 4 and 3 get string length, calculate start of string data and // calculate the shift of the index (0 for ASCII and 1 for two byte). - __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset)); - __ mov(r0, Operand(r0, ASR, kSmiTagSize)); STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); - __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ add(r8, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); __ eor(r3, r3, Operand(1)); - // Argument 4 (r3): End of string data - // Argument 3 (r2): Start of string data + // Load the length from the original subject string from the previous stack + // frame. Therefore we have to use fp, which points exactly to two pointer + // sizes below the previous sp. (Because creating a new stack frame pushes + // the previous fp onto the stack and moves up sp by 2 * kPointerSize.) + __ ldr(r0, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); + // If slice offset is not 0, load the length from the original sliced string. + // Argument 4, r3: End of string data + // Argument 3, r2: Start of string data + // Prepare start and end index of the input. + __ add(r9, r8, Operand(r9, LSL, r3)); __ add(r2, r9, Operand(r1, LSL, r3)); - __ add(r3, r9, Operand(r0, LSL, r3)); + + __ ldr(r8, FieldMemOperand(r0, String::kLengthOffset)); + __ mov(r8, Operand(r8, ASR, kSmiTagSize)); + __ add(r3, r9, Operand(r8, LSL, r3)); // Argument 2 (r1): Previous index. // Already there // Argument 1 (r0): Subject string. - __ mov(r0, subject); + // Already there // Locate the code entry and call it. __ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); @@ -4469,12 +4520,12 @@ // Check the result. 
Label success; - __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS)); + __ cmp(subject, Operand(NativeRegExpMacroAssembler::SUCCESS)); __ b(eq, &success); Label failure; - __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE)); + __ cmp(subject, Operand(NativeRegExpMacroAssembler::FAILURE)); __ b(eq, &failure); - __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); + __ cmp(subject, Operand(NativeRegExpMacroAssembler::EXCEPTION)); // If not exception it can only be retry. Handle that in the runtime system. __ b(ne, &runtime); // Result must now be exception. If there is no pending exception already a @@ -4486,18 +4537,18 @@ __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address, isolate))); __ ldr(r0, MemOperand(r2, 0)); - __ cmp(r0, r1); + __ cmp(subject, r1); __ b(eq, &runtime); __ str(r1, MemOperand(r2, 0)); // Clear pending exception. // Check if the exception is a termination. If so, throw as uncatchable. __ LoadRoot(ip, Heap::kTerminationExceptionRootIndex); - __ cmp(r0, ip); + __ cmp(subject, ip); Label termination_exception; __ b(eq, &termination_exception); - __ Throw(r0); // Expects thrown value in r0. + __ Throw(subject); // Expects thrown value in r0. __ bind(&termination_exception); __ ThrowUncatchable(TERMINATION, r0); // Expects thrown value in r0. @@ -4775,6 +4826,7 @@ Label flat_string; Label ascii_string; Label got_char_code; + Label sliced_string; // If the receiver is a smi trigger the non-string case. __ JumpIfSmi(object_, receiver_not_string_); @@ -4804,7 +4856,11 @@ __ b(eq, &flat_string); // Handle non-flat strings. - __ tst(result_, Operand(kIsConsStringMask)); + __ and_(result_, result_, Operand(kStringRepresentationMask)); + STATIC_ASSERT(kConsStringTag < kExternalStringTag); + STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); + __ cmp(result_, Operand(kExternalStringTag)); + __ b(gt, &sliced_string); __ b(eq, &call_runtime_); // ConsString. @@ -4812,15 +4868,26 @@ // this is really a flat string in a cons string). If that is not // the case we would rather go to the runtime system now to flatten // the string. + Label assure_seq_string; __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset)); __ LoadRoot(ip, Heap::kEmptyStringRootIndex); __ cmp(result_, Operand(ip)); __ b(ne, &call_runtime_); // Get the first of the two strings and load its instance type. __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset)); + __ jmp(&assure_seq_string); + + // SlicedString, unpack and add offset. + __ bind(&sliced_string); + __ ldr(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset)); + __ add(scratch_, scratch_, result_); + __ ldr(object_, FieldMemOperand(object_, SlicedString::kParentOffset)); + + // Assure that we are dealing with a sequential string. Go to runtime if not. + __ bind(&assure_seq_string); __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); - // If the first cons component is also non-flat, then go to runtime. + // Check that parent is not an external string. Go to runtime otherwise. STATIC_ASSERT(kSeqStringTag == 0); __ tst(result_, Operand(kStringRepresentationMask)); __ b(ne, &call_runtime_); @@ -5400,10 +5467,17 @@ // Check bounds and smi-ness. Register to = r6; Register from = r7; + + if (FLAG_string_slices) { + __ nop(0); // Jumping as first instruction would crash the code generation. 
+ __ jmp(&runtime); + } + __ Ldrd(to, from, MemOperand(sp, kToOffset)); STATIC_ASSERT(kFromOffset == kToOffset + 4); STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); + // I.e., arithmetic shift right by one un-smi-tags. __ mov(r2, Operand(to, ASR, 1), SetCC); __ mov(r3, Operand(from, ASR, 1), SetCC, cc); @@ -5412,7 +5486,6 @@ __ b(mi, &runtime); // From is negative. // Both to and from are smis. - __ sub(r2, r2, Operand(r3), SetCC); __ b(mi, &runtime); // Fail if from > to. // Special handling of sub-strings of length 1 and 2. One character strings @@ -5667,7 +5740,7 @@ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); __ add(left, left, Operand(scratch1)); __ add(right, right, Operand(scratch1)); - __ rsb(length, length, Operand(0)); + __ rsb(length, length, Operand::Zero()); Register index = length; // index = -length; // Compare loop. @@ -6285,12 +6358,8 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm, ExternalReference function) { - __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()), - RelocInfo::CODE_TARGET)); __ mov(r2, Operand(function)); - // Push return address (accessible to GC through exit frame pc). - __ str(pc, MemOperand(sp, 0)); - __ Jump(r2); // Call the api function. + GenerateCall(masm, r2); } @@ -6299,8 +6368,14 @@ __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()), RelocInfo::CODE_TARGET)); // Push return address (accessible to GC through exit frame pc). - __ str(pc, MemOperand(sp, 0)); + // Note that using pc with str is deprecated. + Label start; + __ bind(&start); + __ add(ip, pc, Operand(Assembler::kInstrSize)); + __ str(ip, MemOperand(sp, 0)); __ Jump(target); // Call the C++ function. + ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta, + masm->SizeOfCodeGeneratedSince(&start)); } @@ -6523,7 +6598,7 @@ // treated as a lookup success. For positive lookup probing failure // should be treated as lookup failure. if (mode_ == POSITIVE_LOOKUP) { - __ mov(result, Operand(0)); + __ mov(result, Operand::Zero()); __ Ret(); } @@ -6532,7 +6607,7 @@ __ Ret(); __ bind(&not_in_dictionary); - __ mov(result, Operand(0)); + __ mov(result, Operand::Zero()); __ Ret(); } diff -Nru libv8-3.4.14.21/src/arm/deoptimizer-arm.cc libv8-3.5.10.24/src/arm/deoptimizer-arm.cc --- libv8-3.4.14.21/src/arm/deoptimizer-arm.cc 2011-08-12 12:18:43.000000000 +0000 +++ libv8-3.5.10.24/src/arm/deoptimizer-arm.cc 2011-08-15 13:01:23.000000000 +0000 @@ -35,7 +35,7 @@ namespace v8 { namespace internal { -int Deoptimizer::table_entry_size_ = 16; +const int Deoptimizer::table_entry_size_ = 16; int Deoptimizer::patch_size() { @@ -533,8 +533,6 @@ output_frame->SetContinuation( reinterpret_cast(continuation->entry())); } - - if (output_count_ - 1 == frame_index) iterator->Done(); } @@ -595,6 +593,8 @@ __ vstm(db_w, sp, first, last); // Push all 16 registers (needed to populate FrameDescription::registers_). + // TODO(1588) Note that using pc with stm is deprecated, so we should perhaps + // handle this a bit differently. __ stm(db_w, sp, restored_regs | sp.bit() | lr.bit() | pc.bit()); const int kSavedRegistersAreaSize = diff -Nru libv8-3.4.14.21/src/arm/disasm-arm.cc libv8-3.5.10.24/src/arm/disasm-arm.cc --- libv8-3.4.14.21/src/arm/disasm-arm.cc 2011-05-16 12:14:13.000000000 +0000 +++ libv8-3.5.10.24/src/arm/disasm-arm.cc 2011-08-10 11:27:35.000000000 +0000 @@ -200,7 +200,7 @@ // These shift names are defined in a way to match the native disassembler // formatting. See for example the command "objdump -d ".
-static const char* shift_names[kNumberOfShifts] = { +static const char* const shift_names[kNumberOfShifts] = { "lsl", "lsr", "asr", "ror" }; diff -Nru libv8-3.4.14.21/src/arm/frames-arm.h libv8-3.5.10.24/src/arm/frames-arm.h --- libv8-3.4.14.21/src/arm/frames-arm.h 2011-06-23 06:29:21.000000000 +0000 +++ libv8-3.5.10.24/src/arm/frames-arm.h 2011-08-15 13:01:23.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -93,10 +93,11 @@ class StackHandlerConstants : public AllStatic { public: - static const int kNextOffset = 0 * kPointerSize; - static const int kStateOffset = 1 * kPointerSize; - static const int kFPOffset = 2 * kPointerSize; - static const int kPCOffset = 3 * kPointerSize; + static const int kNextOffset = 0 * kPointerSize; + static const int kStateOffset = 1 * kPointerSize; + static const int kContextOffset = 2 * kPointerSize; + static const int kFPOffset = 3 * kPointerSize; + static const int kPCOffset = 4 * kPointerSize; static const int kSize = kPCOffset + kPointerSize; }; diff -Nru libv8-3.4.14.21/src/arm/full-codegen-arm.cc libv8-3.5.10.24/src/arm/full-codegen-arm.cc --- libv8-3.4.14.21/src/arm/full-codegen-arm.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/arm/full-codegen-arm.cc 2011-09-07 12:19:49.000000000 +0000 @@ -47,7 +47,6 @@ static unsigned GetPropertyId(Property* property) { - if (property->is_synthetic()) return AstNode::kNoNumber; return property->id(); } @@ -694,104 +693,73 @@ Comment cmnt(masm_, "[ Declaration"); ASSERT(variable != NULL); // Must have been resolved. Slot* slot = variable->AsSlot(); - Property* prop = variable->AsProperty(); - - if (slot != NULL) { - switch (slot->type()) { - case Slot::PARAMETER: - case Slot::LOCAL: - if (mode == Variable::CONST) { - __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); - __ str(ip, MemOperand(fp, SlotOffset(slot))); - } else if (function != NULL) { - VisitForAccumulatorValue(function); - __ str(result_register(), MemOperand(fp, SlotOffset(slot))); - } - break; - - case Slot::CONTEXT: - // We bypass the general EmitSlotSearch because we know more about - // this specific context. - - // The variable in the decl always resides in the current function - // context. - ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); - if (FLAG_debug_code) { - // Check that we're not inside a with or catch context. - __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); - __ CompareRoot(r1, Heap::kWithContextMapRootIndex); - __ Check(ne, "Declaration in with context."); - __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); - __ Check(ne, "Declaration in catch context."); - } - if (mode == Variable::CONST) { - __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); - __ str(ip, ContextOperand(cp, slot->index())); - // No write barrier since the_hole_value is in old space. - } else if (function != NULL) { - VisitForAccumulatorValue(function); - __ str(result_register(), ContextOperand(cp, slot->index())); - int offset = Context::SlotOffset(slot->index()); - // We know that we have written a function, which is not a smi. - __ mov(r1, Operand(cp)); - __ RecordWrite(r1, Operand(offset), r2, result_register()); - } - break; - - case Slot::LOOKUP: { - __ mov(r2, Operand(variable->name())); - // Declaration nodes are always introduced in one of two modes. 
- ASSERT(mode == Variable::VAR || - mode == Variable::CONST); - PropertyAttributes attr = - (mode == Variable::VAR) ? NONE : READ_ONLY; - __ mov(r1, Operand(Smi::FromInt(attr))); - // Push initial value, if any. - // Note: For variables we must not push an initial value (such as - // 'undefined') because we may have a (legal) redeclaration and we - // must not destroy the current value. - if (mode == Variable::CONST) { - __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); - __ Push(cp, r2, r1, r0); - } else if (function != NULL) { - __ Push(cp, r2, r1); - // Push initial value for function declaration. - VisitForStackValue(function); - } else { - __ mov(r0, Operand(Smi::FromInt(0))); // No initial value! - __ Push(cp, r2, r1, r0); - } - __ CallRuntime(Runtime::kDeclareContextSlot, 4); - break; + ASSERT(slot != NULL); + switch (slot->type()) { + case Slot::PARAMETER: + case Slot::LOCAL: + if (function != NULL) { + VisitForAccumulatorValue(function); + __ str(result_register(), MemOperand(fp, SlotOffset(slot))); + } else if (mode == Variable::CONST || mode == Variable::LET) { + __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); + __ str(ip, MemOperand(fp, SlotOffset(slot))); } - } + break; - } else if (prop != NULL) { - // A const declaration aliasing a parameter is an illegal redeclaration. - ASSERT(mode != Variable::CONST); - if (function != NULL) { - // We are declaring a function that rewrites to a property. - // Use (keyed) IC to set the initial value. We cannot visit the - // rewrite because it's shared and we risk recording duplicate AST - // IDs for bailouts from optimized code. - ASSERT(prop->obj()->AsVariableProxy() != NULL); - { AccumulatorValueContext for_object(this); - EmitVariableLoad(prop->obj()->AsVariableProxy()); + case Slot::CONTEXT: + // We bypass the general EmitSlotSearch because we know more about + // this specific context. + + // The variable in the decl always resides in the current function + // context. + ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); + if (FLAG_debug_code) { + // Check that we're not inside a with or catch context. + __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); + __ CompareRoot(r1, Heap::kWithContextMapRootIndex); + __ Check(ne, "Declaration in with context."); + __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); + __ Check(ne, "Declaration in catch context."); } + if (function != NULL) { + VisitForAccumulatorValue(function); + __ str(result_register(), ContextOperand(cp, slot->index())); + int offset = Context::SlotOffset(slot->index()); + // We know that we have written a function, which is not a smi. + __ mov(r1, Operand(cp)); + __ RecordWrite(r1, Operand(offset), r2, result_register()); + } else if (mode == Variable::CONST || mode == Variable::LET) { + __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); + __ str(ip, ContextOperand(cp, slot->index())); + // No write barrier since the_hole_value is in old space. + } + break; - __ push(r0); - VisitForAccumulatorValue(function); - __ pop(r2); - - ASSERT(prop->key()->AsLiteral() != NULL && - prop->key()->AsLiteral()->handle()->IsSmi()); - __ mov(r1, Operand(prop->key()->AsLiteral()->handle())); - - Handle ic = is_strict_mode() - ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() - : isolate()->builtins()->KeyedStoreIC_Initialize(); - __ Call(ic); - // Value in r0 is ignored (declarations are statements). + case Slot::LOOKUP: { + __ mov(r2, Operand(variable->name())); + // Declaration nodes are always introduced in one of two modes. 
+ ASSERT(mode == Variable::VAR || + mode == Variable::CONST || + mode == Variable::LET); + PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; + __ mov(r1, Operand(Smi::FromInt(attr))); + // Push initial value, if any. + // Note: For variables we must not push an initial value (such as + // 'undefined') because we may have a (legal) redeclaration and we + // must not destroy the current value. + if (function != NULL) { + __ Push(cp, r2, r1); + // Push initial value for function declaration. + VisitForStackValue(function); + } else if (mode == Variable::CONST || mode == Variable::LET) { + __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); + __ Push(cp, r2, r1, r0); + } else { + __ mov(r0, Operand(Smi::FromInt(0))); // No initial value! + __ Push(cp, r2, r1, r0); + } + __ CallRuntime(Runtime::kDeclareContextSlot, 4); + break; } } } @@ -878,7 +846,7 @@ __ bind(&next_test); __ Drop(1); // Switch value is no longer needed. if (default_clause == NULL) { - __ b(nested_statement.break_target()); + __ b(nested_statement.break_label()); } else { __ b(default_clause->body_target()); } @@ -892,7 +860,7 @@ VisitStatements(clause->statements()); } - __ bind(nested_statement.break_target()); + __ bind(nested_statement.break_label()); PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); } @@ -1023,7 +991,7 @@ // Load the current count to r0, load the length to r1. __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); __ cmp(r0, r1); // Compare to the array length. - __ b(hs, loop_statement.break_target()); + __ b(hs, loop_statement.break_label()); // Get the current entry of the array into register r3. __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); @@ -1049,7 +1017,7 @@ __ push(r3); // Current entry. __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); __ mov(r3, Operand(r0), SetCC); - __ b(eq, loop_statement.continue_target()); + __ b(eq, loop_statement.continue_label()); // Update the 'each' property or variable from the possibly filtered // entry in register r3. @@ -1065,7 +1033,7 @@ // Generate code for the going to the next element by incrementing // the index (smi) stored on top of the stack. - __ bind(loop_statement.continue_target()); + __ bind(loop_statement.continue_label()); __ pop(r0); __ add(r0, r0, Operand(Smi::FromInt(1))); __ push(r0); @@ -1074,7 +1042,7 @@ __ b(&loop); // Remove the pointers stored on the stack. - __ bind(loop_statement.break_target()); + __ bind(loop_statement.break_label()); __ Drop(5); // Exit and decrement the loop depth. @@ -1311,6 +1279,20 @@ __ cmp(r0, ip); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); context()->Plug(r0); + } else if (var->mode() == Variable::LET) { + // Let bindings may be the hole value if they have not been initialized. + // Throw a type error in this case. + Label done; + MemOperand slot_operand = EmitSlotSearch(slot, r0); + __ ldr(r0, slot_operand); + __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); + __ cmp(r0, ip); + __ b(ne, &done); + __ mov(r0, Operand(var->name())); + __ push(r0); + __ CallRuntime(Runtime::kThrowReferenceError, 1); + __ bind(&done); + context()->Plug(r0); } else { context()->Plug(slot); } @@ -1891,6 +1873,59 @@ } __ bind(&skip); + } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { + // Perform the assignment for non-const variables. Const assignments + // are simply skipped. + Slot* slot = var->AsSlot(); + switch (slot->type()) { + case Slot::PARAMETER: + case Slot::LOCAL: { + Label assign; + // Check for an initialized let binding. 
+ __ ldr(r1, MemOperand(fp, SlotOffset(slot))); + __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); + __ cmp(r1, ip); + __ b(ne, &assign); + __ mov(r1, Operand(var->name())); + __ push(r1); + __ CallRuntime(Runtime::kThrowReferenceError, 1); + // Perform the assignment. + __ bind(&assign); + __ str(result_register(), MemOperand(fp, SlotOffset(slot))); + break; + } + case Slot::CONTEXT: { + // Let variables may be the hole value if they have not been + // initialized. Throw a type error in this case. + Label assign; + MemOperand target = EmitSlotSearch(slot, r1); + // Check for an initialized let binding. + __ ldr(r3, target); + __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); + __ cmp(r3, ip); + __ b(ne, &assign); + __ mov(r3, Operand(var->name())); + __ push(r3); + __ CallRuntime(Runtime::kThrowReferenceError, 1); + // Perform the assignment. + __ bind(&assign); + __ str(result_register(), target); + // RecordWrite may destroy all its register arguments. + __ mov(r3, result_register()); + int offset = Context::SlotOffset(slot->index()); + __ RecordWrite(r1, Operand(offset), r2, r3); + break; + } + case Slot::LOOKUP: + // Call the runtime for the assignment. + __ push(r0); // Value. + __ mov(r1, Operand(slot->var()->name())); + __ mov(r0, Operand(Smi::FromInt(strict_mode_flag()))); + __ Push(cp, r1, r0); // Context, name, strict mode. + __ CallRuntime(Runtime::kStoreContextSlot, 4); + break; + } + } else if (var->mode() != Variable::CONST) { // Perform the assignment for non-const variables. Const assignments // are simply skipped. @@ -2272,36 +2307,10 @@ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); } else { // Call to a keyed property. - // For a synthetic property use keyed load IC followed by function call, - // for a regular property use EmitKeyedCallWithIC. - if (prop->is_synthetic()) { - // Do not visit the object and key subexpressions (they are shared - // by all occurrences of the same rewritten parameter). - ASSERT(prop->obj()->AsVariableProxy() != NULL); - ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL); - Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot(); - MemOperand operand = EmitSlotSearch(slot, r1); - __ ldr(r1, operand); - - ASSERT(prop->key()->AsLiteral() != NULL); - ASSERT(prop->key()->AsLiteral()->handle()->IsSmi()); - __ mov(r0, Operand(prop->key()->AsLiteral()->handle())); - - // Record source code position for IC call. - SetSourcePosition(prop->position()); - - Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop)); - __ ldr(r1, GlobalObjectOperand()); - __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); - __ Push(r0, r1); // Function, receiver. - EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); - } else { - { PreservePositionScope scope(masm()->positions_recorder()); - VisitForStackValue(prop->obj()); - } - EmitKeyedCallWithIC(expr, prop->key()); + { PreservePositionScope scope(masm()->positions_recorder()); + VisitForStackValue(prop->obj()); } + EmitKeyedCallWithIC(expr, prop->key()); } } else { { PreservePositionScope scope(masm()->positions_recorder()); @@ -2753,7 +2762,7 @@ // Objects with a non-function constructor have class 'Object'. __ bind(&non_function_constructor); - __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex); + __ LoadRoot(r0, Heap::kObject_symbolRootIndex); __ jmp(&done); // Non-JS objects have class null. @@ -3252,7 +3261,7 @@ Label done, not_found; // tmp now holds finger offset as a smi. 
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1); + STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); // r2 now holds finger offset as a smi. __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); @@ -3580,39 +3589,6 @@ } -void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList* args) { - ASSERT(args->length() == 1); - - // Load the function into r0. - VisitForAccumulatorValue(args->at(0)); - - // Prepare for the test. - Label materialize_true, materialize_false; - Label* if_true = NULL; - Label* if_false = NULL; - Label* fall_through = NULL; - context()->PrepareTest(&materialize_true, &materialize_false, - &if_true, &if_false, &fall_through); - - // Test for strict mode function. - __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); - __ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset)); - __ tst(r1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + - kSmiTagSize))); - __ b(ne, if_true); - - // Test for native function. - __ tst(r1, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); - __ b(ne, if_true); - - // Not native or strict-mode function. - __ b(if_false); - - PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); - context()->Plug(if_true, if_false); -} - - void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { Handle name = expr->name(); if (name->length() > 0 && name->Get(0) == '_') { @@ -3664,18 +3640,12 @@ Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); if (prop != NULL) { - if (prop->is_synthetic()) { - // Result of deleting parameters is false, even when they rewrite - // to accesses on the arguments object. - context()->Plug(false); - } else { - VisitForStackValue(prop->obj()); - VisitForStackValue(prop->key()); - __ mov(r1, Operand(Smi::FromInt(strict_mode_flag()))); - __ push(r1); - __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); - context()->Plug(r0); - } + VisitForStackValue(prop->obj()); + VisitForStackValue(prop->key()); + __ mov(r1, Operand(Smi::FromInt(strict_mode_flag()))); + __ push(r1); + __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); + context()->Plug(r0); } else if (var != NULL) { // Delete of an unqualified identifier is disallowed in strict mode // but "delete this" is. @@ -4030,6 +4000,10 @@ __ b(eq, if_true); __ CompareRoot(r0, Heap::kFalseValueRootIndex); Split(eq, if_true, if_false, fall_through); + } else if (FLAG_harmony_typeof && + check->Equals(isolate()->heap()->null_symbol())) { + __ CompareRoot(r0, Heap::kNullValueRootIndex); + Split(eq, if_true, if_false, fall_through); } else if (check->Equals(isolate()->heap()->undefined_symbol())) { __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); __ b(eq, if_true); @@ -4047,8 +4021,10 @@ } else if (check->Equals(isolate()->heap()->object_symbol())) { __ JumpIfSmi(r0, if_false); - __ CompareRoot(r0, Heap::kNullValueRootIndex); - __ b(eq, if_true); + if (!FLAG_harmony_typeof) { + __ CompareRoot(r0, Heap::kNullValueRootIndex); + __ b(eq, if_true); + } // Check for JS objects => true. 
__ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); __ b(lt, if_false); @@ -4123,11 +4099,8 @@ default: { VisitForAccumulatorValue(expr->right()); Condition cond = eq; - bool strict = false; switch (op) { case Token::EQ_STRICT: - strict = true; - // Fall through case Token::EQ: cond = eq; __ pop(r1); @@ -4276,7 +4249,7 @@ // Cook return address in link register to stack (smi encoded Code* delta) __ sub(r1, lr, Operand(masm_->CodeObject())); ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); - ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTag == 0); __ add(r1, r1, Operand(r1)); // Convert to smi. __ push(r1); } @@ -4296,6 +4269,34 @@ #undef __ +#define __ ACCESS_MASM(masm()) + +FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( + int* stack_depth, + int* context_length) { + // The macros used here must preserve the result register. + + // Because the handler block contains the context of the finally + // code, we can restore it directly from there for the finally code + // rather than iteratively unwinding contexts via their previous + // links. + __ Drop(*stack_depth); // Down to the handler block. + if (*context_length > 0) { + // Restore the context to its dedicated register and the stack. + __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); + __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); + } + __ PopTryHandler(); + __ bl(finally_entry_); + + *stack_depth = 0; + *context_length = 0; + return previous_; +} + + +#undef __ + } } // namespace v8::internal #endif // V8_TARGET_ARCH_ARM diff -Nru libv8-3.4.14.21/src/arm/ic-arm.cc libv8-3.5.10.24/src/arm/ic-arm.cc --- libv8-3.4.14.21/src/arm/ic-arm.cc 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/arm/ic-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -337,7 +337,7 @@ // Fast case: Do the load. __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); // The key is a smi. - ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); + STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); __ ldr(scratch2, MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize)); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); @@ -370,7 +370,7 @@ // Is the string a symbol? // map: key map __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); - ASSERT(kSymbolTag != 0); + STATIC_ASSERT(kSymbolTag != 0); __ tst(hash, Operand(kIsSymbolMask)); __ b(eq, not_symbol); } @@ -1333,7 +1333,7 @@ __ cmp(key, Operand(ip)); __ b(hs, &slow); // Calculate key + 1 as smi. 
- ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTag == 0); __ add(r4, key, Operand(Smi::FromInt(1))); __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset)); __ b(&fast); diff -Nru libv8-3.4.14.21/src/arm/lithium-arm.cc libv8-3.5.10.24/src/arm/lithium-arm.cc --- libv8-3.4.14.21/src/arm/lithium-arm.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/arm/lithium-arm.cc 2011-09-20 11:34:48.000000000 +0000 @@ -710,7 +710,9 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { HEnvironment* hydrogen_env = current_block_->last_environment(); - instr->set_environment(CreateEnvironment(hydrogen_env)); + int argument_index_accumulator = 0; + instr->set_environment(CreateEnvironment(hydrogen_env, + &argument_index_accumulator)); return instr; } @@ -993,10 +995,13 @@ } -LEnvironment* LChunkBuilder::CreateEnvironment(HEnvironment* hydrogen_env) { +LEnvironment* LChunkBuilder::CreateEnvironment( + HEnvironment* hydrogen_env, + int* argument_index_accumulator) { if (hydrogen_env == NULL) return NULL; - LEnvironment* outer = CreateEnvironment(hydrogen_env->outer()); + LEnvironment* outer = + CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator); int ast_id = hydrogen_env->ast_id(); ASSERT(ast_id != AstNode::kNoNumber); int value_count = hydrogen_env->length(); @@ -1006,7 +1011,6 @@ argument_count_, value_count, outer); - int argument_index = 0; for (int i = 0; i < value_count; ++i) { if (hydrogen_env->is_special_index(i)) continue; @@ -1015,7 +1019,7 @@ if (value->IsArgumentsObject()) { op = NULL; } else if (value->IsPushArgument()) { - op = new LArgument(argument_index++); + op = new LArgument((*argument_index_accumulator)++); } else { op = UseAny(value); } @@ -1039,7 +1043,7 @@ : instr->SecondSuccessor(); return new LGoto(successor->block_id()); } - return new LBranch(UseRegisterAtStart(v)); + return AssignEnvironment(new LBranch(UseRegister(v))); } @@ -1399,7 +1403,6 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { Token::Value op = instr->token(); - Representation r = instr->GetInputRepresentation(); ASSERT(instr->left()->representation().IsTagged()); ASSERT(instr->right()->representation().IsTagged()); bool reversed = (op == Token::GT || op == Token::LTE); @@ -1509,16 +1512,10 @@ } -LInstruction* LChunkBuilder::DoExternalArrayLength( - HExternalArrayLength* instr) { - LOperand* array = UseRegisterAtStart(instr->value()); - return DefineAsRegister(new LExternalArrayLength(array)); -} - - -LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) { +LInstruction* LChunkBuilder::DoFixedArrayBaseLength( + HFixedArrayBaseLength* instr) { LOperand* array = UseRegisterAtStart(instr->value()); - return DefineAsRegister(new LFixedArrayLength(array)); + return DefineAsRegister(new LFixedArrayBaseLength(array)); } @@ -2006,8 +2003,8 @@ LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) { - LOperand* string = UseRegister(instr->string()); - LOperand* index = UseRegisterOrConstant(instr->index()); + LOperand* string = UseTempRegister(instr->string()); + LOperand* index = UseTempRegister(instr->index()); LStringCharCodeAt* result = new LStringCharCodeAt(string, index); return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); } diff -Nru libv8-3.4.14.21/src/arm/lithium-arm.h libv8-3.5.10.24/src/arm/lithium-arm.h --- libv8-3.4.14.21/src/arm/lithium-arm.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/arm/lithium-arm.h 2011-09-20 11:34:48.000000000 +0000 @@ -92,8 +92,7 @@ 
V(DivI) \ V(DoubleToI) \ V(ElementsKind) \ - V(ExternalArrayLength) \ - V(FixedArrayLength) \ + V(FixedArrayBaseLength) \ V(FunctionLiteral) \ V(GetCachedArrayIndex) \ V(GlobalObject) \ @@ -915,25 +914,15 @@ }; -class LExternalArrayLength: public LTemplateInstruction<1, 1, 0> { +class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> { public: - explicit LExternalArrayLength(LOperand* value) { + explicit LFixedArrayBaseLength(LOperand* value) { inputs_[0] = value; } - DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength, "external-array-length") - DECLARE_HYDROGEN_ACCESSOR(ExternalArrayLength) -}; - - -class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> { - public: - explicit LFixedArrayLength(LOperand* value) { - inputs_[0] = value; - } - - DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength, "fixed-array-length") - DECLARE_HYDROGEN_ACCESSOR(FixedArrayLength) + DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength, + "fixed-array-base-length") + DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength) }; @@ -2170,7 +2159,8 @@ LInstruction* instr, int ast_id); void ClearInstructionPendingDeoptimizationEnvironment(); - LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env); + LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env, + int* argument_index_accumulator); void VisitInstruction(HInstruction* current); diff -Nru libv8-3.4.14.21/src/arm/lithium-codegen-arm.cc libv8-3.5.10.24/src/arm/lithium-codegen-arm.cc --- libv8-3.4.14.21/src/arm/lithium-codegen-arm.cc 2011-08-02 15:57:14.000000000 +0000 +++ libv8-3.5.10.24/src/arm/lithium-codegen-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1378,17 +1378,10 @@ } -void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) { +void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) { Register result = ToRegister(instr->result()); Register array = ToRegister(instr->InputAt(0)); - __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset)); -} - - -void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { - Register result = ToRegister(instr->result()); - Register array = ToRegister(instr->InputAt(0)); - __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset)); + __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset)); } @@ -1564,52 +1557,96 @@ } else { ASSERT(r.IsTagged()); Register reg = ToRegister(instr->InputAt(0)); - if (instr->hydrogen()->value()->type().IsBoolean()) { - __ LoadRoot(ip, Heap::kTrueValueRootIndex); - __ cmp(reg, ip); + HType type = instr->hydrogen()->value()->type(); + if (type.IsBoolean()) { + __ CompareRoot(reg, Heap::kTrueValueRootIndex); EmitBranch(true_block, false_block, eq); + } else if (type.IsSmi()) { + __ cmp(reg, Operand(0)); + EmitBranch(true_block, false_block, ne); } else { Label* true_label = chunk_->GetAssemblyLabel(true_block); Label* false_label = chunk_->GetAssemblyLabel(false_block); - __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); - __ cmp(reg, ip); - __ b(eq, false_label); - __ LoadRoot(ip, Heap::kTrueValueRootIndex); - __ cmp(reg, ip); - __ b(eq, true_label); - __ LoadRoot(ip, Heap::kFalseValueRootIndex); - __ cmp(reg, ip); - __ b(eq, false_label); - __ cmp(reg, Operand(0)); - __ b(eq, false_label); - __ JumpIfSmi(reg, true_label); + ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); + // Avoid deopts in the case where we've never executed this path before. + if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); + + if (expected.Contains(ToBooleanStub::UNDEFINED)) { + // undefined -> false. 
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); + __ b(eq, false_label); + } + if (expected.Contains(ToBooleanStub::BOOLEAN)) { + // Boolean -> its value. + __ CompareRoot(reg, Heap::kTrueValueRootIndex); + __ b(eq, true_label); + __ CompareRoot(reg, Heap::kFalseValueRootIndex); + __ b(eq, false_label); + } + if (expected.Contains(ToBooleanStub::NULL_TYPE)) { + // 'null' -> false. + __ CompareRoot(reg, Heap::kNullValueRootIndex); + __ b(eq, false_label); + } - // Test double values. Zero and NaN are false. - Label call_stub; - DoubleRegister dbl_scratch = double_scratch0(); - Register scratch = scratch0(); - __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); - __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); - __ cmp(scratch, Operand(ip)); - __ b(ne, &call_stub); - __ sub(ip, reg, Operand(kHeapObjectTag)); - __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset); - __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch); - __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit)); - __ b(ne, false_label); - __ b(true_label); - - // The conversion stub doesn't cause garbage collections so it's - // safe to not record a safepoint after the call. - __ bind(&call_stub); - ToBooleanStub stub(reg); - RegList saved_regs = kJSCallerSaved | kCalleeSaved; - __ stm(db_w, sp, saved_regs); - __ CallStub(&stub); - __ cmp(reg, Operand(0)); - __ ldm(ia_w, sp, saved_regs); - EmitBranch(true_block, false_block, ne); + if (expected.Contains(ToBooleanStub::SMI)) { + // Smis: 0 -> false, all other -> true. + __ cmp(reg, Operand(0)); + __ b(eq, false_label); + __ JumpIfSmi(reg, true_label); + } else if (expected.NeedsMap()) { + // If we need a map later and have a Smi -> deopt. + __ tst(reg, Operand(kSmiTagMask)); + DeoptimizeIf(eq, instr->environment()); + } + + const Register map = scratch0(); + if (expected.NeedsMap()) { + __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset)); + + if (expected.CanBeUndetectable()) { + // Undetectable -> false. + __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); + __ tst(ip, Operand(1 << Map::kIsUndetectable)); + __ b(ne, false_label); + } + } + + if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { + // spec object -> true. + __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); + __ b(ge, true_label); + } + + if (expected.Contains(ToBooleanStub::STRING)) { + // String value -> false iff empty. + Label not_string; + __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); + __ b(ge, &not_string); + __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset)); + __ cmp(ip, Operand(0)); + __ b(ne, true_label); + __ b(false_label); + __ bind(&not_string); + } + + if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { + // heap number -> false iff +0, -0, or NaN. + DoubleRegister dbl_scratch = double_scratch0(); + Label not_heap_number; + __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); + __ b(ne, &not_heap_number); + __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); + __ VFPCompareAndSetFlags(dbl_scratch, 0.0); + __ b(vs, false_label); // NaN -> false. + __ b(eq, false_label); // +0, -0 -> false. + __ b(true_label); + __ bind(&not_heap_number); + } + + // We've seen something for the first time -> deopt.
+ DeoptimizeIf(al, instr->environment()); } } } @@ -1767,7 +1804,6 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) { Register reg = ToRegister(instr->InputAt(0)); Register temp1 = ToRegister(instr->TempAt(0)); - Register temp2 = scratch0(); int true_block = chunk_->LookupDestination(instr->true_block_id()); int false_block = chunk_->LookupDestination(instr->false_block_id()); @@ -2722,7 +2758,6 @@ void LCodeGen::DoGlobalObject(LGlobalObject* instr) { - Register context = ToRegister(instr->context()); Register result = ToRegister(instr->result()); __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX)); } @@ -2928,19 +2963,18 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) { DoubleRegister input = ToDoubleRegister(instr->InputAt(0)); Register result = ToRegister(instr->result()); - Register scratch1 = result; - Register scratch2 = scratch0(); + Register scratch = scratch0(); Label done, check_sign_on_zero; // Extract exponent bits. - __ vmov(scratch1, input.high()); - __ ubfx(scratch2, - scratch1, + __ vmov(result, input.high()); + __ ubfx(scratch, + result, HeapNumber::kExponentShift, HeapNumber::kExponentBits); // If the number is in ]-0.5, +0.5[, the result is +/- 0. - __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2)); + __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2)); __ mov(result, Operand(0), LeaveCC, le); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { __ b(le, &check_sign_on_zero); @@ -2950,19 +2984,19 @@ // The following conversion will not work with numbers // outside of ]-2^32, 2^32[. - __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32)); + __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32)); DeoptimizeIf(ge, instr->environment()); // Save the original sign for later comparison. - __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask)); + __ and_(scratch, result, Operand(HeapNumber::kSignMask)); __ Vmov(double_scratch0(), 0.5); __ vadd(input, input, double_scratch0()); // Check sign of the result: if the sign changed, the input // value was in ]0.5, 0[ and the result should be -0. - __ vmov(scratch1, input.high()); - __ eor(scratch1, scratch1, Operand(scratch2), SetCC); + __ vmov(result, input.high()); + __ eor(result, result, Operand(scratch), SetCC); if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { DeoptimizeIf(mi, instr->environment()); } else { @@ -2973,8 +3007,8 @@ __ EmitVFPTruncate(kRoundToMinusInf, double_scratch0().low(), input, - scratch1, - scratch2); + result, + scratch); DeoptimizeIf(ne, instr->environment()); __ vmov(result, double_scratch0().low()); @@ -2983,8 +3017,8 @@ __ cmp(result, Operand(0)); __ b(ne, &done); __ bind(&check_sign_on_zero); - __ vmov(scratch1, input.high()); - __ tst(scratch1, Operand(HeapNumber::kSignMask)); + __ vmov(scratch, input.high()); + __ tst(scratch, Operand(HeapNumber::kSignMask)); DeoptimizeIf(ne, instr->environment()); } __ bind(&done); @@ -3421,97 +3455,81 @@ LStringCharCodeAt* instr_; }; - Register scratch = scratch0(); Register string = ToRegister(instr->string()); - Register index = no_reg; - int const_index = -1; - if (instr->index()->IsConstantOperand()) { - const_index = ToInteger32(LConstantOperand::cast(instr->index())); - STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); - if (!Smi::IsValid(const_index)) { - // Guaranteed to be out of bounds because of the assert above. - // So the bounds check that must dominate this instruction must - // have deoptimized already. 
- if (FLAG_debug_code) { - __ Abort("StringCharCodeAt: out of bounds index."); - } - // No code needs to be generated. - return; - } - } else { - index = ToRegister(instr->index()); - } + Register index = ToRegister(instr->index()); Register result = ToRegister(instr->result()); DeferredStringCharCodeAt* deferred = new DeferredStringCharCodeAt(this, instr); - Label flat_string, ascii_string, done; - // Fetch the instance type of the receiver into result register. __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset)); __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset)); - // We need special handling for non-flat strings. - STATIC_ASSERT(kSeqStringTag == 0); - __ tst(result, Operand(kStringRepresentationMask)); - __ b(eq, &flat_string); - - // Handle non-flat strings. - __ tst(result, Operand(kIsConsStringMask)); - __ b(eq, deferred->entry()); + // We need special handling for indirect strings. + Label check_sequential; + __ tst(result, Operand(kIsIndirectStringMask)); + __ b(eq, &check_sequential); + + // Dispatch on the indirect string shape: slice or cons. + Label cons_string; + __ tst(result, Operand(kSlicedNotConsMask)); + __ b(eq, &cons_string); + + // Handle slices. + Label indirect_string_loaded; + __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset)); + __ add(index, index, Operand(result, ASR, kSmiTagSize)); + __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset)); + __ jmp(&indirect_string_loaded); - // ConsString. + // Handle conses. // Check whether the right hand side is the empty string (i.e. if // this is really a flat string in a cons string). If that is not // the case we would rather go to the runtime system now to flatten // the string. - __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset)); + __ bind(&cons_string); + __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset)); __ LoadRoot(ip, Heap::kEmptyStringRootIndex); - __ cmp(scratch, ip); + __ cmp(result, ip); __ b(ne, deferred->entry()); // Get the first of the two strings and load its instance type. __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset)); + + __ bind(&indirect_string_loaded); __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset)); __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset)); - // If the first cons component is also non-flat, then go to runtime. + + // Check whether the string is sequential. The only non-sequential + // shapes we support have just been unwrapped above. + __ bind(&check_sequential); STATIC_ASSERT(kSeqStringTag == 0); __ tst(result, Operand(kStringRepresentationMask)); __ b(ne, deferred->entry()); - // Check for 1-byte or 2-byte string. - __ bind(&flat_string); + // Dispatch on the encoding: ASCII or two-byte. + Label ascii_string; STATIC_ASSERT(kAsciiStringTag != 0); __ tst(result, Operand(kStringEncodingMask)); __ b(ne, &ascii_string); - // 2-byte string. - // Load the 2-byte character code into the result register. - STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); - if (instr->index()->IsConstantOperand()) { - __ ldrh(result, - FieldMemOperand(string, - SeqTwoByteString::kHeaderSize + 2 * const_index)); - } else { - __ add(scratch, - string, - Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - __ ldrh(result, MemOperand(scratch, index, LSL, 1)); - } + // Two-byte string. + // Load the two-byte character code into the result register. 
+ Label done; + __ add(result, + string, + Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); + __ ldrh(result, MemOperand(result, index, LSL, 1)); __ jmp(&done); // ASCII string. // Load the byte into the result register. __ bind(&ascii_string); - if (instr->index()->IsConstantOperand()) { - __ ldrb(result, FieldMemOperand(string, - SeqAsciiString::kHeaderSize + const_index)); - } else { - __ add(scratch, - string, - Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - __ ldrb(result, MemOperand(scratch, index)); - } + __ add(result, + string, + Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ ldrb(result, MemOperand(result, index)); + __ bind(&done); __ bind(deferred->exit()); } @@ -3739,7 +3757,7 @@ LOperand* input = instr->InputAt(0); ASSERT(input->IsRegister() && input->Equals(instr->result())); if (instr->needs_check()) { - ASSERT(kHeapObjectTag == 1); + STATIC_ASSERT(kHeapObjectTag == 1); // If the input is a HeapObject, SmiUntag will set the carry flag. __ SmiUntag(ToRegister(input), SetCC); DeoptimizeIf(cs, instr->environment()); @@ -3824,7 +3842,7 @@ // The input was optimistically untagged; revert it. // The carry flag is set when we reach this deferred code as we just executed // SmiUntag(heap_object, SetCC) - ASSERT(kHeapObjectTag == 1); + STATIC_ASSERT(kHeapObjectTag == 1); __ adc(input_reg, input_reg, Operand(input_reg)); // Heap number map check. @@ -3929,7 +3947,6 @@ Register scratch1 = scratch0(); Register scratch2 = ToRegister(instr->TempAt(0)); DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0)); - DwVfpRegister double_scratch = double_scratch0(); SwVfpRegister single_scratch = double_scratch0().low(); Label done; @@ -4070,7 +4087,7 @@ // conversions. __ cmp(input_reg, Operand(factory()->undefined_value())); DeoptimizeIf(ne, instr->environment()); - __ movt(input_reg, 0); + __ mov(result_reg, Operand(0)); __ jmp(&done); // Heap number @@ -4309,6 +4326,10 @@ __ CompareRoot(input, Heap::kFalseValueRootIndex); final_branch_condition = eq; + } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) { + __ CompareRoot(input, Heap::kNullValueRootIndex); + final_branch_condition = eq; + } else if (type_name->Equals(heap()->undefined_symbol())) { __ CompareRoot(input, Heap::kUndefinedValueRootIndex); __ b(eq, true_label); @@ -4327,8 +4348,10 @@ } else if (type_name->Equals(heap()->object_symbol())) { __ JumpIfSmi(input, false_label); - __ CompareRoot(input, Heap::kNullValueRootIndex); - __ b(eq, true_label); + if (!FLAG_harmony_typeof) { + __ CompareRoot(input, Heap::kNullValueRootIndex); + __ b(eq, true_label); + } __ CompareObjectType(input, input, scratch, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); __ b(lt, false_label); diff -Nru libv8-3.4.14.21/src/arm/lithium-gap-resolver-arm.cc libv8-3.5.10.24/src/arm/lithium-gap-resolver-arm.cc --- libv8-3.4.14.21/src/arm/lithium-gap-resolver-arm.cc 2011-03-23 11:19:56.000000000 +0000 +++ libv8-3.5.10.24/src/arm/lithium-gap-resolver-arm.cc 2011-08-24 12:02:41.000000000 +0000 @@ -254,7 +254,6 @@ } else { ASSERT(destination->IsStackSlot()); ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone. 
- MemOperand destination_operand = cgen_->ToMemOperand(destination); __ mov(kSavedValueRegister, source_operand); __ str(kSavedValueRegister, cgen_->ToMemOperand(destination)); } @@ -265,8 +264,7 @@ __ vmov(cgen_->ToDoubleRegister(destination), source_register); } else { ASSERT(destination->IsDoubleStackSlot()); - MemOperand destination_operand = cgen_->ToMemOperand(destination); - __ vstr(source_register, destination_operand); + __ vstr(source_register, cgen_->ToMemOperand(destination)); } } else if (source->IsDoubleStackSlot()) { diff -Nru libv8-3.4.14.21/src/arm/macro-assembler-arm.cc libv8-3.5.10.24/src/arm/macro-assembler-arm.cc --- libv8-3.4.14.21/src/arm/macro-assembler-arm.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/arm/macro-assembler-arm.cc 2011-08-15 13:01:23.000000000 +0000 @@ -1102,7 +1102,13 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, HandlerType type) { // Adjust this code if not the case. - ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); + // The pc (return address) is passed in register lr. if (try_location == IN_JAVASCRIPT) { if (type == TRY_CATCH_HANDLER) { @@ -1110,14 +1116,10 @@ } else { mov(r3, Operand(StackHandler::TRY_FINALLY)); } - ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize - && StackHandlerConstants::kFPOffset == 2 * kPointerSize - && StackHandlerConstants::kPCOffset == 3 * kPointerSize); - stm(db_w, sp, r3.bit() | fp.bit() | lr.bit()); + stm(db_w, sp, r3.bit() | cp.bit() | fp.bit() | lr.bit()); // Save the current handler as the next handler. mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(r1, MemOperand(r3)); - ASSERT(StackHandlerConstants::kNextOffset == 0); push(r1); // Link this handler as the new current one. str(sp, MemOperand(r3)); @@ -1127,16 +1129,13 @@ // The frame pointer does not point to a JS frame so we save NULL // for fp. We expect the code throwing an exception to check fp // before dereferencing it to restore the context. - mov(ip, Operand(0, RelocInfo::NONE)); // To save a NULL frame pointer. - mov(r6, Operand(StackHandler::ENTRY)); - ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize - && StackHandlerConstants::kFPOffset == 2 * kPointerSize - && StackHandlerConstants::kPCOffset == 3 * kPointerSize); - stm(db_w, sp, r6.bit() | ip.bit() | lr.bit()); + mov(r5, Operand(StackHandler::ENTRY)); // State. + mov(r6, Operand(Smi::FromInt(0))); // Indicates no context. + mov(r7, Operand(0, RelocInfo::NONE)); // NULL frame pointer. + stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | lr.bit()); // Save the current handler as the next handler. mov(r7, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(r6, MemOperand(r7)); - ASSERT(StackHandlerConstants::kNextOffset == 0); push(r6); // Link this handler as the new current one. 
str(sp, MemOperand(r7)); @@ -1145,7 +1144,7 @@ void MacroAssembler::PopTryHandler() { - ASSERT_EQ(0, StackHandlerConstants::kNextOffset); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); pop(r1); mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); @@ -1154,39 +1153,40 @@ void MacroAssembler::Throw(Register value) { + // Adjust this code if not the case. + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // r0 is expected to hold the exception. if (!value.is(r0)) { mov(r0, value); } - // Adjust this code if not the case. - STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); - // Drop the sp to the top of the handler. mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(sp, MemOperand(r3)); - // Restore the next handler and frame pointer, discard handler state. - STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); + // Restore the next handler. pop(r2); str(r2, MemOperand(r3)); - STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); - ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state. - // Before returning we restore the context from the frame pointer if - // not NULL. The frame pointer is NULL in the exception handler of a - // JS entry frame. - cmp(fp, Operand(0, RelocInfo::NONE)); - // Set cp to NULL if fp is NULL. - mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq); - // Restore cp otherwise. - ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); + // Restore context and frame pointer, discard state (r3). + ldm(ia_w, sp, r3.bit() | cp.bit() | fp.bit()); + + // If the handler is a JS frame, restore the context to the frame. + // (r3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any + // of them. + cmp(r3, Operand(StackHandler::ENTRY)); + str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); + #ifdef DEBUG if (emit_debug_code()) { mov(lr, Operand(pc)); } #endif - STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); pop(pc); } @@ -1194,8 +1194,12 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, Register value) { // Adjust this code if not the case. - STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); - + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // r0 is expected to hold the exception. if (!value.is(r0)) { mov(r0, value); @@ -1220,7 +1224,6 @@ bind(&done); // Set the top handler address to next handler past the current ENTRY handler. - STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); pop(r2); str(r2, MemOperand(r3)); @@ -1242,26 +1245,17 @@ // Stack layout at this point. See also StackHandlerConstants. 
// sp -> state (ENTRY) + // cp // fp // lr - // Discard handler state (r2 is not used) and restore frame pointer. - STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); - ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state. - // Before returning we restore the context from the frame pointer if - // not NULL. The frame pointer is NULL in the exception handler of a - // JS entry frame. - cmp(fp, Operand(0, RelocInfo::NONE)); - // Set cp to NULL if fp is NULL. - mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq); - // Restore cp otherwise. - ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); + // Restore context and frame pointer, discard state (r2). + ldm(ia_w, sp, r2.bit() | cp.bit() | fp.bit()); #ifdef DEBUG if (emit_debug_code()) { mov(lr, Operand(pc)); } #endif - STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); pop(pc); } diff -Nru libv8-3.4.14.21/src/arm/regexp-macro-assembler-arm.cc libv8-3.5.10.24/src/arm/regexp-macro-assembler-arm.cc --- libv8-3.4.14.21/src/arm/regexp-macro-assembler-arm.cc 2011-06-20 15:33:18.000000000 +0000 +++ libv8-3.5.10.24/src/arm/regexp-macro-assembler-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1034,12 +1034,13 @@ } // Prepare for possible GC. - HandleScope handles; + HandleScope handles(isolate); Handle code_handle(re_code); Handle subject(frame_entry(re_frame, kInputString)); + // Current string. - bool is_ascii = subject->IsAsciiRepresentation(); + bool is_ascii = subject->IsAsciiRepresentationUnderneath(); ASSERT(re_code->instruction_start() <= *return_address); ASSERT(*return_address <= @@ -1048,7 +1049,7 @@ MaybeObject* result = Execution::HandleStackGuardInterrupt(); if (*code_handle != re_code) { // Return address no longer valid - int delta = *code_handle - re_code; + int delta = code_handle->address() - re_code->address(); // Overwrite the return address on the stack. *return_address += delta; } @@ -1057,8 +1058,20 @@ return EXCEPTION; } + Handle subject_tmp = subject; + int slice_offset = 0; + + // Extract the underlying string and the slice offset. + if (StringShape(*subject_tmp).IsCons()) { + subject_tmp = Handle(ConsString::cast(*subject_tmp)->first()); + } else if (StringShape(*subject_tmp).IsSliced()) { + SlicedString* slice = SlicedString::cast(*subject_tmp); + subject_tmp = Handle(slice->parent()); + slice_offset = slice->offset(); + } + // String might have changed. - if (subject->IsAsciiRepresentation() != is_ascii) { + if (subject_tmp->IsAsciiRepresentation() != is_ascii) { // If we changed between an ASCII and an UC16 string, the specialized // code cannot be used, and we need to restart regexp matching from // scratch (including, potentially, compiling a new version of the code). @@ -1069,8 +1082,8 @@ // be a sequential or external string with the same content. // Update the start and end pointers in the stack frame to the current // location (whether it has actually moved or not). - ASSERT(StringShape(*subject).IsSequential() || - StringShape(*subject).IsExternal()); + ASSERT(StringShape(*subject_tmp).IsSequential() || + StringShape(*subject_tmp).IsExternal()); // The original start address of the characters to match. const byte* start_address = frame_entry(re_frame, kInputStart); @@ -1078,13 +1091,14 @@ // Find the current start address of the same character at the current string // position. 
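Editor's note on the regexp stack-guard hunk above: before re-deriving character addresses after a GC, the code now unwraps ConsString and SlicedString subjects into the underlying flat string plus a slice offset, and afterwards asserts the result is sequential or external. The toy version below shows only that unwrapping step; the Str type and field names are invented for the sketch, not V8's String classes.

```cpp
#include <cassert>
#include <string>

// Illustrative stand-ins for the string shapes handled in the hunk: a cons
// string's content is taken from its first part, a sliced string is a
// (parent, offset) view over its backing string.
struct Str {
  enum Kind { SEQUENTIAL, CONS, SLICED } kind;
  std::string data;   // only meaningful for SEQUENTIAL
  const Str* first;   // CONS: first part
  const Str* parent;  // SLICED: backing string
  int offset;         // SLICED: start position inside the parent
};

// Mirror of the "extract the underlying string and the slice offset" block;
// like the hunk, a single level of unwrapping is performed.
const Str* Underlying(const Str* s, int* slice_offset) {
  *slice_offset = 0;
  if (s->kind == Str::CONS) return s->first;
  if (s->kind == Str::SLICED) {
    *slice_offset = s->offset;
    return s->parent;
  }
  return s;
}

int main() {
  Str flat{Str::SEQUENTIAL, "hello world", nullptr, nullptr, 0};
  Str slice{Str::SLICED, "", nullptr, &flat, 6};
  int off = 0;
  const Str* u = Underlying(&slice, &off);
  // Character positions are then computed against the parent, shifted by
  // the slice offset, exactly like start_index + slice_offset in the hunk.
  assert(u->data[off + 0] == 'w');
  return 0;
}
```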
int start_index = frame_entry(re_frame, kStartIndex); - const byte* new_address = StringCharacterPosition(*subject, start_index); + const byte* new_address = StringCharacterPosition(*subject_tmp, + start_index + slice_offset); if (start_address != new_address) { // If there is a difference, update the object pointer and start and end // addresses in the RegExp stack frame to match the new value. const byte* end_address = frame_entry(re_frame, kInputEnd); - int byte_length = end_address - start_address; + int byte_length = static_cast(end_address - start_address); frame_entry(re_frame, kInputString) = *subject; frame_entry(re_frame, kInputStart) = new_address; frame_entry(re_frame, kInputEnd) = new_address + byte_length; diff -Nru libv8-3.4.14.21/src/arm/stub-cache-arm.cc libv8-3.5.10.24/src/arm/stub-cache-arm.cc --- libv8-3.4.14.21/src/arm/stub-cache-arm.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/arm/stub-cache-arm.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1183,9 +1183,8 @@ __ JumpIfSmi(receiver, miss); // Check that the maps haven't changed. - Register reg = - CheckPrototypes(object, receiver, holder, - scratch1, scratch2, scratch3, name, miss); + CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, name, + miss); // Return the constant value. __ mov(r0, Operand(Handle(value))); @@ -3489,16 +3488,16 @@ // Check that the index is in range. __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); - __ cmp(ip, Operand(key, ASR, kSmiTagSize)); + __ cmp(key, ip); // Unsigned comparison catches both negative and too-large values. - __ b(lo, &miss_force_generic); + __ b(hs, &miss_force_generic); __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); // r3: base pointer of external storage // We are not untagging smi key and instead work with it // as if it was premultiplied by 2. - ASSERT((kSmiTag == 0) && (kSmiTagSize == 1)); + STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1)); Register value = r2; switch (elements_kind) { @@ -3811,22 +3810,20 @@ // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); - // Check that the key is a smi. __ JumpIfNotSmi(key, &miss_force_generic); + __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); + // Check that the index is in range - __ SmiUntag(r4, key); __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); - __ cmp(r4, ip); + __ cmp(key, ip); // Unsigned comparison catches both negative and too-large values. __ b(hs, &miss_force_generic); // Handle both smis and HeapNumbers in the fast path. Go to the // runtime for all other kinds of values. // r3: external array. - // r4: key (integer). if (elements_kind == JSObject::EXTERNAL_PIXEL_ELEMENTS) { // Double to pixel conversion is only implemented in the runtime for now. __ JumpIfNotSmi(value, &slow); @@ -3837,32 +3834,32 @@ __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); // r3: base pointer of external storage. - // r4: key (integer). // r5: value (integer). switch (elements_kind) { case JSObject::EXTERNAL_PIXEL_ELEMENTS: // Clamp the value to [0..255]. 
__ Usat(r5, 8, Operand(r5)); - __ strb(r5, MemOperand(r3, r4, LSL, 0)); + __ strb(r5, MemOperand(r3, key, LSR, 1)); break; case JSObject::EXTERNAL_BYTE_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - __ strb(r5, MemOperand(r3, r4, LSL, 0)); + __ strb(r5, MemOperand(r3, key, LSR, 1)); break; case JSObject::EXTERNAL_SHORT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - __ strh(r5, MemOperand(r3, r4, LSL, 1)); + __ strh(r5, MemOperand(r3, key, LSL, 0)); break; case JSObject::EXTERNAL_INT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: - __ str(r5, MemOperand(r3, r4, LSL, 2)); + __ str(r5, MemOperand(r3, key, LSL, 1)); break; case JSObject::EXTERNAL_FLOAT_ELEMENTS: // Perform int-to-float conversion and store to memory. + __ SmiUntag(r4, key); StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9); break; case JSObject::EXTERNAL_DOUBLE_ELEMENTS: - __ add(r3, r3, Operand(r4, LSL, 3)); + __ add(r3, r3, Operand(key, LSL, 2)); // r3: effective address of the double element FloatingPointHelper::Destination destination; if (CpuFeatures::IsSupported(VFP3)) { @@ -3895,7 +3892,6 @@ if (elements_kind != JSObject::EXTERNAL_PIXEL_ELEMENTS) { // r3: external array. - // r4: index (integer). __ bind(&check_heap_number); __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE); __ b(ne, &slow); @@ -3903,7 +3899,6 @@ __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); // r3: base pointer of external storage. - // r4: key (integer). // The WebGL specification leaves the behavior of storing NaN and // +/-Infinity into integer arrays basically undefined. For more @@ -3916,13 +3911,13 @@ // include -kHeapObjectTag into it. __ sub(r5, r0, Operand(kHeapObjectTag)); __ vldr(d0, r5, HeapNumber::kValueOffset); - __ add(r5, r3, Operand(r4, LSL, 2)); + __ add(r5, r3, Operand(key, LSL, 1)); __ vcvt_f32_f64(s0, d0); __ vstr(s0, r5, 0); } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { __ sub(r5, r0, Operand(kHeapObjectTag)); __ vldr(d0, r5, HeapNumber::kValueOffset); - __ add(r5, r3, Operand(r4, LSL, 3)); + __ add(r5, r3, Operand(key, LSL, 2)); __ vstr(d0, r5, 0); } else { // Hoisted load. vldr requires offset to be a multiple of 4 so we can @@ -3934,15 +3929,15 @@ switch (elements_kind) { case JSObject::EXTERNAL_BYTE_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - __ strb(r5, MemOperand(r3, r4, LSL, 0)); + __ strb(r5, MemOperand(r3, key, LSR, 1)); break; case JSObject::EXTERNAL_SHORT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - __ strh(r5, MemOperand(r3, r4, LSL, 1)); + __ strh(r5, MemOperand(r3, key, LSL, 0)); break; case JSObject::EXTERNAL_INT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: - __ str(r5, MemOperand(r3, r4, LSL, 2)); + __ str(r5, MemOperand(r3, key, LSL, 1)); break; case JSObject::EXTERNAL_PIXEL_ELEMENTS: case JSObject::EXTERNAL_FLOAT_ELEMENTS: @@ -4004,7 +3999,7 @@ __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift)); __ bind(&done); - __ str(r5, MemOperand(r3, r4, LSL, 2)); + __ str(r5, MemOperand(r3, key, LSL, 1)); // Entry registers are intact, r0 holds the value which is the return // value. __ Ret(); @@ -4017,7 +4012,7 @@ __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift)); __ b(&done); } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { - __ add(r7, r3, Operand(r4, LSL, 3)); + __ add(r7, r3, Operand(key, LSL, 2)); // r7: effective address of destination element. 
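Editor's note on the external-array stores above (and the remaining ones just below): the key is no longer untagged into r4; the still-tagged smi is scaled directly. Since a smi encodes value << 1, an index i arrives as 2*i, so byte elements use LSR 1, 16-bit elements use the key unshifted, 32-bit elements use LSL 1 and doubles LSL 2. A small arithmetic check of that equivalence follows; the helper names are invented for the sketch.

```cpp
#include <cassert>
#include <cstdint>

// With kSmiTag == 0 and kSmiTagSize == 1 (see the STATIC_ASSERT above), a
// smi is the index shifted left by one. Scaling the tagged key must give
// the same byte offset as untagging first and multiplying by element size.
constexpr uint32_t SmiFromIndex(uint32_t index) { return index << 1; }
constexpr uint32_t ByteOffset(uint32_t index, uint32_t element_size) {
  return index * element_size;
}

int main() {
  for (uint32_t i = 0; i < 1000; ++i) {
    uint32_t key = SmiFromIndex(i);
    assert((key >> 1) == ByteOffset(i, 1));  // strb ..., LSR, 1
    assert((key << 0) == ByteOffset(i, 2));  // strh ..., LSL, 0
    assert((key << 1) == ByteOffset(i, 4));  // str  ..., LSL, 1
    assert((key << 2) == ByteOffset(i, 8));  // vstr via add ..., LSL, 2
  }

  // The hunk also compares the tagged key directly against the loaded
  // length (assuming the length is likewise stored as a smi, both sides
  // carry the same tag factor). The unsigned comparison still rejects
  // negative indices, whose smi encoding is a huge unsigned number.
  int32_t negative_key = -2;  // smi encoding of -1
  uint32_t length_smi = SmiFromIndex(16);
  assert(static_cast<uint32_t>(negative_key) >= length_smi);
  return 0;
}
```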
__ str(r6, MemOperand(r7, 0)); __ str(r5, MemOperand(r7, Register::kSizeInBytes)); @@ -4073,15 +4068,15 @@ switch (elements_kind) { case JSObject::EXTERNAL_BYTE_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - __ strb(r5, MemOperand(r3, r4, LSL, 0)); + __ strb(r5, MemOperand(r3, key, LSR, 1)); break; case JSObject::EXTERNAL_SHORT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - __ strh(r5, MemOperand(r3, r4, LSL, 1)); + __ strh(r5, MemOperand(r3, key, LSL, 0)); break; case JSObject::EXTERNAL_INT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: - __ str(r5, MemOperand(r3, r4, LSL, 2)); + __ str(r5, MemOperand(r3, key, LSL, 1)); break; case JSObject::EXTERNAL_PIXEL_ELEMENTS: case JSObject::EXTERNAL_FLOAT_ELEMENTS: @@ -4152,7 +4147,7 @@ // Load the result and make sure it's not the hole. __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); + STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); __ ldr(r4, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); @@ -4284,7 +4279,7 @@ __ add(scratch, elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); + STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); __ str(value_reg, MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); __ RecordWrite(scratch, @@ -4399,11 +4394,18 @@ } else { destination = FloatingPointHelper::kCoreRegisters; } - __ SmiUntag(value_reg, value_reg); + + Register untagged_value = receiver_reg; + __ SmiUntag(untagged_value, value_reg); FloatingPointHelper::ConvertIntToDouble( - masm, value_reg, destination, - d0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2. - scratch4, s2); // These are: scratch2, single_scratch. + masm, + untagged_value, + destination, + d0, + mantissa_reg, + exponent_reg, + scratch4, + s2); if (destination == FloatingPointHelper::kVFPRegisters) { CpuFeatures::Scope scope(VFP3); __ vstr(d0, scratch, 0); diff -Nru libv8-3.4.14.21/src/array.js libv8-3.5.10.24/src/array.js --- libv8-3.4.14.21/src/array.js 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/array.js 2011-11-08 15:14:43.000000000 +0000 @@ -172,12 +172,12 @@ } else { for (var i = 0; i < length; i++) { var e = array[i]; - if (IS_NUMBER(e)) { - e = %_NumberToString(e); - } else if (!IS_STRING(e)) { - e = convert(e); - } - elements[i] = e; + if (IS_NUMBER(e)) { + e = %_NumberToString(e); + } else if (!IS_STRING(e)) { + e = convert(e); + } + elements[i] = e; } } var result = %_FastAsciiArrayJoin(elements, separator); @@ -742,8 +742,7 @@ else return x < y ? -1 : 1; }; } - var receiver = - %_IsNativeOrStrictMode(comparefn) ? void 0 : %GetGlobalReceiver(); + var receiver = %GetDefaultReceiver(comparefn); function InsertionSort(a, from, to) { for (var i = from + 1; i < to; i++) { @@ -997,6 +996,9 @@ if (!IS_FUNCTION(f)) { throw MakeTypeError('called_non_callable', [ f ]); } + if (IS_NULL_OR_UNDEFINED(receiver)) { + receiver = %GetDefaultReceiver(f) || receiver; + } // Pull out the length so that modifications to the length in the // loop will not affect the looping. 
var length = ToUint32(this.length); @@ -1005,7 +1007,7 @@ for (var i = 0; i < length; i++) { var current = this[i]; if (!IS_UNDEFINED(current) || i in this) { - if (f.call(receiver, current, i, this)) { + if (%_CallFunction(receiver, current, i, this, f)) { result[result_length++] = current; } } @@ -1023,13 +1025,16 @@ if (!IS_FUNCTION(f)) { throw MakeTypeError('called_non_callable', [ f ]); } + if (IS_NULL_OR_UNDEFINED(receiver)) { + receiver = %GetDefaultReceiver(f) || receiver; + } // Pull out the length so that modifications to the length in the // loop will not affect the looping. var length = TO_UINT32(this.length); for (var i = 0; i < length; i++) { var current = this[i]; if (!IS_UNDEFINED(current) || i in this) { - f.call(receiver, current, i, this); + %_CallFunction(receiver, current, i, this, f); } } } @@ -1046,13 +1051,16 @@ if (!IS_FUNCTION(f)) { throw MakeTypeError('called_non_callable', [ f ]); } + if (IS_NULL_OR_UNDEFINED(receiver)) { + receiver = %GetDefaultReceiver(f) || receiver; + } // Pull out the length so that modifications to the length in the // loop will not affect the looping. var length = TO_UINT32(this.length); for (var i = 0; i < length; i++) { var current = this[i]; if (!IS_UNDEFINED(current) || i in this) { - if (f.call(receiver, current, i, this)) return true; + if (%_CallFunction(receiver, current, i, this, f)) return true; } } return false; @@ -1068,13 +1076,16 @@ if (!IS_FUNCTION(f)) { throw MakeTypeError('called_non_callable', [ f ]); } + if (IS_NULL_OR_UNDEFINED(receiver)) { + receiver = %GetDefaultReceiver(f) || receiver; + } // Pull out the length so that modifications to the length in the // loop will not affect the looping. var length = TO_UINT32(this.length); for (var i = 0; i < length; i++) { var current = this[i]; if (!IS_UNDEFINED(current) || i in this) { - if (!f.call(receiver, current, i, this)) return false; + if (!%_CallFunction(receiver, current, i, this, f)) return false; } } return true; @@ -1089,6 +1100,9 @@ if (!IS_FUNCTION(f)) { throw MakeTypeError('called_non_callable', [ f ]); } + if (IS_NULL_OR_UNDEFINED(receiver)) { + receiver = %GetDefaultReceiver(f) || receiver; + } // Pull out the length so that modifications to the length in the // loop will not affect the looping. var length = TO_UINT32(this.length); @@ -1097,7 +1111,7 @@ for (var i = 0; i < length; i++) { var current = this[i]; if (!IS_UNDEFINED(current) || i in this) { - accumulator[i] = f.call(receiver, current, i, this); + accumulator[i] = %_CallFunction(receiver, current, i, this, f); } } %MoveArrayContents(accumulator, result); @@ -1234,6 +1248,7 @@ if (!IS_FUNCTION(callback)) { throw MakeTypeError('called_non_callable', [callback]); } + // Pull out the length so that modifications to the length in the // loop will not affect the looping. 
var length = ToUint32(this.length); @@ -1250,10 +1265,11 @@ throw MakeTypeError('reduce_no_initial', []); } + var receiver = %GetDefaultReceiver(callback); for (; i < length; i++) { var element = this[i]; if (!IS_UNDEFINED(element) || i in this) { - current = callback.call(void 0, current, element, i, this); + current = %_CallFunction(receiver, current, element, i, this, callback); } } return current; @@ -1281,10 +1297,11 @@ throw MakeTypeError('reduce_no_initial', []); } + var receiver = %GetDefaultReceiver(callback); for (; i >= 0; i--) { var element = this[i]; if (!IS_UNDEFINED(element) || i in this) { - current = callback.call(void 0, current, element, i, this); + current = %_CallFunction(receiver, current, element, i, this, callback); } } return current; diff -Nru libv8-3.4.14.21/src/assembler.cc libv8-3.5.10.24/src/assembler.cc --- libv8-3.4.14.21/src/assembler.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/assembler.cc 2011-08-10 11:27:35.000000000 +0000 @@ -74,7 +74,7 @@ const double DoubleConstant::canonical_non_hole_nan = OS::nan_value(); const double DoubleConstant::the_hole_nan = BitCast(kHoleNanInt64); const double DoubleConstant::negative_infinity = -V8_INFINITY; -const char* RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING"; +const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING"; // ----------------------------------------------------------------------------- // Implementation of AssemblerBase diff -Nru libv8-3.4.14.21/src/assembler.h libv8-3.5.10.24/src/assembler.h --- libv8-3.4.14.21/src/assembler.h 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/assembler.h 2011-08-10 11:27:35.000000000 +0000 @@ -171,7 +171,7 @@ // where we are not sure to have enough space for patching in during // lazy deoptimization. This is the case if we have indirect calls for which // we do not normally record relocation info. - static const char* kFillerCommentString; + static const char* const kFillerCommentString; // The minimum size of a comment is equal to three bytes for the extra tagged // pc + the tag for the data, and kPointerSize for the actual pointer to the diff -Nru libv8-3.4.14.21/src/ast.cc libv8-3.5.10.24/src/ast.cc --- libv8-3.4.14.21/src/ast.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ast.cc 2011-08-24 12:02:41.000000000 +0000 @@ -139,8 +139,7 @@ assignment_id_(GetNextId(isolate)), block_start_(false), block_end_(false), - is_monomorphic_(false), - receiver_types_(NULL) { + is_monomorphic_(false) { ASSERT(Token::IsAssignmentOp(op)); if (is_compound()) { binary_operation_ = @@ -426,7 +425,7 @@ } -bool EnterWithContextStatement::IsInlineable() const { +bool WithStatement::IsInlineable() const { return false; } @@ -652,6 +651,7 @@ void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) { // Record type feedback from the oracle in the AST. 
is_monomorphic_ = oracle->LoadIsMonomorphicNormal(this); + receiver_types_.Clear(); if (key()->IsPropertyName()) { if (oracle->LoadIsBuiltin(this, Builtins::kLoadIC_ArrayLength)) { is_array_length_ = true; @@ -664,16 +664,15 @@ Literal* lit_key = key()->AsLiteral(); ASSERT(lit_key != NULL && lit_key->handle()->IsString()); Handle name = Handle::cast(lit_key->handle()); - ZoneMapList* types = oracle->LoadReceiverTypes(this, name); - receiver_types_ = types; + oracle->LoadReceiverTypes(this, name, &receiver_types_); } } else if (oracle->LoadIsBuiltin(this, Builtins::kKeyedLoadIC_String)) { is_string_access_ = true; } else if (is_monomorphic_) { - monomorphic_receiver_type_ = oracle->LoadMonomorphicReceiverType(this); + receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this)); } else if (oracle->LoadIsMegamorphicWithTypeInfo(this)) { - receiver_types_ = new ZoneMapList(kMaxKeyedPolymorphism); - oracle->CollectKeyedReceiverTypes(this->id(), receiver_types_); + receiver_types_.Reserve(kMaxKeyedPolymorphism); + oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_); } } @@ -682,30 +681,31 @@ Property* prop = target()->AsProperty(); ASSERT(prop != NULL); is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this); + receiver_types_.Clear(); if (prop->key()->IsPropertyName()) { Literal* lit_key = prop->key()->AsLiteral(); ASSERT(lit_key != NULL && lit_key->handle()->IsString()); Handle name = Handle::cast(lit_key->handle()); - ZoneMapList* types = oracle->StoreReceiverTypes(this, name); - receiver_types_ = types; + oracle->StoreReceiverTypes(this, name, &receiver_types_); } else if (is_monomorphic_) { // Record receiver type for monomorphic keyed stores. - monomorphic_receiver_type_ = oracle->StoreMonomorphicReceiverType(this); + receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this)); } else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) { - receiver_types_ = new ZoneMapList(kMaxKeyedPolymorphism); - oracle->CollectKeyedReceiverTypes(this->id(), receiver_types_); + receiver_types_.Reserve(kMaxKeyedPolymorphism); + oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_); } } void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) { is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this); + receiver_types_.Clear(); if (is_monomorphic_) { // Record receiver type for monomorphic keyed stores. 
- monomorphic_receiver_type_ = oracle->StoreMonomorphicReceiverType(this); + receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this)); } else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) { - receiver_types_ = new ZoneMapList(kMaxKeyedPolymorphism); - oracle->CollectKeyedReceiverTypes(this->id(), receiver_types_); + receiver_types_.Reserve(kMaxKeyedPolymorphism); + oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_); } } @@ -789,15 +789,14 @@ Literal* key = property->key()->AsLiteral(); ASSERT(key != NULL && key->handle()->IsString()); Handle name = Handle::cast(key->handle()); - receiver_types_ = oracle->CallReceiverTypes(this, name, call_kind); + receiver_types_.Clear(); + oracle->CallReceiverTypes(this, name, call_kind, &receiver_types_); #ifdef DEBUG if (FLAG_enable_slow_asserts) { - if (receiver_types_ != NULL) { - int length = receiver_types_->length(); - for (int i = 0; i < length; i++) { - Handle map = receiver_types_->at(i); - ASSERT(!map.is_null() && *map != NULL); - } + int length = receiver_types_.length(); + for (int i = 0; i < length; i++) { + Handle map = receiver_types_.at(i); + ASSERT(!map.is_null() && *map != NULL); } } #endif @@ -805,9 +804,9 @@ check_type_ = oracle->GetCallCheckType(this); if (is_monomorphic_) { Handle map; - if (receiver_types_ != NULL && receiver_types_->length() > 0) { + if (receiver_types_.length() > 0) { ASSERT(check_type_ == RECEIVER_MAP_CHECK); - map = receiver_types_->at(0); + map = receiver_types_.at(0); } else { ASSERT(check_type_ != RECEIVER_MAP_CHECK); holder_ = Handle( diff -Nru libv8-3.4.14.21/src/ast.h libv8-3.5.10.24/src/ast.h --- libv8-3.4.14.21/src/ast.h 2011-08-09 12:57:00.000000000 +0000 +++ libv8-3.5.10.24/src/ast.h 2011-08-29 10:41:00.000000000 +0000 @@ -33,6 +33,7 @@ #include "factory.h" #include "jsregexp.h" #include "runtime.h" +#include "small-pointer-list.h" #include "token.h" #include "variables.h" @@ -60,7 +61,7 @@ V(ContinueStatement) \ V(BreakStatement) \ V(ReturnStatement) \ - V(EnterWithContextStatement) \ + V(WithStatement) \ V(ExitContextStatement) \ V(SwitchStatement) \ V(DoWhileStatement) \ @@ -207,6 +208,36 @@ }; +class SmallMapList { + public: + SmallMapList() {} + explicit SmallMapList(int capacity) : list_(capacity) {} + + void Reserve(int capacity) { list_.Reserve(capacity); } + void Clear() { list_.Clear(); } + + bool is_empty() const { return list_.is_empty(); } + int length() const { return list_.length(); } + + void Add(Handle handle) { + list_.Add(handle.location()); + } + + Handle at(int i) const { + return Handle(list_.at(i)); + } + + Handle first() const { return at(0); } + Handle last() const { return at(length() - 1); } + + private: + // The list stores pointers to Map*, that is Map**, so it's GC safe. 
+ SmallPointerList list_; + + DISALLOW_COPY_AND_ASSIGN(SmallMapList); +}; + + class Expression: public AstNode { public: enum Context { @@ -265,13 +296,15 @@ UNREACHABLE(); return false; } - virtual ZoneMapList* GetReceiverTypes() { + virtual SmallMapList* GetReceiverTypes() { UNREACHABLE(); return NULL; } - virtual Handle GetMonomorphicReceiverType() { - UNREACHABLE(); - return Handle(); + Handle GetMonomorphicReceiverType() { + ASSERT(IsMonomorphic()); + SmallMapList* types = GetReceiverTypes(); + ASSERT(types != NULL && types->length() == 1); + return types->at(0); } unsigned id() const { return id_; } @@ -359,9 +392,13 @@ ZoneList* statements() { return &statements_; } bool is_initializer_block() const { return is_initializer_block_; } + Scope* block_scope() const { return block_scope_; } + void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; } + private: ZoneList statements_; bool is_initializer_block_; + Scope* block_scope_; }; @@ -371,9 +408,11 @@ : proxy_(proxy), mode_(mode), fun_(fun) { - ASSERT(mode == Variable::VAR || mode == Variable::CONST); + ASSERT(mode == Variable::VAR || + mode == Variable::CONST || + mode == Variable::LET); // At the moment there are no "const functions"'s in JavaScript... - ASSERT(fun == NULL || mode == Variable::VAR); + ASSERT(fun == NULL || mode == Variable::VAR || mode == Variable::LET); } DECLARE_NODE_TYPE(Declaration) @@ -627,19 +666,21 @@ }; -class EnterWithContextStatement: public Statement { +class WithStatement: public Statement { public: - explicit EnterWithContextStatement(Expression* expression) - : expression_(expression) { } + WithStatement(Expression* expression, Statement* statement) + : expression_(expression), statement_(statement) { } - DECLARE_NODE_TYPE(EnterWithContextStatement) + DECLARE_NODE_TYPE(WithStatement) Expression* expression() const { return expression_; } + Statement* statement() const { return statement_; } virtual bool IsInlineable() const; private: Expression* expression_; + Statement* statement_; }; @@ -1190,22 +1231,14 @@ class Property: public Expression { public: - // Synthetic properties are property lookups introduced by the system, - // to objects that aren't visible to the user. Function calls to synthetic - // properties should use the global object as receiver, not the base object - // of the resolved Reference. - enum Type { NORMAL, SYNTHETIC }; Property(Isolate* isolate, Expression* obj, Expression* key, - int pos, - Type type = NORMAL) + int pos) : Expression(isolate), obj_(obj), key_(key), pos_(pos), - type_(type), - receiver_types_(NULL), is_monomorphic_(false), is_array_length_(false), is_string_length_(false), @@ -1220,7 +1253,6 @@ Expression* obj() const { return obj_; } Expression* key() const { return key_; } virtual int position() const { return pos_; } - bool is_synthetic() const { return type_ == SYNTHETIC; } bool IsStringLength() const { return is_string_length_; } bool IsStringAccess() const { return is_string_access_; } @@ -1229,25 +1261,20 @@ // Type feedback information. 
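Editor's note on the SmallMapList class introduced above: it replaces heap-allocated ZoneMapList pointers with an embedded list that stores handle locations (Map**) rather than raw Map*, so a moving GC can update the entries through the handle slot. A rough standalone analogue of that "store the location, rebuild the handle on read" pattern, using a toy Handle type rather than V8's:

```cpp
#include <cassert>
#include <vector>

// Toy stand-ins. A handle is essentially a T** whose slot the GC keeps up
// to date; keeping the slot instead of the raw pointer is what makes the
// embedded list safe across collections.
struct Map { int id; };

template <typename T>
class Handle {
 public:
  explicit Handle(T** location) : location_(location) {}
  T** location() const { return location_; }
  T* operator*() const { return *location_; }
 private:
  T** location_;
};

class SmallMapListSketch {
 public:
  void Reserve(int capacity) { list_.reserve(capacity); }
  void Clear() { list_.clear(); }
  bool is_empty() const { return list_.empty(); }
  int length() const { return static_cast<int>(list_.size()); }
  void Add(Handle<Map> handle) { list_.push_back(handle.location()); }
  Handle<Map> at(int i) const { return Handle<Map>(list_[i]); }

 private:
  std::vector<Map**> list_;  // locations, not raw object pointers
};

int main() {
  Map original{1};
  Map* slot = &original;           // a handle slot the "GC" may rewrite
  SmallMapListSketch list;
  list.Add(Handle<Map>(&slot));

  Map moved{1};                    // simulate the object being moved
  slot = &moved;                   // only the slot is updated
  assert(*list.at(0) == &moved);   // the list still sees the current object
  return 0;
}
```

This is why the ast.cc hunks above switch RecordTypeFeedback to Clear()/Reserve()/Add() on the embedded member instead of allocating a fresh ZoneMapList per AST node.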
void RecordTypeFeedback(TypeFeedbackOracle* oracle); virtual bool IsMonomorphic() { return is_monomorphic_; } - virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; } + virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; } virtual bool IsArrayLength() { return is_array_length_; } - virtual Handle GetMonomorphicReceiverType() { - return monomorphic_receiver_type_; - } private: Expression* obj_; Expression* key_; int pos_; - Type type_; - ZoneMapList* receiver_types_; + SmallMapList receiver_types_; bool is_monomorphic_ : 1; bool is_array_length_ : 1; bool is_string_length_ : 1; bool is_string_access_ : 1; bool is_function_prototype_ : 1; - Handle monomorphic_receiver_type_; }; @@ -1263,7 +1290,6 @@ pos_(pos), is_monomorphic_(false), check_type_(RECEIVER_MAP_CHECK), - receiver_types_(NULL), return_id_(GetNextId(isolate)) { } @@ -1277,7 +1303,7 @@ void RecordTypeFeedback(TypeFeedbackOracle* oracle, CallKind call_kind); - virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; } + virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; } virtual bool IsMonomorphic() { return is_monomorphic_; } CheckType check_type() const { return check_type_; } Handle target() { return target_; } @@ -1302,7 +1328,7 @@ bool is_monomorphic_; CheckType check_type_; - ZoneMapList* receiver_types_; + SmallMapList receiver_types_; Handle target_; Handle holder_; Handle cell_; @@ -1477,8 +1503,7 @@ expression_(expr), pos_(pos), assignment_id_(GetNextId(isolate)), - count_id_(GetNextId(isolate)), - receiver_types_(NULL) { } + count_id_(GetNextId(isolate)) {} DECLARE_NODE_TYPE(CountOperation) @@ -1499,10 +1524,7 @@ void RecordTypeFeedback(TypeFeedbackOracle* oracle); virtual bool IsMonomorphic() { return is_monomorphic_; } - virtual Handle GetMonomorphicReceiverType() { - return monomorphic_receiver_type_; - } - virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; } + virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; } // Bailout support. int AssignmentId() const { return assignment_id_; } @@ -1516,8 +1538,7 @@ int pos_; int assignment_id_; int count_id_; - Handle monomorphic_receiver_type_; - ZoneMapList* receiver_types_; + SmallMapList receiver_types_; }; @@ -1665,10 +1686,7 @@ // Type feedback information. void RecordTypeFeedback(TypeFeedbackOracle* oracle); virtual bool IsMonomorphic() { return is_monomorphic_; } - virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; } - virtual Handle GetMonomorphicReceiverType() { - return monomorphic_receiver_type_; - } + virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; } // Bailout support. 
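Editor's note on the ast.h changes above: the per-node monomorphic_receiver_type_ fields disappear, and GetMonomorphicReceiverType becomes a single non-virtual helper on Expression (earlier in this hunk) that asserts monomorphism and returns the sole entry of GetReceiverTypes(). A compressed sketch of that shape, using toy classes rather than V8's AST:

```cpp
#include <cassert>
#include <vector>

struct Map { int id; };
using SmallMapList = std::vector<Map*>;  // simplified stand-in

static Map kSomeMap{7};

class Expression {
 public:
  virtual ~Expression() = default;
  virtual bool IsMonomorphic() { return false; }
  virtual SmallMapList* GetReceiverTypes() { return nullptr; }

  // Non-virtual helper, as in the hunk: derived nodes only expose their
  // list, and the "exactly one map when monomorphic" invariant is checked
  // here once instead of being duplicated per node type.
  Map* GetMonomorphicReceiverType() {
    assert(IsMonomorphic());
    SmallMapList* types = GetReceiverTypes();
    assert(types != nullptr && types->size() == 1);
    return (*types)[0];
  }
};

class Property : public Expression {
 public:
  Property() { receiver_types_.push_back(&kSomeMap); }
  bool IsMonomorphic() override { return receiver_types_.size() == 1; }
  SmallMapList* GetReceiverTypes() override { return &receiver_types_; }

 private:
  SmallMapList receiver_types_;
};

int main() {
  Property p;
  assert(p.GetMonomorphicReceiverType()->id == 7);
  return 0;
}
```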
int CompoundLoadId() const { return compound_load_id_; } @@ -1687,8 +1705,7 @@ bool block_end_; bool is_monomorphic_; - ZoneMapList* receiver_types_; - Handle monomorphic_receiver_type_; + SmallMapList receiver_types_; }; diff -Nru libv8-3.4.14.21/src/ast-inl.h libv8-3.5.10.24/src/ast-inl.h --- libv8-3.4.14.21/src/ast-inl.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ast-inl.h 2011-08-15 13:01:23.000000000 +0000 @@ -50,7 +50,8 @@ bool is_initializer_block) : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY), statements_(capacity), - is_initializer_block_(is_initializer_block) { + is_initializer_block_(is_initializer_block), + block_scope_(NULL) { } diff -Nru libv8-3.4.14.21/src/bootstrapper.cc libv8-3.5.10.24/src/bootstrapper.cc --- libv8-3.4.14.21/src/bootstrapper.cc 2011-08-11 16:03:29.000000000 +0000 +++ libv8-3.5.10.24/src/bootstrapper.cc 2011-08-31 09:03:56.000000000 +0000 @@ -199,6 +199,7 @@ // New context initialization. Used for creating a context from scratch. void InitializeGlobal(Handle inner_global, Handle empty_function); + void InitializeExperimentalGlobal(); // Installs the contents of the native .js files on the global objects. // Used for creating a context from scratch. void InstallNativeFunctions(); @@ -1159,7 +1160,7 @@ { - // Setup the call-as-function delegate. + // Set up the call-as-function delegate. Handle code = Handle(isolate->builtins()->builtin( Builtins::kHandleApiCallAsFunction)); @@ -1171,7 +1172,7 @@ } { - // Setup the call-as-constructor delegate. + // Set up the call-as-constructor delegate. Handle code = Handle(isolate->builtins()->builtin( Builtins::kHandleApiCallAsConstructor)); @@ -1190,6 +1191,20 @@ } +void Genesis::InitializeExperimentalGlobal() { + Handle global = Handle(global_context()->global()); + + // TODO(mstarzinger): Move this into Genesis::InitializeGlobal once we no + // longer need to live behind a flag, so WeakMap gets added to the snapshot. + if (FLAG_harmony_weakmaps) { // -- W e a k M a p + Handle prototype = + factory()->NewJSObject(isolate()->object_function(), TENURED); + InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize, + prototype, Builtins::kIllegal, true); + } +} + + bool Genesis::CompileBuiltin(Isolate* isolate, int index) { Vector name = Natives::GetScriptName(index); Handle source_code = @@ -1680,6 +1695,11 @@ "native proxy.js") == 0) { if (!CompileExperimentalBuiltin(isolate(), i)) return false; } + if (FLAG_harmony_weakmaps && + strcmp(ExperimentalNatives::GetScriptName(i).start(), + "native weakmap.js") == 0) { + if (!CompileExperimentalBuiltin(isolate(), i)) return false; + } } InstallExperimentalNativeFunctions(); @@ -2169,7 +2189,8 @@ isolate->counters()->contexts_created_from_scratch()->Increment(); } - // Install experimental natives. + // Initialize experimental globals and install experimental natives. + InitializeExperimentalGlobal(); if (!InstallExperimentalNatives()) return; result_ = global_context_; diff -Nru libv8-3.4.14.21/src/checks.h libv8-3.5.10.24/src/checks.h --- libv8-3.4.14.21/src/checks.h 2011-04-04 08:25:31.000000000 +0000 +++ libv8-3.5.10.24/src/checks.h 2011-08-31 09:03:56.000000000 +0000 @@ -251,9 +251,9 @@ // actually causes each use to introduce a new defined type with a // name depending on the source line. 
template class StaticAssertionHelper { }; -#define STATIC_CHECK(test) \ - typedef \ - StaticAssertionHelper(test)>)> \ +#define STATIC_CHECK(test) \ + typedef \ + StaticAssertionHelper((test))>)> \ SEMI_STATIC_JOIN(__StaticAssertTypedef__, __LINE__) diff -Nru libv8-3.4.14.21/src/codegen.cc libv8-3.5.10.24/src/codegen.cc --- libv8-3.4.14.21/src/codegen.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/codegen.cc 2011-08-10 11:27:35.000000000 +0000 @@ -169,7 +169,6 @@ #endif // ENABLE_DISASSEMBLER } -static Vector kRegexp = CStrVector("regexp"); bool CodeGenerator::ShouldGenerateLog(Expression* type) { ASSERT(type != NULL); @@ -179,7 +178,7 @@ } Handle name = Handle::cast(type->AsLiteral()->handle()); if (FLAG_log_regexp) { - if (name->IsEqualTo(kRegexp)) + if (name->IsEqualTo(CStrVector("regexp"))) return true; } return false; diff -Nru libv8-3.4.14.21/src/code-stubs.cc libv8-3.5.10.24/src/code-stubs.cc --- libv8-3.4.14.21/src/code-stubs.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/code-stubs.cc 2011-08-15 13:01:23.000000000 +0000 @@ -329,4 +329,84 @@ stream->Add("CallFunctionStub_Args%d%s%s", argc_, in_loop_name, flags_name); } + +void ToBooleanStub::PrintName(StringStream* stream) { + stream->Add("ToBooleanStub_"); + types_.Print(stream); +} + + +void ToBooleanStub::Types::Print(StringStream* stream) const { + if (IsEmpty()) stream->Add("None"); + if (Contains(UNDEFINED)) stream->Add("Undefined"); + if (Contains(BOOLEAN)) stream->Add("Bool"); + if (Contains(NULL_TYPE)) stream->Add("Null"); + if (Contains(SMI)) stream->Add("Smi"); + if (Contains(SPEC_OBJECT)) stream->Add("SpecObject"); + if (Contains(STRING)) stream->Add("String"); + if (Contains(HEAP_NUMBER)) stream->Add("HeapNumber"); +} + + +void ToBooleanStub::Types::TraceTransition(Types to) const { + if (!FLAG_trace_ic) return; + char buffer[100]; + NoAllocationStringAllocator allocator(buffer, + static_cast(sizeof(buffer))); + StringStream stream(&allocator); + stream.Add("[ToBooleanIC ("); + Print(&stream); + stream.Add("->"); + to.Print(&stream); + stream.Add(")]\n"); + stream.OutputToStdOut(); +} + + +bool ToBooleanStub::Types::Record(Handle object) { + if (object->IsUndefined()) { + Add(UNDEFINED); + return false; + } else if (object->IsBoolean()) { + Add(BOOLEAN); + return object->IsTrue(); + } else if (object->IsNull()) { + Add(NULL_TYPE); + return false; + } else if (object->IsSmi()) { + Add(SMI); + return Smi::cast(*object)->value() != 0; + } else if (object->IsSpecObject()) { + Add(SPEC_OBJECT); + return !object->IsUndetectableObject(); + } else if (object->IsString()) { + Add(STRING); + return !object->IsUndetectableObject() && + String::cast(*object)->length() != 0; + } else if (object->IsHeapNumber()) { + ASSERT(!object->IsUndetectableObject()); + Add(HEAP_NUMBER); + double value = HeapNumber::cast(*object)->value(); + return value != 0 && !isnan(value); + } else { + // We should never see an internal object at runtime here! 
+ UNREACHABLE(); + return true; + } +} + + +bool ToBooleanStub::Types::NeedsMap() const { + return Contains(ToBooleanStub::SPEC_OBJECT) + || Contains(ToBooleanStub::STRING) + || Contains(ToBooleanStub::HEAP_NUMBER); +} + + +bool ToBooleanStub::Types::CanBeUndetectable() const { + return Contains(ToBooleanStub::SPEC_OBJECT) + || Contains(ToBooleanStub::STRING); +} + + } } // namespace v8::internal diff -Nru libv8-3.4.14.21/src/code-stubs.h libv8-3.5.10.24/src/code-stubs.h --- libv8-3.4.14.21/src/code-stubs.h 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/code-stubs.h 2011-08-15 13:01:23.000000000 +0000 @@ -900,14 +900,66 @@ class ToBooleanStub: public CodeStub { public: - explicit ToBooleanStub(Register tos) : tos_(tos) { } + enum Type { + UNDEFINED, + BOOLEAN, + NULL_TYPE, + SMI, + SPEC_OBJECT, + STRING, + HEAP_NUMBER, + NUMBER_OF_TYPES + }; + + // At most 8 different types can be distinguished, because the Code object + // only has room for a single byte to hold a set of these types. :-P + STATIC_ASSERT(NUMBER_OF_TYPES <= 8); + + class Types { + public: + Types() {} + explicit Types(byte bits) : set_(bits) {} + + bool IsEmpty() const { return set_.IsEmpty(); } + bool Contains(Type type) const { return set_.Contains(type); } + void Add(Type type) { set_.Add(type); } + byte ToByte() const { return set_.ToIntegral(); } + void Print(StringStream* stream) const; + void TraceTransition(Types to) const; + bool Record(Handle object); + bool NeedsMap() const; + bool CanBeUndetectable() const; + + private: + EnumSet set_; + }; + + static Types no_types() { return Types(); } + static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); } + + explicit ToBooleanStub(Register tos, Types types = Types()) + : tos_(tos), types_(types) { } void Generate(MacroAssembler* masm); + virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; } + virtual void PrintName(StringStream* stream); private: - Register tos_; Major MajorKey() { return ToBoolean; } - int MinorKey() { return tos_.code(); } + int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); } + + virtual void FinishCode(Code* code) { + code->set_to_boolean_state(types_.ToByte()); + } + + void CheckOddball(MacroAssembler* masm, + Type type, + Heap::RootListIndex value, + bool result); + void GenerateTypeTransition(MacroAssembler* masm); + + Register tos_; + Types types_; }; } } // namespace v8::internal diff -Nru libv8-3.4.14.21/src/compiler.cc libv8-3.5.10.24/src/compiler.cc --- libv8-3.4.14.21/src/compiler.cc 2011-08-09 12:57:00.000000000 +0000 +++ libv8-3.5.10.24/src/compiler.cc 2011-10-17 09:05:38.000000000 +0000 @@ -478,15 +478,21 @@ // that would be compiled lazily anyway, so we skip the preparse step // in that case too. 
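Editor's note on the new ToBooleanStub above: each specialized stub is keyed on the set of input types it has observed, packed into one byte (hence the "at most 8 types" STATIC_ASSERT) next to the register code in MinorKey. The following is a minimal bit-set sketch of that encoding; it illustrates the idea rather than reproducing V8's EnumSet.

```cpp
#include <cassert>
#include <cstdint>

enum Type : uint8_t {
  UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING, HEAP_NUMBER,
  NUMBER_OF_TYPES
};
static_assert(NUMBER_OF_TYPES <= 8, "must fit in one byte");

class Types {
 public:
  bool IsEmpty() const { return bits_ == 0; }
  bool Contains(Type t) const { return (bits_ & (1u << t)) != 0; }
  void Add(Type t) { bits_ |= (1u << t); }
  uint8_t ToByte() const { return bits_; }

  // Mirrors NeedsMap(): only these kinds require loading the object's map.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) || Contains(HEAP_NUMBER);
  }

 private:
  uint8_t bits_ = 0;
};

// MinorKey-style packing: register code in the high bits, type byte below,
// so every distinct (register, observed-types) pair gets its own stub key.
inline int MinorKey(int register_code, Types types) {
  return (register_code << NUMBER_OF_TYPES) | types.ToByte();
}

int main() {
  Types t;
  t.Add(SMI);
  t.Add(HEAP_NUMBER);
  assert(t.NeedsMap());
  assert(MinorKey(/*r0*/ 0, t) == ((1 << SMI) | (1 << HEAP_NUMBER)));
  return 0;
}
```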
ScriptDataImpl* pre_data = input_pre_data; + bool harmony_block_scoping = natives != NATIVES_CODE && + FLAG_harmony_block_scoping; if (pre_data == NULL && source_length >= FLAG_min_preparse_length) { if (source->IsExternalTwoByteString()) { ExternalTwoByteStringUC16CharacterStream stream( Handle::cast(source), 0, source->length()); - pre_data = ParserApi::PartialPreParse(&stream, extension); + pre_data = ParserApi::PartialPreParse(&stream, + extension, + harmony_block_scoping); } else { GenericStringUC16CharacterStream stream(source, 0, source->length()); - pre_data = ParserApi::PartialPreParse(&stream, extension); + pre_data = ParserApi::PartialPreParse(&stream, + extension, + harmony_block_scoping); } } @@ -509,9 +515,6 @@ info.MarkAsGlobal(); info.SetExtension(extension); info.SetPreParseData(pre_data); - if (natives == NATIVES_CODE) { - info.MarkAsAllowingNativesSyntax(); - } result = MakeFunctionInfo(&info); if (extension == NULL && !result.is_null()) { compilation_cache->PutScript(source, result); diff -Nru libv8-3.4.14.21/src/compiler.h libv8-3.5.10.24/src/compiler.h --- libv8-3.4.14.21/src/compiler.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/compiler.h 2011-10-17 09:05:38.000000000 +0000 @@ -83,12 +83,6 @@ ASSERT(is_lazy()); flags_ |= IsInLoop::encode(true); } - void MarkAsAllowingNativesSyntax() { - flags_ |= IsNativesSyntaxAllowed::encode(true); - } - bool allows_natives_syntax() const { - return IsNativesSyntaxAllowed::decode(flags_); - } void MarkAsNative() { flags_ |= IsNative::encode(true); } @@ -199,8 +193,6 @@ class IsInLoop: public BitField {}; // Strict mode - used in eager compilation. class IsStrictMode: public BitField {}; - // Native syntax (%-stuff) allowed? - class IsNativesSyntaxAllowed: public BitField {}; // Is this a function from our natives. class IsNative: public BitField {}; diff -Nru libv8-3.4.14.21/src/contexts.cc libv8-3.5.10.24/src/contexts.cc --- libv8-3.4.14.21/src/contexts.cc 2011-06-29 13:20:01.000000000 +0000 +++ libv8-3.5.10.24/src/contexts.cc 2011-08-31 09:03:56.000000000 +0000 @@ -87,13 +87,15 @@ Handle Context::Lookup(Handle name, ContextLookupFlags flags, int* index_, - PropertyAttributes* attributes) { + PropertyAttributes* attributes, + BindingFlags* binding_flags) { Isolate* isolate = GetIsolate(); Handle context(this, isolate); bool follow_context_chain = (flags & FOLLOW_CONTEXT_CHAIN) != 0; *index_ = -1; *attributes = ABSENT; + *binding_flags = MISSING_BINDING; if (FLAG_trace_contexts) { PrintF("Context::Lookup("); @@ -109,7 +111,7 @@ } // Check extension/with/global object. - if (context->has_extension()) { + if (!context->IsBlockContext() && context->has_extension()) { if (context->IsCatchContext()) { // Catch contexts have the variable name in the extension slot. if (name->Equals(String::cast(context->extension()))) { @@ -118,9 +120,13 @@ } *index_ = Context::THROWN_OBJECT_INDEX; *attributes = NONE; + *binding_flags = MUTABLE_IS_INITIALIZED; return context; } } else { + ASSERT(context->IsGlobalContext() || + context->IsFunctionContext() || + context->IsWithContext()); // Global, function, and with contexts may have an object in the // extension slot. Handle extension(JSObject::cast(context->extension()), @@ -145,11 +151,20 @@ } } - // Only functions can have locals, parameters, and a function name. - if (context->IsFunctionContext()) { + // Check serialized scope information of functions and blocks. Only + // functions can have parameters, and a function name. 
+ if (context->IsFunctionContext() || context->IsBlockContext()) { // We may have context-local slots. Check locals in the context. - Handle scope_info( - context->closure()->shared()->scope_info(), isolate); + Handle scope_info; + if (context->IsFunctionContext()) { + scope_info = Handle( + context->closure()->shared()->scope_info(), isolate); + } else { + ASSERT(context->IsBlockContext()); + scope_info = Handle( + SerializedScopeInfo::cast(context->extension()), isolate); + } + Variable::Mode mode; int index = scope_info->ContextSlotIndex(*name, &mode); ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS); @@ -169,9 +184,15 @@ case Variable::INTERNAL: // Fall through. case Variable::VAR: *attributes = NONE; + *binding_flags = MUTABLE_IS_INITIALIZED; + break; + case Variable::LET: + *attributes = NONE; + *binding_flags = MUTABLE_CHECK_INITIALIZED; break; case Variable::CONST: *attributes = READ_ONLY; + *binding_flags = IMMUTABLE_CHECK_INITIALIZED; break; case Variable::DYNAMIC: case Variable::DYNAMIC_GLOBAL: @@ -194,6 +215,7 @@ } *index_ = index; *attributes = READ_ONLY; + *binding_flags = IMMUTABLE_IS_INITIALIZED; return context; } } diff -Nru libv8-3.4.14.21/src/contexts.h libv8-3.5.10.24/src/contexts.h --- libv8-3.4.14.21/src/contexts.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/contexts.h 2011-08-31 09:03:56.000000000 +0000 @@ -44,6 +44,30 @@ }; +// ES5 10.2 defines lexical environments with mutable and immutable bindings. +// Immutable bindings have two states, initialized and uninitialized, and +// their state is changed by the InitializeImmutableBinding method. +// +// The harmony proposal for block scoped bindings also introduces the +// uninitialized state for mutable bindings. A 'let' declared variable +// is a mutable binding that is created uninitalized upon activation of its +// lexical environment and it is initialized when evaluating its declaration +// statement. Var declared variables are mutable bindings that are +// immediately initialized upon creation. The BindingFlags enum represents +// information if a binding has definitely been initialized. 'const' declared +// variables are created as uninitialized immutable bindings. + +// In harmony mode accessing an uninitialized binding produces a reference +// error. +enum BindingFlags { + MUTABLE_IS_INITIALIZED, + MUTABLE_CHECK_INITIALIZED, + IMMUTABLE_IS_INITIALIZED, + IMMUTABLE_CHECK_INITIALIZED, + MISSING_BINDING +}; + + // Heap-allocated activation contexts. // // Contexts are implemented as FixedArray objects; the Context @@ -295,6 +319,10 @@ Map* map = this->map(); return map == map->GetHeap()->with_context_map(); } + bool IsBlockContext() { + Map* map = this->map(); + return map == map->GetHeap()->block_context_map(); + } // Tells whether the global context is marked with out of memory. inline bool has_out_of_memory(); @@ -347,8 +375,11 @@ // 4) index_ < 0 && result.is_null(): // there was no context found with the corresponding property. // attributes == ABSENT. - Handle Lookup(Handle name, ContextLookupFlags flags, - int* index_, PropertyAttributes* attributes); + Handle Lookup(Handle name, + ContextLookupFlags flags, + int* index_, + PropertyAttributes* attributes, + BindingFlags* binding_flags); // Determine if a local variable with the given name exists in a // context. Do not consider context extension objects. 
This is diff -Nru libv8-3.4.14.21/src/conversions.h libv8-3.5.10.24/src/conversions.h --- libv8-3.4.14.21/src/conversions.h 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/conversions.h 2011-09-27 07:55:59.000000000 +0000 @@ -28,9 +28,8 @@ #ifndef V8_CONVERSIONS_H_ #define V8_CONVERSIONS_H_ -#include - #include "scanner-base.h" +#include "utils.h" namespace v8 { namespace internal { diff -Nru libv8-3.4.14.21/src/conversions-inl.h libv8-3.5.10.24/src/conversions-inl.h --- libv8-3.4.14.21/src/conversions-inl.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/conversions-inl.h 2011-09-27 07:55:59.000000000 +0000 @@ -44,7 +44,7 @@ namespace internal { static inline double JunkStringValue() { - return std::numeric_limits::quiet_NaN(); + return BitCast(kQuietNaNMask); } diff -Nru libv8-3.4.14.21/src/cpu-profiler.cc libv8-3.5.10.24/src/cpu-profiler.cc --- libv8-3.4.14.21/src/cpu-profiler.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/cpu-profiler.cc 2011-10-14 09:24:03.000000000 +0000 @@ -562,12 +562,12 @@ sampler->Stop(); need_to_stop_sampler_ = false; } + NoBarrier_Store(&is_profiling_, false); processor_->Stop(); processor_->Join(); delete processor_; delete generator_; processor_ = NULL; - NoBarrier_Store(&is_profiling_, false); generator_ = NULL; logger->logging_nesting_ = saved_logging_nesting_; } diff -Nru libv8-3.4.14.21/src/d8.cc libv8-3.5.10.24/src/d8.cc --- libv8-3.4.14.21/src/d8.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/d8.cc 2011-09-21 14:08:56.000000000 +0000 @@ -26,8 +26,8 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -#ifdef V8_SHARED -#define USING_V8_SHARED +#ifdef USING_V8_SHARED // Defined when linking against shared lib on Windows. +#define V8_SHARED #endif #ifdef COMPRESS_STARTUP_DATA_BZ2 @@ -37,15 +37,16 @@ #include #include #include +#include -#ifdef USING_V8_SHARED +#ifdef V8_SHARED #include #include "../include/v8-testing.h" -#endif // USING_V8_SHARED +#endif // V8_SHARED #include "d8.h" -#ifndef USING_V8_SHARED +#ifndef V8_SHARED #include "api.h" #include "checks.h" #include "d8-debug.h" @@ -53,20 +54,20 @@ #include "natives.h" #include "platform.h" #include "v8.h" -#endif // USING_V8_SHARED +#endif // V8_SHARED #if !defined(_WIN32) && !defined(_WIN64) #include // NOLINT #endif -#ifdef USING_V8_SHARED +#ifndef ASSERT #define ASSERT(condition) assert(condition) -#endif // USING_V8_SHARED +#endif namespace v8 { -#ifndef USING_V8_SHARED +#ifndef V8_SHARED LineEditor *LineEditor::first_ = NULL; const char* Shell::kHistoryFileName = ".d8_history"; @@ -116,20 +117,20 @@ CounterCollection* Shell::counters_ = &local_counters_; i::Mutex* Shell::context_mutex_(i::OS::CreateMutex()); Persistent Shell::utility_context_; -#endif // USING_V8_SHARED +#endif // V8_SHARED Persistent Shell::evaluation_context_; ShellOptions Shell::options; const char* Shell::kPrompt = "d8> "; -#ifndef USING_V8_SHARED +#ifndef V8_SHARED bool CounterMap::Match(void* key1, void* key2) { const char* name1 = reinterpret_cast(key1); const char* name2 = reinterpret_cast(key2); return strcmp(name1, name2) == 0; } -#endif // USING_V8_SHARED +#endif // V8_SHARED // Converts a V8 value to a C string. 
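Editor's note on the conversions-inl.h hunk above: JunkStringValue now builds its quiet NaN by bit-casting a mask instead of calling std::numeric_limits, dropping the <limits> dependency. A self-contained illustration of producing a quiet NaN from its bit pattern; the mask used here is the generic IEEE-754 quiet-NaN pattern and is an assumption, not necessarily V8's exact kQuietNaNMask value.

```cpp
#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

// memcpy-based bit cast, the portable equivalent of V8's BitCast template.
inline double DoubleFromBits(uint64_t bits) {
  double result;
  static_assert(sizeof(result) == sizeof(bits), "size mismatch");
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}

int main() {
  // Exponent all ones plus the top mantissa bit set: a quiet NaN.
  const uint64_t kQuietNaNBits = 0x7FF8000000000000ULL;
  double junk = DoubleFromBits(kQuietNaNBits);
  assert(std::isnan(junk));
  return 0;
}
```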
@@ -143,11 +144,11 @@ Handle name, bool print_result, bool report_exceptions) { -#ifndef USING_V8_SHARED +#ifndef V8_SHARED bool FLAG_debugger = i::FLAG_debugger; #else bool FLAG_debugger = false; -#endif // USING_V8_SHARED +#endif // V8_SHARED HandleScope handle_scope; TryCatch try_catch; options.script_executed = true; @@ -199,7 +200,7 @@ printf(" "); } v8::String::Utf8Value str(args[i]); - int n = fwrite(*str, sizeof(**str), str.length(), stdout); + int n = static_cast(fwrite(*str, sizeof(**str), str.length(), stdout)); if (n != str.length()) { printf("Error in fwrite\n"); exit(1); @@ -226,17 +227,24 @@ static const int kBufferSize = 256; char buffer[kBufferSize]; Handle accumulator = String::New(""); - bool linebreak; int length; - do { // Repeat if the line ends with an escape '\'. - // fgets got an error. Just give up. + while (true) { + // Continue reading if the line ends with an escape '\\' or the line has + // not been fully read into the buffer yet (does not end with '\n'). + // If fgets gets an error, just give up. if (fgets(buffer, kBufferSize, stdin) == NULL) return Null(); - length = strlen(buffer); - linebreak = (length > 1 && buffer[length-2] == '\\'); - if (linebreak) buffer[length-2] = '\n'; - accumulator = String::Concat(accumulator, String::New(buffer, length-1)); - } while (linebreak); - return accumulator; + length = static_cast(strlen(buffer)); + if (length == 0) { + return accumulator; + } else if (buffer[length-1] != '\n') { + accumulator = String::Concat(accumulator, String::New(buffer, length)); + } else if (length > 1 && buffer[length-2] == '\\') { + buffer[length-2] = '\n'; + accumulator = String::Concat(accumulator, String::New(buffer, length-1)); + } else { + return String::Concat(accumulator, String::New(buffer, length-1)); + } + } } @@ -269,9 +277,9 @@ String::New("Array constructor needs one parameter.")); } static const int kMaxLength = 0x3fffffff; -#ifndef USING_V8_SHARED +#ifndef V8_SHARED ASSERT(kMaxLength == i::ExternalArray::kMaxLength); -#endif // USING_V8_SHARED +#endif // V8_SHARED size_t length = 0; if (args[0]->IsUint32()) { length = args[0]->Uint32Value(); @@ -299,9 +307,12 @@ Persistent persistent_array = Persistent::New(array); persistent_array.MakeWeak(data, ExternalArrayWeakCallback); persistent_array.MarkIndependent(); - array->SetIndexedPropertiesToExternalArrayData(data, type, length); - array->Set(String::New("length"), Int32::New(length), ReadOnly); - array->Set(String::New("BYTES_PER_ELEMENT"), Int32::New(element_size)); + array->SetIndexedPropertiesToExternalArrayData(data, type, + static_cast(length)); + array->Set(String::New("length"), + Int32::New(static_cast(length)), ReadOnly); + array->Set(String::New("BYTES_PER_ELEMENT"), + Int32::New(static_cast(element_size))); return array; } @@ -368,9 +379,9 @@ Handle Shell::Quit(const Arguments& args) { int exit_code = args[0]->Int32Value(); -#ifndef USING_V8_SHARED +#ifndef V8_SHARED OnExit(); -#endif // USING_V8_SHARED +#endif // V8_SHARED exit(exit_code); return Undefined(); } @@ -419,7 +430,7 @@ } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED Handle Shell::GetCompletions(Handle text, Handle full) { HandleScope handle_scope; Context::Scope context_scope(utility_context_); @@ -454,10 +465,10 @@ return val; } #endif // ENABLE_DEBUGGER_SUPPORT -#endif // USING_V8_SHARED +#endif // V8_SHARED -#ifndef USING_V8_SHARED +#ifndef V8_SHARED int32_t* Counter::Bind(const char* name, bool is_histogram) { int i; for (i = 0; i < kMaxNameSize - 1 && name[i]; i++) @@ -605,7 +616,7 @@ } #endif // 
ENABLE_DEBUGGER_SUPPORT } -#endif // USING_V8_SHARED +#endif // V8_SHARED #ifdef COMPRESS_STARTUP_DATA_BZ2 @@ -667,16 +678,16 @@ FunctionTemplate::New(PixelArray)); #ifdef LIVE_OBJECT_LIST - global_template->Set(String::New("lol_is_enabled"), Boolean::New(true)); + global_template->Set(String::New("lol_is_enabled"), True()); #else - global_template->Set(String::New("lol_is_enabled"), Boolean::New(false)); + global_template->Set(String::New("lol_is_enabled"), False()); #endif -#ifndef USING_V8_SHARED +#ifndef V8_SHARED Handle os_templ = ObjectTemplate::New(); AddOSMethods(os_templ); global_template->Set(String::New("os"), os_templ); -#endif // USING_V8_SHARED +#endif // V8_SHARED return global_template; } @@ -692,7 +703,7 @@ } #endif -#ifndef USING_V8_SHARED +#ifndef V8_SHARED Shell::counter_map_ = new CounterMap(); // Set up counters if (i::StrLength(i::FLAG_map_counters) != 0) @@ -702,10 +713,10 @@ V8::SetCreateHistogramFunction(CreateHistogram); V8::SetAddHistogramSampleFunction(AddHistogramSample); } -#endif // USING_V8_SHARED +#endif // V8_SHARED if (options.test_shell) return; -#ifndef USING_V8_SHARED +#ifndef V8_SHARED Locker lock; HandleScope scope; Handle global_template = CreateGlobalTemplate(); @@ -717,21 +728,22 @@ v8::Debug::EnableAgent("d8 shell", i::FLAG_debugger_port, true); } #endif // ENABLE_DEBUGGER_SUPPORT -#endif // USING_V8_SHARED +#endif // V8_SHARED } Persistent Shell::CreateEvaluationContext() { -#ifndef USING_V8_SHARED +#ifndef V8_SHARED // This needs to be a critical section since this is not thread-safe i::ScopedLock lock(context_mutex_); -#endif // USING_V8_SHARED +#endif // V8_SHARED // Initialize the global objects Handle global_template = CreateGlobalTemplate(); Persistent context = Context::New(NULL, global_template); + ASSERT(!context.IsEmpty()); Context::Scope scope(context); -#ifndef USING_V8_SHARED +#ifndef V8_SHARED i::JSArguments js_args = i::FLAG_js_arguments; i::Handle arguments_array = FACTORY->NewFixedArray(js_args.argc()); @@ -744,12 +756,12 @@ FACTORY->NewJSArrayWithElements(arguments_array); context->Global()->Set(String::New("arguments"), Utils::ToLocal(arguments_jsarray)); -#endif // USING_V8_SHARED +#endif // V8_SHARED return context; } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED void Shell::OnExit() { if (i::FLAG_dump_counters) { printf("+----------------------------------------+-------------+\n"); @@ -769,18 +781,34 @@ if (counters_file_ != NULL) delete counters_file_; } -#endif // USING_V8_SHARED +#endif // V8_SHARED + + +static FILE* FOpen(const char* path, const char* mode) { +#if (defined(_WIN32) || defined(_WIN64)) + FILE* result; + if (fopen_s(&result, path, mode) == 0) { + return result; + } else { + return NULL; + } +#else + FILE* file = fopen(path, mode); + if (file == NULL) return NULL; + struct stat file_stat; + if (fstat(fileno(file), &file_stat) != 0) return NULL; + bool is_regular_file = ((file_stat.st_mode & S_IFREG) != 0); + if (is_regular_file) return file; + fclose(file); + return NULL; +#endif +} static char* ReadChars(const char* name, int* size_out) { // Release the V8 lock while reading files. v8::Unlocker unlocker(Isolate::GetCurrent()); -#ifndef USING_V8_SHARED - FILE* file = i::OS::FOpen(name, "rb"); -#else - // TODO(yangguo@chromium.org): reading from a directory hangs! 
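Editor's note on the d8.cc hunks in this region: the new FOpen helper above refuses to open anything that is not a regular file (fstat + S_IFREG on POSIX, fopen_s on Windows), and the rewritten ReadLine loop earlier in the file keeps appending fgets chunks until a real end of line, treating a trailing backslash as a continuation. A POSIX-only sketch of both, with invented names and simplified error handling:

```cpp
#include <cstdio>
#include <cstring>
#include <string>
#include <sys/stat.h>

// Open 'path' only if it is a regular file; reading from a directory is
// what the FOpen hunk guards against.
FILE* OpenRegularFile(const char* path, const char* mode) {
  FILE* file = std::fopen(path, mode);
  if (file == nullptr) return nullptr;
  struct stat file_stat;
  if (fstat(fileno(file), &file_stat) != 0 ||
      (file_stat.st_mode & S_IFREG) == 0) {
    std::fclose(file);
    return nullptr;
  }
  return file;
}

// Sketch of the rewritten ReadLine loop: accumulate until the buffer ends
// in '\n', and treat an escaped newline ("\\\n") as a continuation.
std::string ReadLogicalLine(FILE* in) {
  char buffer[256];
  std::string accumulator;
  while (std::fgets(buffer, sizeof(buffer), in) != nullptr) {
    size_t length = std::strlen(buffer);
    if (length == 0) break;
    if (buffer[length - 1] != '\n') {
      accumulator.append(buffer, length);       // partial read, keep going
    } else if (length > 1 && buffer[length - 2] == '\\') {
      buffer[length - 2] = '\n';                 // escaped newline
      accumulator.append(buffer, length - 1);    // keep reading
    } else {
      accumulator.append(buffer, length - 1);    // drop the newline, done
      break;
    }
  }
  return accumulator;
}

int main() {
  FILE* self = OpenRegularFile(__FILE__, "rb");
  if (self != nullptr) {
    std::printf("first line: %s\n", ReadLogicalLine(self).c_str());
    std::fclose(self);
  }
  return 0;
}
```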
- FILE* file = fopen(name, "rb"); -#endif // USING_V8_SHARED + FILE* file = FOpen(name, "rb"); if (file == NULL) return NULL; fseek(file, 0, SEEK_END); @@ -790,7 +818,7 @@ char* chars = new char[size + 1]; chars[size] = '\0'; for (int i = 0; i < size;) { - int read = fread(&chars[i], 1, size - i, file); + int read = static_cast(fread(&chars[i], 1, size - i, file)); i += read; } fclose(file); @@ -799,7 +827,7 @@ } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED static char* ReadToken(char* data, char token) { char* next = i::OS::StrChr(data, token); if (next != NULL) { @@ -819,7 +847,7 @@ static char* ReadWord(char* data) { return ReadToken(data, ' '); } -#endif // USING_V8_SHARED +#endif // V8_SHARED // Reads a file into a v8 string. @@ -838,7 +866,7 @@ Context::Scope context_scope(evaluation_context_); HandleScope handle_scope; Handle name = String::New("(d8)"); -#ifndef USING_V8_SHARED +#ifndef V8_SHARED LineEditor* editor = LineEditor::Get(); printf("V8 version %s [console: %s]\n", V8::GetVersion(), editor->name()); if (i::FLAG_debugger) { @@ -861,12 +889,12 @@ if (fgets(buffer, kBufferSize, stdin) == NULL) break; ExecuteString(String::New(buffer), name, true, true); } -#endif // USING_V8_SHARED +#endif // V8_SHARED printf("\n"); } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED class ShellThread : public i::Thread { public: ShellThread(int no, i::Vector files) @@ -919,7 +947,7 @@ ptr = next_line; } } -#endif // USING_V8_SHARED +#endif // V8_SHARED void SourceGroup::ExitShell(int exit_code) { @@ -966,32 +994,16 @@ Handle SourceGroup::ReadFile(const char* name) { -#ifndef USING_V8_SHARED - FILE* file = i::OS::FOpen(name, "rb"); -#else - // TODO(yangguo@chromium.org): reading from a directory hangs! - FILE* file = fopen(name, "rb"); -#endif // USING_V8_SHARED - if (file == NULL) return Handle(); - - fseek(file, 0, SEEK_END); - int size = ftell(file); - rewind(file); - - char* chars = new char[size + 1]; - chars[size] = '\0'; - for (int i = 0; i < size;) { - int read = fread(&chars[i], 1, size - i, file); - i += read; - } - fclose(file); + int size; + const char* chars = ReadChars(name, &size); + if (chars == NULL) return Handle(); Handle result = String::New(chars, size); delete[] chars; return result; } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED i::Thread::Options SourceGroup::GetThreadOptions() { i::Thread::Options options; options.name = "IsolateThread"; @@ -1043,7 +1055,7 @@ done_semaphore_->Wait(); } } -#endif // USING_V8_SHARED +#endif // V8_SHARED bool Shell::SetOptions(int argc, char* argv[]) { @@ -1065,23 +1077,23 @@ options.test_shell = true; argv[i] = NULL; } else if (strcmp(argv[i], "--preemption") == 0) { -#ifdef USING_V8_SHARED +#ifdef V8_SHARED printf("D8 with shared library does not support multi-threading\n"); return false; #else options.use_preemption = true; argv[i] = NULL; -#endif // USING_V8_SHARED +#endif // V8_SHARED } else if (strcmp(argv[i], "--no-preemption") == 0) { -#ifdef USING_V8_SHARED +#ifdef V8_SHARED printf("D8 with shared library does not support multi-threading\n"); return false; #else options.use_preemption = false; argv[i] = NULL; -#endif // USING_V8_SHARED +#endif // V8_SHARED } else if (strcmp(argv[i], "--preemption-interval") == 0) { -#ifdef USING_V8_SHARED +#ifdef V8_SHARED printf("D8 with shared library does not support multi-threading\n"); return false; #else @@ -1100,19 +1112,19 @@ printf("Missing value for --preemption-interval\n"); return false; } -#endif // USING_V8_SHARED +#endif // V8_SHARED } else if (strcmp(argv[i], "-f") == 0) { // Ignore any 
-f flags for compatibility with other stand-alone // JavaScript engines. continue; } else if (strcmp(argv[i], "--isolate") == 0) { -#ifdef USING_V8_SHARED +#ifdef V8_SHARED printf("D8 with shared library does not support multi-threading\n"); return false; -#endif // USING_V8_SHARED +#endif // V8_SHARED options.num_isolates++; } -#ifdef USING_V8_SHARED +#ifdef V8_SHARED else if (strcmp(argv[i], "--dump-counters") == 0) { printf("D8 with shared library does not include counters\n"); return false; @@ -1123,10 +1135,10 @@ printf("Javascript debugger not included\n"); return false; } -#endif // USING_V8_SHARED +#endif // V8_SHARED } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED // Run parallel threads if we are not using --isolate for (int i = 1; i < argc; i++) { if (argv[i] == NULL) continue; @@ -1147,9 +1159,10 @@ } argv[i] = NULL; options.parallel_files->Add(i::Vector(files, size)); + delete[] files; } } -#endif // USING_V8_SHARED +#endif // V8_SHARED v8::V8::SetFlagsFromCommandLine(&argc, argv, true); @@ -1174,7 +1187,7 @@ int Shell::RunMain(int argc, char* argv[]) { -#ifndef USING_V8_SHARED +#ifndef V8_SHARED i::List threads(1); if (options.parallel_files != NULL) for (int i = 0; i < options.parallel_files->length(); i++) { @@ -1187,33 +1200,41 @@ for (int i = 1; i < options.num_isolates; ++i) { options.isolate_sources[i].StartExecuteInThread(); } -#endif // USING_V8_SHARED +#endif // V8_SHARED { // NOLINT Locker lock; HandleScope scope; Persistent context = CreateEvaluationContext(); + if (options.last_run) { + // Keep using the same context in the interactive shell. + evaluation_context_ = context; +#ifndef V8_SHARED + // If the interactive debugger is enabled make sure to activate + // it before running the files passed on the command line. + if (i::FLAG_debugger) { + InstallUtilityScript(); + } +#endif // V8_SHARED + } { Context::Scope cscope(context); options.isolate_sources[0].Execute(); } - if (options.last_run) { - // Keep using the same context in the interactive shell - evaluation_context_ = context; - } else { + if (!options.last_run) { context.Dispose(); } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED // Start preemption if threads have been created and preemption is enabled. 
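// Illustrative sketch, not part of the patch: the ReadChars()/SourceGroup::ReadFile()
// change above funnels both callers through one fseek/ftell/fread helper. The function
// below (ReadWholeFile is an invented name) shows the same whole-file read loop using
// only the C standard library; the retry loop matters because fread may return fewer
// bytes than requested.
#include <cstdio>

static char* ReadWholeFile(const char* name, int* size_out) {
  FILE* file = std::fopen(name, "rb");
  if (file == NULL) return NULL;
  std::fseek(file, 0, SEEK_END);
  int size = static_cast<int>(std::ftell(file));
  std::rewind(file);
  char* chars = new char[size + 1];
  chars[size] = '\0';
  for (int i = 0; i < size;) {
    int read = static_cast<int>(std::fread(&chars[i], 1, size - i, file));
    if (read <= 0) break;  // Stop on error or premature EOF instead of spinning.
    i += read;
  }
  std::fclose(file);
  *size_out = size;
  return chars;  // Caller releases with delete[].
}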
if (options.parallel_files != NULL && threads.length() > 0 && options.use_preemption) { Locker::StartPreemption(options.preemption_interval); } -#endif // USING_V8_SHARED +#endif // V8_SHARED } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED for (int i = 1; i < options.num_isolates; ++i) { options.isolate_sources[i].WaitForThread(); } @@ -1224,9 +1245,7 @@ thread->Join(); delete thread; } - - OnExit(); -#endif // USING_V8_SHARED +#endif // V8_SHARED return 0; } @@ -1254,14 +1273,14 @@ } -#if !defined(USING_V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) +#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT) // Run remote debugger if requested, but never on --test if (i::FLAG_remote_debugger && !options.test_shell) { InstallUtilityScript(); RunRemoteDebugger(i::FLAG_debugger_port); return 0; } -#endif // !USING_V8_SHARED && ENABLE_DEBUGGER_SUPPORT +#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT // Run interactive shell if explicitly requested or if no script has been // executed, but never on --test @@ -1269,14 +1288,20 @@ if (( options.interactive_shell || !options.script_executed ) && !options.test_shell ) { -#ifndef USING_V8_SHARED - InstallUtilityScript(); -#endif // USING_V8_SHARED +#ifndef V8_SHARED + if (!i::FLAG_debugger) { + InstallUtilityScript(); + } +#endif // V8_SHARED RunShell(); } V8::Dispose(); +#ifndef V8_SHARED + OnExit(); +#endif // V8_SHARED + return result; } diff -Nru libv8-3.4.14.21/src/d8.gyp libv8-3.5.10.24/src/d8.gyp --- libv8-3.4.14.21/src/d8.gyp 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/d8.gyp 2011-08-29 10:41:00.000000000 +0000 @@ -26,6 +26,7 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. { + 'includes': ['../build/common.gypi'], 'variables': { 'console%': '', }, @@ -36,6 +37,7 @@ 'dependencies': [ '../tools/gyp/v8.gyp:v8', ], + # Generated source files need this explicitly: 'include_dirs+': [ '../src', ], @@ -47,9 +49,17 @@ ], 'conditions': [ [ 'component!="shared_library"', { - 'dependencies': [ 'd8_js2c#host', ], 'sources': [ 'd8-debug.cc', '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc', ], 'conditions': [ + [ 'want_separate_host_toolset==1', { + 'dependencies': [ + 'd8_js2c#host', + ], + }, { + 'dependencies': [ + 'd8_js2c', + ], + }], [ 'console=="readline"', { 'libraries': [ '-lreadline', ], 'sources': [ 'd8-readline.cc' ], @@ -68,13 +78,19 @@ { 'target_name': 'd8_js2c', 'type': 'none', - 'toolsets': ['host'], 'variables': { 'js_files': [ 'd8.js', 'macros.py', ], }, + 'conditions': [ + [ 'want_separate_host_toolset==1', { + 'toolsets': ['host'], + }, { + 'toolsets': ['target'], + }] + ], 'actions': [ { 'action_name': 'd8_js2c', @@ -90,6 +106,7 @@ '../tools/js2c.py', '<@(_outputs)', 'D8', + 'off', # compress startup data '<@(js_files)' ], }, diff -Nru libv8-3.4.14.21/src/d8.h libv8-3.5.10.24/src/d8.h --- libv8-3.4.14.21/src/d8.h 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/d8.h 2011-08-15 13:01:23.000000000 +0000 @@ -29,22 +29,18 @@ #define V8_D8_H_ -#ifndef USING_V8_SHARED +#ifndef V8_SHARED #include "v8.h" #include "allocation.h" #include "hashmap.h" #else #include "../include/v8.h" -#endif // USING_V8_SHARED +#endif // V8_SHARED namespace v8 { -#ifndef USING_V8_SHARED -namespace i = v8::internal; -#endif // USING_V8_SHARED - -#ifndef USING_V8_SHARED +#ifndef V8_SHARED // A single counter in a counter collection. 
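// Illustrative sketch, not part of the patch: the V8_SHARED conditionals above follow one
// pattern — features that need V8 internals (preemption, parallel threads, counters, the
// utility/debugger script, the internal headers in d8.h) are compiled out of the
// shared-library build, and the option parser rejects the corresponding flags instead of
// silently ignoring them. A reduced version of that gating (Options and ParseFlag are
// invented names):
#include <cstdio>
#include <cstring>

struct Options { bool use_preemption; };

static bool ParseFlag(const char* arg, Options* options) {
  if (std::strcmp(arg, "--preemption") == 0) {
#ifdef V8_SHARED
    // The feature is not compiled in, so fail loudly.
    std::printf("This build does not support multi-threading\n");
    return false;
#else
    options->use_preemption = true;
    return true;
#endif
  }
  return true;  // Unknown flags are left for V8::SetFlagsFromCommandLine to consume.
}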
class Counter { public: @@ -117,17 +113,17 @@ static bool Match(void* key1, void* key2); i::HashMap hash_map_; }; -#endif // USING_V8_SHARED +#endif // V8_SHARED class SourceGroup { public: SourceGroup() : -#ifndef USING_V8_SHARED +#ifndef V8_SHARED next_semaphore_(v8::internal::OS::CreateSemaphore(0)), done_semaphore_(v8::internal::OS::CreateSemaphore(0)), thread_(NULL), -#endif // USING_V8_SHARED +#endif // V8_SHARED argv_(NULL), begin_offset_(0), end_offset_(0) { } @@ -141,7 +137,7 @@ void Execute(); -#ifndef USING_V8_SHARED +#ifndef V8_SHARED void StartExecuteInThread(); void WaitForThread(); @@ -165,7 +161,7 @@ i::Semaphore* next_semaphore_; i::Semaphore* done_semaphore_; i::Thread* thread_; -#endif // USING_V8_SHARED +#endif // V8_SHARED void ExitShell(int exit_code); Handle ReadFile(const char* name); @@ -179,11 +175,11 @@ class ShellOptions { public: ShellOptions() : -#ifndef USING_V8_SHARED +#ifndef V8_SHARED use_preemption(true), preemption_interval(10), parallel_files(NULL), -#endif // USING_V8_SHARED +#endif // V8_SHARED script_executed(false), last_run(true), stress_opt(false), @@ -193,11 +189,11 @@ num_isolates(1), isolate_sources(NULL) { } -#ifndef USING_V8_SHARED +#ifndef V8_SHARED bool use_preemption; int preemption_interval; i::List< i::Vector >* parallel_files; -#endif // USING_V8_SHARED +#endif // V8_SHARED bool script_executed; bool last_run; bool stress_opt; @@ -208,11 +204,11 @@ SourceGroup* isolate_sources; }; -#ifdef USING_V8_SHARED +#ifdef V8_SHARED class Shell { #else class Shell : public i::AllStatic { -#endif // USING_V8_SHARED +#endif // V8_SHARED public: static bool ExecuteString(Handle source, Handle name, @@ -225,7 +221,7 @@ static int RunMain(int argc, char* argv[]); static int Main(int argc, char* argv[]); -#ifndef USING_V8_SHARED +#ifndef V8_SHARED static Handle GetCompletions(Handle text, Handle full); static void OnExit(); @@ -236,7 +232,7 @@ size_t buckets); static void AddHistogramSample(void* histogram, int sample); static void MapCounters(const char* name); -#endif // USING_V8_SHARED +#endif // V8_SHARED #ifdef ENABLE_DEBUGGER_SUPPORT static Handle DebugMessageDetails(Handle message); @@ -300,15 +296,15 @@ static Handle RemoveDirectory(const Arguments& args); static void AddOSMethods(Handle os_template); -#ifndef USING_V8_SHARED +#ifndef V8_SHARED static const char* kHistoryFileName; -#endif // USING_V8_SHARED +#endif // V8_SHARED static const char* kPrompt; static ShellOptions options; private: static Persistent evaluation_context_; -#ifndef USING_V8_SHARED +#ifndef V8_SHARED static Persistent utility_context_; static CounterMap* counter_map_; // We statically allocate a set of local counters to be used if we @@ -320,7 +316,7 @@ static Counter* GetCounter(const char* name, bool is_histogram); static void InstallUtilityScript(); -#endif // USING_V8_SHARED +#endif // V8_SHARED static void Initialize(); static void RunShell(); static bool SetOptions(int argc, char* argv[]); @@ -332,7 +328,7 @@ }; -#ifndef USING_V8_SHARED +#ifndef V8_SHARED class LineEditor { public: enum Type { DUMB = 0, READLINE = 1 }; @@ -352,7 +348,7 @@ LineEditor* next_; static LineEditor* first_; }; -#endif // USING_V8_SHARED +#endif // V8_SHARED } // namespace v8 diff -Nru libv8-3.4.14.21/src/d8.js libv8-3.5.10.24/src/d8.js --- libv8-3.4.14.21/src/d8.js 2011-06-23 06:29:21.000000000 +0000 +++ libv8-3.5.10.24/src/d8.js 2011-08-22 11:03:23.000000000 +0000 @@ -103,7 +103,8 @@ Local: 1, With: 2, Closure: 3, - Catch: 4 }; + Catch: 4, + Block: 5 }; // Current debug state. 
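// Illustrative sketch, not part of the patch: in d8.h above, Shell derives from
// i::AllStatic only when internal headers are available, but in both configurations it
// exposes nothing except static members. The usual shape of that idiom is a base class
// whose constructor is declared private and never defined, so any attempt to instantiate
// a derived class fails to build (AllStaticBase, ShellLike and SHARED_BUILD are invented
// stand-ins):
class AllStaticBase {
 private:
  AllStaticBase();  // Declared, never defined: derived classes cannot be instantiated.
};

class ShellLike
#ifndef SHARED_BUILD
    : public AllStaticBase
#endif
{
 public:
  static int Main(int argc, char* argv[]) { return (argc > 1 && argv != NULL) ? 0 : 1; }
};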
@@ -391,14 +392,14 @@ this.frameCommandToJSONRequest_('' + (Debug.State.currentFrame + 1)); break; - + case 'down': case 'do': this.request_ = this.frameCommandToJSONRequest_('' + (Debug.State.currentFrame - 1)); break; - + case 'set': case 'print': case 'p': @@ -1071,7 +1072,7 @@ arg2 = 'uncaught'; } excType = arg2; - + // Check for: // en[able] [all|unc[aught]] exc[eptions] // dis[able] [all|unc[aught]] exc[eptions] @@ -1130,7 +1131,7 @@ request.arguments.ignoreCount = parseInt(otherArgs); break; default: - throw new Error('Invalid arguments.'); + throw new Error('Invalid arguments.'); } } else { throw new Error('Invalid arguments.'); @@ -1251,7 +1252,7 @@ start_index = parseInt(args[i]); // The user input start index starts at 1: if (start_index <= 0) { - throw new Error('Invalid index ' + args[i] + '.'); + throw new Error('Invalid index ' + args[i] + '.'); } start_index -= 1; is_verbose = true; @@ -2020,7 +2021,7 @@ } else if (body.breakOnUncaughtExceptions) { result += '* breaking on UNCAUGHT exceptions is enabled\n'; } else { - result += '* all exception breakpoints are disabled\n'; + result += '* all exception breakpoints are disabled\n'; } details.text = result; break; diff -Nru libv8-3.4.14.21/src/debug.cc libv8-3.5.10.24/src/debug.cc --- libv8-3.4.14.21/src/debug.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/debug.cc 2011-08-10 11:27:35.000000000 +0000 @@ -169,7 +169,8 @@ if ((code->is_inline_cache_stub() && !code->is_binary_op_stub() && !code->is_unary_op_stub() && - !code->is_compare_ic_stub()) || + !code->is_compare_ic_stub() && + !code->is_to_boolean_ic_stub()) || RelocInfo::IsConstructCall(rmode())) { break_point_++; return; @@ -1964,7 +1965,7 @@ Debugger::Debugger(Isolate* isolate) - : debugger_access_(OS::CreateMutex()), + : debugger_access_(isolate->debugger_access()), event_listener_(Handle()), event_listener_data_(Handle()), compiling_natives_(false), @@ -1986,8 +1987,6 @@ Debugger::~Debugger() { - delete debugger_access_; - debugger_access_ = 0; delete dispatch_handler_access_; dispatch_handler_access_ = 0; delete command_received_; diff -Nru libv8-3.4.14.21/src/debug-debugger.js libv8-3.5.10.24/src/debug-debugger.js --- libv8-3.4.14.21/src/debug-debugger.js 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/debug-debugger.js 2011-08-29 10:41:00.000000000 +0000 @@ -404,7 +404,7 @@ return this.script_name_ == script.nameOrSourceURL(); } else if (this.type_ == Debug.ScriptBreakPointType.ScriptRegExp) { return this.script_regexp_object_.test(script.nameOrSourceURL()); - } else { + } else { throw new Error("Unexpected breakpoint type " + this.type_); } } @@ -1579,7 +1579,7 @@ response.failed('Missing argument "type" or "target"'); return; } - + // Either function or script break point. 
var break_point_number; if (type == 'function') { @@ -1623,10 +1623,10 @@ break_point_number = Debug.setScriptBreakPointByName(target, line, column, condition, groupId); - } else if (type == 'scriptId') { + } else if (type == 'scriptId') { break_point_number = Debug.setScriptBreakPointById(target, line, column, condition, groupId); - } else if (type == 'scriptRegExp') { + } else if (type == 'scriptRegExp') { break_point_number = Debug.setScriptBreakPointByRegExp(target, line, column, condition, groupId); @@ -1797,7 +1797,7 @@ description.type = 'scriptRegExp'; description.script_regexp = break_point.script_regexp_object().source; } else { - throw new Error("Internal error: Unexpected breakpoint type: " + break_point.type()); + throw new Error("Internal error: Unexpected breakpoint type: " + break_point.type()); } array.push(description); } @@ -1838,7 +1838,7 @@ enabled = !Debug.isBreakOnException(); } else if (type == 'uncaught') { enabled = !Debug.isBreakOnUncaughtException(); - } + } // Pull out and check the 'enabled' argument if present: if (!IS_UNDEFINED(request.arguments.enabled)) { @@ -2022,22 +2022,22 @@ if (!IS_UNDEFINED(frame) && global) { return response.failed('Arguments "frame" and "global" are exclusive'); } - + var additional_context_object; if (additional_context) { additional_context_object = {}; for (var i = 0; i < additional_context.length; i++) { var mapping = additional_context[i]; if (!IS_STRING(mapping.name) || !IS_NUMBER(mapping.handle)) { - return response.failed("Context element #" + i + + return response.failed("Context element #" + i + " must contain name:string and handle:number"); - } + } var context_value_mirror = LookupMirror(mapping.handle); if (!context_value_mirror) { return response.failed("Context object '" + mapping.name + "' #" + mapping.handle + "# not found"); } - additional_context_object[mapping.name] = context_value_mirror.value(); + additional_context_object[mapping.name] = context_value_mirror.value(); } } diff -Nru libv8-3.4.14.21/src/deoptimizer.cc libv8-3.5.10.24/src/deoptimizer.cc --- libv8-3.4.14.21/src/deoptimizer.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/deoptimizer.cc 2011-09-20 11:34:48.000000000 +0000 @@ -613,11 +613,13 @@ intptr_t input_value = input_->GetRegister(input_reg); if (FLAG_trace_deopt) { PrintF( - " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s\n", + " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ", output_[frame_index]->GetTop() + output_offset, output_offset, input_value, converter.NameOfCPURegister(input_reg)); + reinterpret_cast(input_value)->ShortPrint(); + PrintF("\n"); } output_[frame_index]->SetFrameSlot(output_offset, input_value); return; @@ -675,10 +677,12 @@ if (FLAG_trace_deopt) { PrintF(" 0x%08" V8PRIxPTR ": ", output_[frame_index]->GetTop() + output_offset); - PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d]\n", + PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d] ", output_offset, input_value, input_offset); + reinterpret_cast(input_value)->ShortPrint(); + PrintF("\n"); } output_[frame_index]->SetFrameSlot(output_offset, input_value); return; @@ -1183,11 +1187,11 @@ int32_t TranslationIterator::Next() { - ASSERT(HasNext()); // Run through the bytes until we reach one with a least significant // bit of zero (marks the end). 
uint32_t bits = 0; for (int i = 0; true; i += 7) { + ASSERT(HasNext()); uint8_t next = buffer_->get(index_++); bits |= (next >> 1) << i; if ((next & 1) == 0) break; @@ -1438,6 +1442,7 @@ UNREACHABLE(); } +#ifdef ENABLE_DEBUGGER_SUPPORT DeoptimizedFrameInfo::DeoptimizedFrameInfo( Deoptimizer* deoptimizer, int frame_index) { @@ -1467,5 +1472,6 @@ v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); } +#endif // ENABLE_DEBUGGER_SUPPORT } } // namespace v8::internal diff -Nru libv8-3.4.14.21/src/deoptimizer.h libv8-3.5.10.24/src/deoptimizer.h --- libv8-3.4.14.21/src/deoptimizer.h 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/deoptimizer.h 2011-08-29 10:41:00.000000000 +0000 @@ -317,7 +317,7 @@ List deferred_heap_numbers_; - static int table_entry_size_; + static const int table_entry_size_; friend class FrameDescription; friend class DeoptimizingCodeListNode; @@ -336,6 +336,10 @@ return malloc(size + frame_size - kPointerSize); } + void operator delete(void* pointer, uint32_t frame_size) { + free(pointer); + } + void operator delete(void* description) { free(description); } @@ -497,9 +501,7 @@ int32_t Next(); - bool HasNext() const { return index_ >= 0; } - - void Done() { index_ = -1; } + bool HasNext() const { return index_ < buffer_->length(); } void Skip(int n) { for (int i = 0; i < n; i++) Next(); diff -Nru libv8-3.4.14.21/src/disassembler.cc libv8-3.5.10.24/src/disassembler.cc --- libv8-3.4.14.21/src/disassembler.cc 2011-05-02 14:30:53.000000000 +0000 +++ libv8-3.5.10.24/src/disassembler.cc 2011-08-15 13:01:23.000000000 +0000 @@ -97,14 +97,17 @@ } -static void DumpBuffer(FILE* f, char* buff) { +static void DumpBuffer(FILE* f, StringBuilder* out) { if (f == NULL) { - PrintF("%s", buff); + PrintF("%s\n", out->Finalize()); } else { - fprintf(f, "%s", buff); + fprintf(f, "%s\n", out->Finalize()); } + out->Reset(); } + + static const int kOutBufferSize = 2048 + String::kMaxShortPrintLength; static const int kRelocInfoPosition = 57; @@ -119,6 +122,7 @@ v8::internal::EmbeddedVector decode_buffer; v8::internal::EmbeddedVector out_buffer; + StringBuilder out(out_buffer.start(), out_buffer.length()); byte* pc = begin; disasm::Disassembler d(converter); RelocIterator* it = NULL; @@ -181,17 +185,12 @@ } } - StringBuilder out(out_buffer.start(), out_buffer.length()); - // Comments. for (int i = 0; i < comments.length(); i++) { - out.AddFormatted(" %s\n", comments[i]); + out.AddFormatted(" %s", comments[i]); + DumpBuffer(f, &out); } - // Write out comments, resets outp so that we can format the next line. - DumpBuffer(f, out.Finalize()); - out.Reset(); - // Instruction address and instruction offset. out.AddFormatted("%p %4d ", prev_pc, prev_pc - begin); @@ -209,7 +208,7 @@ out.AddPadding(' ', kRelocInfoPosition - out.position()); } else { // Additional reloc infos are printed on separate lines. - out.AddFormatted("\n"); + DumpBuffer(f, &out); out.AddPadding(' ', kRelocInfoPosition); } @@ -299,9 +298,18 @@ out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode)); } } - out.AddString("\n"); - DumpBuffer(f, out.Finalize()); - out.Reset(); + DumpBuffer(f, &out); + } + + // Emit comments following the last instruction (if any). 
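// Illustrative sketch, not part of the patch: TranslationIterator::Next() above decodes a
// variable-length unsigned integer in which every byte carries 7 payload bits (bits 7..1)
// and its least significant bit says whether another byte follows; moving the
// ASSERT(HasNext()) inside the loop bounds-checks every byte read rather than only the
// first. The same scheme in isolation (DecodeNext and the std::vector buffer are
// stand-ins for the real translation buffer):
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

static uint32_t DecodeNext(const std::vector<uint8_t>& buffer, std::size_t* index) {
  uint32_t bits = 0;
  for (int shift = 0; true; shift += 7) {
    assert(*index < buffer.size());       // Check before every byte, as in Next().
    uint8_t next = buffer[(*index)++];
    bits |= static_cast<uint32_t>(next >> 1) << shift;
    if ((next & 1) == 0) break;           // A least significant bit of 0 ends the value.
  }
  return bits;
}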
+ if (it != NULL) { + for ( ; !it->done(); it->next()) { + if (RelocInfo::IsComment(it->rinfo()->rmode())) { + out.AddFormatted(" %s", + reinterpret_cast(it->rinfo()->data())); + DumpBuffer(f, &out); + } + } } delete it; diff -Nru libv8-3.4.14.21/src/elements.cc libv8-3.5.10.24/src/elements.cc --- libv8-3.4.14.21/src/elements.cc 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/src/elements.cc 2011-08-31 09:03:56.000000000 +0000 @@ -0,0 +1,634 @@ +// Copyright 2011 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "v8.h" + +#include "objects.h" +#include "elements.h" +#include "utils.h" + +namespace v8 { +namespace internal { + + +ElementsAccessor** ElementsAccessor::elements_accessors_; + + +bool HasKey(FixedArray* array, Object* key) { + int len0 = array->length(); + for (int i = 0; i < len0; i++) { + Object* element = array->get(i); + if (element->IsSmi() && element == key) return true; + if (element->IsString() && + key->IsString() && String::cast(element)->Equals(String::cast(key))) { + return true; + } + } + return false; +} + + +// Base class for element handler implementations. Contains the +// the common logic for objects with different ElementsKinds. +// Subclasses must specialize method for which the element +// implementation differs from the base class implementation. +// +// This class is intended to be used in the following way: +// +// class SomeElementsAccessor : +// public ElementsAccessorBase { +// ... +// } +// +// This is an example of the Curiously Recurring Template Pattern (see +// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use +// CRTP to guarantee aggressive compile time optimizations (i.e. inlining and +// specialization of SomeElementsAccessor methods). 
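// Illustrative sketch, not part of the patch: the comment above describes the CRTP used
// by ElementsAccessorBase — the base template is parameterised on the concrete subclass
// so the "overridable" operations resolve at compile time and can be inlined, instead of
// always going through a vtable. A minimal stand-alone version of the pattern
// (AccessorBase and FixedAccessor are invented names; the real class routes static
// member functions through thin virtual wrappers):
template <typename Subclass>
class AccessorBase {
 public:
  int GetCapacity() { return static_cast<Subclass*>(this)->GetCapacityImpl(); }

 protected:
  int GetCapacityImpl() { return 0; }  // Default that a subclass may shadow.
};

class FixedAccessor : public AccessorBase<FixedAccessor> {
 public:
  int GetCapacityImpl() { return 16; }  // Selected statically; no virtual dispatch involved.
};

// Usage: FixedAccessor a; a.GetCapacity() evaluates to 16 via the statically bound call.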
+template +class ElementsAccessorBase : public ElementsAccessor { + protected: + ElementsAccessorBase() { } + virtual MaybeObject* Get(FixedArrayBase* backing_store, + uint32_t key, + JSObject* obj, + Object* receiver) { + return ElementsAccessorSubclass::Get( + BackingStoreClass::cast(backing_store), key, obj, receiver); + } + + static MaybeObject* Get(BackingStoreClass* backing_store, + uint32_t key, + JSObject* obj, + Object* receiver) { + if (key < ElementsAccessorSubclass::GetCapacity(backing_store)) { + return backing_store->get(key); + } + return backing_store->GetHeap()->the_hole_value(); + } + + virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) = 0; + + virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from, + FixedArray* to, + JSObject* holder, + Object* receiver) { + int len0 = to->length(); +#ifdef DEBUG + if (FLAG_enable_slow_asserts) { + for (int i = 0; i < len0; i++) { + ASSERT(!to->get(i)->IsTheHole()); + } + } +#endif + BackingStoreClass* backing_store = BackingStoreClass::cast(from); + uint32_t len1 = ElementsAccessorSubclass::GetCapacity(backing_store); + + // Optimize if 'other' is empty. + // We cannot optimize if 'this' is empty, as other may have holes. + if (len1 == 0) return to; + + // Compute how many elements are not in other. + int extra = 0; + for (uint32_t y = 0; y < len1; y++) { + if (ElementsAccessorSubclass::HasElementAtIndex(backing_store, + y, + holder, + receiver)) { + uint32_t key = + ElementsAccessorSubclass::GetKeyForIndex(backing_store, y); + MaybeObject* maybe_value = + ElementsAccessorSubclass::Get(backing_store, key, holder, receiver); + Object* value; + if (!maybe_value->ToObject(&value)) return maybe_value; + ASSERT(!value->IsTheHole()); + if (!HasKey(to, value)) { + extra++; + } + } + } + + if (extra == 0) return to; + + // Allocate the result + FixedArray* result; + MaybeObject* maybe_obj = + backing_store->GetHeap()->AllocateFixedArray(len0 + extra); + if (!maybe_obj->To(&result)) return maybe_obj; + + // Fill in the content + { + AssertNoAllocation no_gc; + WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); + for (int i = 0; i < len0; i++) { + Object* e = to->get(i); + ASSERT(e->IsString() || e->IsNumber()); + result->set(i, e, mode); + } + } + // Fill in the extra values. 
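// Illustrative sketch, not part of the patch: AddElementsToFixedArray() above builds the
// union of 'to' and the backing store in two passes — first count how many values are
// missing from 'to' so the result array can be allocated exactly once (important for a
// GC-allocated FixedArray), then copy 'to' and append the missing values. The same
// count-then-fill shape with ordinary containers (UnionOf is an invented name and does
// not model holes or allocation failures):
#include <algorithm>
#include <cstddef>
#include <vector>

static std::vector<int> UnionOf(const std::vector<int>& to,
                                const std::vector<int>& from) {
  // Pass 1: count elements of 'from' not already present in 'to'.
  std::size_t extra = 0;
  for (std::size_t i = 0; i < from.size(); ++i) {
    if (std::find(to.begin(), to.end(), from[i]) == to.end()) ++extra;
  }
  if (extra == 0) return to;
  // Pass 2: allocate once, copy 'to', then append the values that were missing.
  std::vector<int> result;
  result.reserve(to.size() + extra);
  result.insert(result.end(), to.begin(), to.end());
  for (std::size_t i = 0; i < from.size(); ++i) {
    if (std::find(result.begin(), result.end(), from[i]) == result.end()) {
      result.push_back(from[i]);
    }
  }
  return result;
}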
+ int index = 0; + for (uint32_t y = 0; y < len1; y++) { + if (ElementsAccessorSubclass::HasElementAtIndex(backing_store, + y, + holder, + receiver)) { + uint32_t key = + ElementsAccessorSubclass::GetKeyForIndex(backing_store, y); + MaybeObject* maybe_value = + ElementsAccessorSubclass::Get(backing_store, key, holder, receiver); + Object* value; + if (!maybe_value->ToObject(&value)) return maybe_value; + if (!value->IsTheHole() && !HasKey(to, value)) { + result->set(len0 + index, value); + index++; + } + } + } + ASSERT(extra == index); + return result; + } + + protected: + static uint32_t GetCapacity(BackingStoreClass* backing_store) { + return backing_store->length(); + } + + virtual uint32_t GetCapacity(FixedArrayBase* backing_store) { + return ElementsAccessorSubclass::GetCapacity( + BackingStoreClass::cast(backing_store)); + } + + static bool HasElementAtIndex(BackingStoreClass* backing_store, + uint32_t index, + JSObject* holder, + Object* receiver) { + uint32_t key = + ElementsAccessorSubclass::GetKeyForIndex(backing_store, index); + MaybeObject* element = ElementsAccessorSubclass::Get(backing_store, + key, + holder, + receiver); + return !element->IsTheHole(); + } + + virtual bool HasElementAtIndex(FixedArrayBase* backing_store, + uint32_t index, + JSObject* holder, + Object* receiver) { + return ElementsAccessorSubclass::HasElementAtIndex( + BackingStoreClass::cast(backing_store), index, holder, receiver); + } + + static uint32_t GetKeyForIndex(BackingStoreClass* backing_store, + uint32_t index) { + return index; + } + + virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store, + uint32_t index) { + return ElementsAccessorSubclass::GetKeyForIndex( + BackingStoreClass::cast(backing_store), index); + } + + private: + DISALLOW_COPY_AND_ASSIGN(ElementsAccessorBase); +}; + + +class FastElementsAccessor + : public ElementsAccessorBase { + public: + static MaybeObject* DeleteCommon(JSObject* obj, + uint32_t key) { + ASSERT(obj->HasFastElements() || obj->HasFastArgumentsElements()); + Heap* heap = obj->GetHeap(); + FixedArray* backing_store = FixedArray::cast(obj->elements()); + if (backing_store->map() == heap->non_strict_arguments_elements_map()) { + backing_store = FixedArray::cast(backing_store->get(1)); + } else { + Object* writable; + MaybeObject* maybe = obj->EnsureWritableFastElements(); + if (!maybe->ToObject(&writable)) return maybe; + backing_store = FixedArray::cast(writable); + } + uint32_t length = static_cast( + obj->IsJSArray() + ? Smi::cast(JSArray::cast(obj)->length())->value() + : backing_store->length()); + if (key < length) { + backing_store->set_the_hole(key); + // If an old space backing store is larger than a certain size and + // has too few used values, normalize it. + // To avoid doing the check on every delete we require at least + // one adjacent hole to the value being deleted. + Object* hole = heap->the_hole_value(); + const int kMinLengthForSparsenessCheck = 64; + if (backing_store->length() >= kMinLengthForSparsenessCheck && + !heap->InNewSpace(backing_store) && + ((key > 0 && backing_store->get(key - 1) == hole) || + (key + 1 < length && backing_store->get(key + 1) == hole))) { + int num_used = 0; + for (int i = 0; i < backing_store->length(); ++i) { + if (backing_store->get(i) != hole) ++num_used; + // Bail out early if more than 1/4 is used. 
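// Worked example for the heuristic above, not part of the patch: the sparseness check in
// DeleteCommon() only runs for old-space stores of at least 64 elements with a hole next
// to the deleted slot; the scan bails out as soon as more than a quarter of the slots are
// in use, and the store is normalized to dictionary elements only when at most a quarter
// are used (e.g. a 64-slot store normalizes with 16 or fewer live values). The predicate
// in isolation (ShouldNormalize and the bool array are stand-ins for the hole checks):
static bool ShouldNormalize(const bool* is_used, int length) {
  const int kMinLengthForSparsenessCheck = 64;
  if (length < kMinLengthForSparsenessCheck) return false;
  int num_used = 0;
  for (int i = 0; i < length; ++i) {
    if (is_used[i]) ++num_used;
    if (4 * num_used > length) return false;  // More than 1/4 used: stay with fast elements.
  }
  return 4 * num_used <= length;
}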
+ if (4 * num_used > backing_store->length()) break; + } + if (4 * num_used <= backing_store->length()) { + MaybeObject* result = obj->NormalizeElements(); + if (result->IsFailure()) return result; + } + } + } + return heap->true_value(); + } + + protected: + virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { + return DeleteCommon(obj, key); + } +}; + + +class FastDoubleElementsAccessor + : public ElementsAccessorBase { + protected: + friend class ElementsAccessorBase; + + virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { + int length = obj->IsJSArray() + ? Smi::cast(JSArray::cast(obj)->length())->value() + : FixedDoubleArray::cast(obj->elements())->length(); + if (key < static_cast(length)) { + FixedDoubleArray::cast(obj->elements())->set_the_hole(key); + } + return obj->GetHeap()->true_value(); + } + + static bool HasElementAtIndex(FixedDoubleArray* backing_store, + uint32_t index, + JSObject* holder, + Object* receiver) { + return !backing_store->is_the_hole(index); + } +}; + + +// Super class for all external element arrays. +template +class ExternalElementsAccessor + : public ElementsAccessorBase { + protected: + friend class ElementsAccessorBase; + + static MaybeObject* Get(ExternalArray* backing_store, + uint32_t key, + JSObject* obj, + Object* receiver) { + if (key < ExternalElementsAccessorSubclass::GetCapacity(backing_store)) { + return backing_store->get(key); + } else { + return backing_store->GetHeap()->undefined_value(); + } + } + + virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { + // External arrays always ignore deletes. + return obj->GetHeap()->true_value(); + } +}; + + +class ExternalByteElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalUnsignedByteElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalShortElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalUnsignedShortElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalIntElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalUnsignedIntElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalFloatElementsAccessor + : public ExternalElementsAccessor { +}; + + +class ExternalDoubleElementsAccessor + : public ExternalElementsAccessor { +}; + + +class PixelElementsAccessor + : public ExternalElementsAccessor { +}; + + +class DictionaryElementsAccessor + : public ElementsAccessorBase { + public: + static MaybeObject* DeleteCommon(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { + Isolate* isolate = obj->GetIsolate(); + Heap* heap = isolate->heap(); + FixedArray* backing_store = FixedArray::cast(obj->elements()); + bool is_arguments = + (obj->GetElementsKind() == JSObject::NON_STRICT_ARGUMENTS_ELEMENTS); + if (is_arguments) { + backing_store = FixedArray::cast(backing_store->get(1)); + } + NumberDictionary* dictionary = NumberDictionary::cast(backing_store); + int entry = dictionary->FindEntry(key); + if (entry != NumberDictionary::kNotFound) { + Object* result = dictionary->DeleteProperty(entry, mode); + if (result == heap->true_value()) { + MaybeObject* maybe_elements = dictionary->Shrink(key); + FixedArray* new_elements = NULL; + if (!maybe_elements->To(&new_elements)) { + return maybe_elements; + } + if (is_arguments) { + FixedArray::cast(obj->elements())->set(1, new_elements); + } else { + obj->set_elements(new_elements); + } + } + if 
(mode == JSObject::STRICT_DELETION && + result == heap->false_value()) { + // In strict mode, attempting to delete a non-configurable property + // throws an exception. + HandleScope scope(isolate); + Handle holder(obj); + Handle name = isolate->factory()->NewNumberFromUint(key); + Handle args[2] = { name, holder }; + Handle error = + isolate->factory()->NewTypeError("strict_delete_property", + HandleVector(args, 2)); + return isolate->Throw(*error); + } + } + return heap->true_value(); + } + + protected: + friend class ElementsAccessorBase; + + virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { + return DeleteCommon(obj, key, mode); + } + + static MaybeObject* Get(NumberDictionary* backing_store, + uint32_t key, + JSObject* obj, + Object* receiver) { + int entry = backing_store->FindEntry(key); + if (entry != NumberDictionary::kNotFound) { + Object* element = backing_store->ValueAt(entry); + PropertyDetails details = backing_store->DetailsAt(entry); + if (details.type() == CALLBACKS) { + return obj->GetElementWithCallback(receiver, + element, + key, + obj); + } else { + return element; + } + } + return obj->GetHeap()->the_hole_value(); + } + + static uint32_t GetKeyForIndex(NumberDictionary* dict, + uint32_t index) { + Object* key = dict->KeyAt(index); + return Smi::cast(key)->value(); + } +}; + + +class NonStrictArgumentsElementsAccessor + : public ElementsAccessorBase { + protected: + friend class ElementsAccessorBase; + + static MaybeObject* Get(FixedArray* parameter_map, + uint32_t key, + JSObject* obj, + Object* receiver) { + Object* probe = GetParameterMapArg(parameter_map, key); + if (!probe->IsTheHole()) { + Context* context = Context::cast(parameter_map->get(0)); + int context_index = Smi::cast(probe)->value(); + ASSERT(!context->get(context_index)->IsTheHole()); + return context->get(context_index); + } else { + // Object is not mapped, defer to the arguments. + FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); + return ElementsAccessor::ForArray(arguments)->Get(arguments, + key, + obj, + receiver); + } + } + + virtual MaybeObject* Delete(JSObject* obj, + uint32_t key + , + JSReceiver::DeleteMode mode) { + FixedArray* parameter_map = FixedArray::cast(obj->elements()); + Object* probe = GetParameterMapArg(parameter_map, key); + if (!probe->IsTheHole()) { + // TODO(kmillikin): We could check if this was the last aliased + // parameter, and revert to normal elements in that case. That + // would enable GC of the context. 
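// Illustrative sketch, not part of the patch: the non-strict arguments accessor above
// reads through a "parameter map" FixedArray whose slot 0 is the context, slot 1 is the
// plain arguments backing store, and slot key+2 is either a context slot index (the
// parameter is still aliased to a local) or the hole (fall back to the backing store).
// The lookup shape with ordinary containers (ParameterMapSketch is an invented name;
// std::optional stands in for the hole):
#include <cstddef>
#include <optional>
#include <vector>

struct ParameterMapSketch {
  std::vector<int> context_slots;            // Stands in for the Context at slot 0.
  std::vector<int> arguments_store;          // Stands in for the FixedArray at slot 1.
  std::vector<std::optional<int> > mapping;  // Stands in for slots 2..n.

  int Get(std::size_t key) const {
    if (key < mapping.size() && mapping[key].has_value()) {
      return context_slots[*mapping[key]];   // Mapped: the value lives in the context.
    }
    return arguments_store[key];             // Unmapped: read the plain store.
  }
};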
+ parameter_map->set_the_hole(key + 2); + } else { + FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); + if (arguments->IsDictionary()) { + return DictionaryElementsAccessor::DeleteCommon(obj, key, mode); + } else { + return FastElementsAccessor::DeleteCommon(obj, key); + } + } + return obj->GetHeap()->true_value(); + } + + static uint32_t GetCapacity(FixedArray* parameter_map) { + FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1)); + return Max(static_cast(parameter_map->length() - 2), + ForArray(arguments)->GetCapacity(arguments)); + } + + static uint32_t GetKeyForIndex(FixedArray* dict, + uint32_t index) { + return index; + } + + static bool HasElementAtIndex(FixedArray* parameter_map, + uint32_t index, + JSObject* holder, + Object* receiver) { + Object* probe = GetParameterMapArg(parameter_map, index); + if (!probe->IsTheHole()) { + return true; + } else { + FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1)); + ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments); + return !accessor->Get(arguments, index, holder, receiver)->IsTheHole(); + } + } + + private: + static Object* GetParameterMapArg(FixedArray* parameter_map, + uint32_t key) { + uint32_t length = parameter_map->length(); + return key < (length - 2 ) + ? parameter_map->get(key + 2) + : parameter_map->GetHeap()->the_hole_value(); + } +}; + + +ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) { + switch (array->map()->instance_type()) { + case FIXED_ARRAY_TYPE: + if (array->IsDictionary()) { + return elements_accessors_[JSObject::DICTIONARY_ELEMENTS]; + } else { + return elements_accessors_[JSObject::FAST_ELEMENTS]; + } + case EXTERNAL_BYTE_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_BYTE_ELEMENTS]; + case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS]; + case EXTERNAL_SHORT_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_SHORT_ELEMENTS]; + case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS]; + case EXTERNAL_INT_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_INT_ELEMENTS]; + case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS]; + case EXTERNAL_FLOAT_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_FLOAT_ELEMENTS]; + case EXTERNAL_DOUBLE_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_DOUBLE_ELEMENTS]; + case EXTERNAL_PIXEL_ARRAY_TYPE: + return elements_accessors_[JSObject::EXTERNAL_PIXEL_ELEMENTS]; + default: + UNREACHABLE(); + return NULL; + } +} + + +void ElementsAccessor::InitializeOncePerProcess() { + static struct ConcreteElementsAccessors { + FastElementsAccessor fast_elements_handler; + FastDoubleElementsAccessor fast_double_elements_handler; + DictionaryElementsAccessor dictionary_elements_handler; + NonStrictArgumentsElementsAccessor non_strict_arguments_elements_handler; + ExternalByteElementsAccessor byte_elements_handler; + ExternalUnsignedByteElementsAccessor unsigned_byte_elements_handler; + ExternalShortElementsAccessor short_elements_handler; + ExternalUnsignedShortElementsAccessor unsigned_short_elements_handler; + ExternalIntElementsAccessor int_elements_handler; + ExternalUnsignedIntElementsAccessor unsigned_int_elements_handler; + ExternalFloatElementsAccessor float_elements_handler; + ExternalDoubleElementsAccessor double_elements_handler; + PixelElementsAccessor 
pixel_elements_handler; + } element_accessors; + + static ElementsAccessor* accessor_array[] = { + &element_accessors.fast_elements_handler, + &element_accessors.fast_double_elements_handler, + &element_accessors.dictionary_elements_handler, + &element_accessors.non_strict_arguments_elements_handler, + &element_accessors.byte_elements_handler, + &element_accessors.unsigned_byte_elements_handler, + &element_accessors.short_elements_handler, + &element_accessors.unsigned_short_elements_handler, + &element_accessors.int_elements_handler, + &element_accessors.unsigned_int_elements_handler, + &element_accessors.float_elements_handler, + &element_accessors.double_elements_handler, + &element_accessors.pixel_elements_handler + }; + + elements_accessors_ = accessor_array; +} + + +} } // namespace v8::internal diff -Nru libv8-3.4.14.21/src/elements.h libv8-3.5.10.24/src/elements.h --- libv8-3.4.14.21/src/elements.h 1970-01-01 00:00:00.000000000 +0000 +++ libv8-3.5.10.24/src/elements.h 2011-08-22 11:03:23.000000000 +0000 @@ -0,0 +1,95 @@ +// Copyright 2011 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef V8_ELEMENTS_H_ +#define V8_ELEMENTS_H_ + +#include "objects.h" + +namespace v8 { +namespace internal { + +// Abstract base class for handles that can operate on objects with differing +// ElementsKinds. +class ElementsAccessor { + public: + ElementsAccessor() { } + virtual ~ElementsAccessor() { } + virtual MaybeObject* Get(FixedArrayBase* backing_store, + uint32_t key, + JSObject* holder, + Object* receiver) = 0; + + virtual MaybeObject* Delete(JSObject* holder, + uint32_t key, + JSReceiver::DeleteMode mode) = 0; + + virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from, + FixedArray* to, + JSObject* holder, + Object* receiver) = 0; + + // Returns a shared ElementsAccessor for the specified ElementsKind. 
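// Illustrative sketch, not part of the patch: InitializeOncePerProcess() above constructs
// every concrete accessor once, as function-local statics, and publishes them through a
// single pointer array indexed by ElementsKind, which is what ForKind()/ForArray()
// consult. A reduced version of that kind-indexed dispatch table (all names below are
// invented):
#include <cstddef>

struct AccessorSketch {
  virtual ~AccessorSketch() {}
  virtual const char* Name() const = 0;
};

struct FastSketch : public AccessorSketch {
  virtual const char* Name() const { return "fast"; }
};

struct DictionarySketch : public AccessorSketch {
  virtual const char* Name() const { return "dictionary"; }
};

enum KindSketch { FAST_KIND = 0, DICTIONARY_KIND = 1, KIND_COUNT = 2 };

static AccessorSketch** accessor_table_ = NULL;

static void InitOnce() {
  static FastSketch fast;
  static DictionarySketch dictionary;
  static AccessorSketch* table[KIND_COUNT] = { &fast, &dictionary };
  accessor_table_ = table;
}

static AccessorSketch* ForKindSketch(KindSketch kind) {
  return accessor_table_[kind];  // Valid only after InitOnce(), mirroring V8 startup.
}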
+ static ElementsAccessor* ForKind(JSObject::ElementsKind elements_kind) { + ASSERT(elements_kind < JSObject::kElementsKindCount); + return elements_accessors_[elements_kind]; + } + + static ElementsAccessor* ForArray(FixedArrayBase* array); + + static void InitializeOncePerProcess(); + + protected: + friend class NonStrictArgumentsElementsAccessor; + + virtual uint32_t GetCapacity(FixedArrayBase* backing_store) = 0; + + virtual bool HasElementAtIndex(FixedArrayBase* backing_store, + uint32_t index, + JSObject* holder, + Object* receiver) = 0; + + // Element handlers distinguish between indexes and keys when the manipulate + // elements. Indexes refer to elements in terms of their location in the + // underlying storage's backing store representation, and are between 0 + // GetCapacity. Keys refer to elements in terms of the value that would be + // specific in JavaScript to access the element. In most implementations, keys + // are equivalent to indexes, and GetKeyForIndex returns the same value it is + // passed. In the NumberDictionary ElementsAccessor, GetKeyForIndex maps the + // index to a key using the KeyAt method on the NumberDictionary. + virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store, + uint32_t index) = 0; + + private: + static ElementsAccessor** elements_accessors_; + + DISALLOW_COPY_AND_ASSIGN(ElementsAccessor); +}; + +} } // namespace v8::internal + +#endif // V8_ELEMENTS_H_ diff -Nru libv8-3.4.14.21/src/execution.cc libv8-3.5.10.24/src/execution.cc --- libv8-3.4.14.21/src/execution.cc 2011-07-06 11:27:02.000000000 +0000 +++ libv8-3.5.10.24/src/execution.cc 2011-08-10 11:27:35.000000000 +0000 @@ -132,7 +132,7 @@ if (*has_pending_exception) { isolate->ReportPendingMessages(); if (isolate->pending_exception() == Failure::OutOfMemoryException()) { - if (!isolate->handle_scope_implementer()->ignore_out_of_memory()) { + if (!isolate->ignore_out_of_memory()) { V8::FatalProcessOutOfMemory("JS", true); } } diff -Nru libv8-3.4.14.21/src/extensions/experimental/datetime-format.cc libv8-3.5.10.24/src/extensions/experimental/datetime-format.cc --- libv8-3.4.14.21/src/extensions/experimental/datetime-format.cc 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/src/extensions/experimental/datetime-format.cc 2011-08-17 14:33:23.000000000 +0000 @@ -135,7 +135,7 @@ v8::Handle DateTimeFormat::GetWeekdays(const v8::Arguments& args) { icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder()); if (!date_format) { - ThrowUnexpectedObjectError(); + return ThrowUnexpectedObjectError(); } const icu::DateFormatSymbols* symbols = date_format->getDateFormatSymbols(); diff -Nru libv8-3.4.14.21/src/extensions/experimental/number-format.cc libv8-3.5.10.24/src/extensions/experimental/number-format.cc --- libv8-3.4.14.21/src/extensions/experimental/number-format.cc 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/src/extensions/experimental/number-format.cc 2011-07-25 11:05:35.000000000 +0000 @@ -36,6 +36,8 @@ #include "unicode/numfmt.h" #include "unicode/uchar.h" #include "unicode/ucurr.h" +#include "unicode/unum.h" +#include "unicode/uversion.h" namespace v8 { namespace internal { @@ -231,6 +233,8 @@ } // Generates ICU number format pattern from given skeleton. +// TODO(cira): Remove once ICU includes equivalent method +// (see http://bugs.icu-project.org/trac/ticket/8610). 
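// Illustrative sketch, not part of the patch: the indexes-versus-keys contract documented
// above is clearest for a dictionary backing store, where the index walks the occupied
// slots in storage order while the key is the JavaScript element number held in that
// slot. A tiny version with std::map standing in for NumberDictionary
// (GetKeyForIndexSketch is an invented name):
#include <cstdint>
#include <iterator>
#include <map>

static uint32_t GetKeyForIndexSketch(const std::map<uint32_t, int>& dictionary,
                                     uint32_t index) {
  std::map<uint32_t, int>::const_iterator it = dictionary.begin();
  std::advance(it, index);  // Move to the index-th occupied slot.
  return it->first;         // Its key is what a script would use to address the element.
}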
static icu::DecimalFormat* CreateFormatterFromSkeleton( const icu::Locale& icu_locale, const icu::UnicodeString& skeleton, @@ -251,6 +255,7 @@ // Case of non-consecutive U+00A4 is taken care of in i18n.js. int32_t end_index = skeleton.lastIndexOf(currency_symbol, index); +#if (U_ICU_VERSION_MAJOR_NUM == 4) && (U_ICU_VERSION_MINOR_NUM <= 6) icu::NumberFormat::EStyles style; switch (end_index - index) { case 0: @@ -262,6 +267,19 @@ default: style = icu::NumberFormat::kPluralCurrencyStyle; } +#else // ICU version is 4.8 or above (we ignore versions below 4.0). + UNumberFormatStyle style; + switch (end_index - index) { + case 0: + style = UNUM_CURRENCY; + break; + case 1: + style = UNUM_CURRENCY_ISO; + break; + default: + style = UNUM_CURRENCY_PLURAL; + } +#endif base_format = static_cast( icu::NumberFormat::createInstance(icu_locale, style, *status)); diff -Nru libv8-3.4.14.21/src/factory.cc libv8-3.5.10.24/src/factory.cc --- libv8-3.4.14.21/src/factory.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/factory.cc 2011-08-15 13:01:23.000000000 +0000 @@ -34,6 +34,7 @@ #include "macro-assembler.h" #include "objects.h" #include "objects-visiting.h" +#include "scopeinfo.h" namespace v8 { namespace internal { @@ -84,6 +85,14 @@ } +Handle Factory::NewObjectHashTable(int at_least_space_for) { + ASSERT(0 <= at_least_space_for); + CALL_HEAP_FUNCTION(isolate(), + ObjectHashTable::Allocate(at_least_space_for), + ObjectHashTable); +} + + Handle Factory::NewDescriptorArray(int number_of_descriptors) { ASSERT(0 <= number_of_descriptors); CALL_HEAP_FUNCTION(isolate(), @@ -283,6 +292,19 @@ } +Handle Factory::NewBlockContext( + Handle function, + Handle previous, + Handle scope_info) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateBlockContext(*function, + *previous, + *scope_info), + Context); +} + + Handle Factory::NewStruct(InstanceType type) { CALL_HEAP_FUNCTION( isolate(), @@ -726,6 +748,14 @@ } +Handle Factory::NewSerializedScopeInfo(int length) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateSerializedScopeInfo(length), + SerializedScopeInfo); +} + + Handle Factory::NewCode(const CodeDesc& desc, Code::Flags flags, Handle self_ref, diff -Nru libv8-3.4.14.21/src/factory.h libv8-3.5.10.24/src/factory.h --- libv8-3.4.14.21/src/factory.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/factory.h 2011-08-15 13:01:23.000000000 +0000 @@ -58,6 +58,8 @@ Handle NewStringDictionary(int at_least_space_for); + Handle NewObjectHashTable(int at_least_space_for); + Handle NewDescriptorArray(int number_of_descriptors); Handle NewDeoptimizationInputData( int deopt_entry_count, @@ -165,6 +167,11 @@ Handle previous, Handle extension); + // Create a 'block' context. + Handle NewBlockContext(Handle function, + Handle previous, + Handle scope_info); + // Return the Symbol matching the passed in string. Handle SymbolFromString(Handle value); @@ -275,6 +282,8 @@ Handle context, PretenureFlag pretenure = TENURED); + Handle NewSerializedScopeInfo(int length); + Handle NewCode(const CodeDesc& desc, Code::Flags flags, Handle self_reference, diff -Nru libv8-3.4.14.21/src/flag-definitions.h libv8-3.5.10.24/src/flag-definitions.h --- libv8-3.4.14.21/src/flag-definitions.h 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/flag-definitions.h 2011-08-29 10:41:00.000000000 +0000 @@ -97,10 +97,14 @@ #define FLAG FLAG_FULL // Flags for experimental language features. 
+DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof") DEFINE_bool(harmony_proxies, false, "enable harmony proxies") +DEFINE_bool(harmony_weakmaps, false, "enable harmony weak maps") +DEFINE_bool(harmony_block_scoping, false, "enable harmony block scoping") // Flags for experimental implementation features. -DEFINE_bool(unbox_double_arrays, false, "automatically unbox arrays of doubles") +DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles") +DEFINE_bool(string_slices, false, "use string slices") // Flags for Crankshaft. #ifdef V8_TARGET_ARCH_MIPS @@ -400,6 +404,7 @@ DEFINE_bool(print_builtin_json_ast, false, "print source AST for builtins as JSON") DEFINE_string(stop_at, "", "function name where to insert a breakpoint") +DEFINE_bool(verify_stack_height, false, "verify stack height tracing on ia32") // compiler.cc DEFINE_bool(print_builtin_scopes, false, "print scopes for builtins") diff -Nru libv8-3.4.14.21/src/frames.h libv8-3.5.10.24/src/frames.h --- libv8-3.4.14.21/src/frames.h 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/frames.h 2011-08-15 13:01:23.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -114,6 +114,7 @@ // Accessors. inline State state() const; + inline Object** context_address() const; inline Address* pc_address() const; DISALLOW_IMPLICIT_CONSTRUCTORS(StackHandler); diff -Nru libv8-3.4.14.21/src/frames-inl.h libv8-3.5.10.24/src/frames-inl.h --- libv8-3.4.14.21/src/frames-inl.h 2011-04-11 12:33:05.000000000 +0000 +++ libv8-3.5.10.24/src/frames-inl.h 2011-08-15 13:01:23.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,6 +67,7 @@ inline void StackHandler::Iterate(ObjectVisitor* v, Code* holder) const { + v->VisitPointer(context_address()); StackFrame::IteratePc(v, pc_address(), holder); } @@ -82,6 +83,12 @@ } +inline Object** StackHandler::context_address() const { + const int offset = StackHandlerConstants::kContextOffset; + return reinterpret_cast(address() + offset); +} + + inline Address* StackHandler::pc_address() const { const int offset = StackHandlerConstants::kPCOffset; return reinterpret_cast(address() + offset); diff -Nru libv8-3.4.14.21/src/full-codegen.cc libv8-3.5.10.24/src/full-codegen.cc --- libv8-3.4.14.21/src/full-codegen.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/full-codegen.cc 2011-09-07 12:19:49.000000000 +0000 @@ -35,6 +35,7 @@ #include "macro-assembler.h" #include "prettyprinter.h" #include "scopes.h" +#include "scopeinfo.h" #include "stub-cache.h" namespace v8 { @@ -90,8 +91,7 @@ } -void BreakableStatementChecker::VisitEnterWithContextStatement( - EnterWithContextStatement* stmt) { +void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) { Visit(stmt->expression()); } @@ -317,7 +317,6 @@ // field, and then a sequence of entries. Each entry is a pair of AST id // and code-relative pc offset. 
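// Illustrative sketch, not part of the patch: the stack-check table described above is a
// flat run of 32-bit words — one length word, then an (AST id, pc-and-state) pair per
// entry — which is the layout the __ dd(...) calls that follow emit. Building the same
// layout without the macro assembler (names below are invented):
#include <cstddef>
#include <cstdint>
#include <vector>

struct StackCheckEntrySketch { uint32_t ast_id; uint32_t pc_and_state; };

static std::vector<uint32_t> EmitStackCheckTableSketch(
    const std::vector<StackCheckEntrySketch>& checks) {
  std::vector<uint32_t> words;
  words.push_back(static_cast<uint32_t>(checks.size()));  // Length word comes first.
  for (std::size_t i = 0; i < checks.size(); ++i) {
    words.push_back(checks[i].ast_id);                    // Pair: AST id ...
    words.push_back(checks[i].pc_and_state);              // ... then pc offset and state.
  }
  return words;                                           // 1 + 2 * length words in total.
}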
masm()->Align(kIntSize); - masm()->RecordComment("[ Stack check table"); unsigned offset = masm()->pc_offset(); unsigned length = stack_checks_.length(); __ dd(length); @@ -325,7 +324,6 @@ __ dd(stack_checks_[i].id); __ dd(stack_checks_[i].pc_and_state); } - masm()->RecordComment("]"); return offset; } @@ -437,6 +435,7 @@ void FullCodeGenerator::StackValueContext::Plug(Register reg) const { __ push(reg); + codegen()->increment_stack_height(); } @@ -450,11 +449,13 @@ void FullCodeGenerator::EffectContext::PlugTOS() const { __ Drop(1); + codegen()->decrement_stack_height(); } void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const { __ pop(result_register()); + codegen()->decrement_stack_height(); } @@ -465,6 +466,7 @@ void FullCodeGenerator::TestContext::PlugTOS() const { // For simplicity we always test the accumulator register. __ pop(result_register()); + codegen()->decrement_stack_height(); codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); codegen()->DoTest(this); } @@ -843,9 +845,24 @@ Breakable nested_statement(this, stmt); SetStatementPosition(stmt); + Scope* saved_scope = scope(); + if (stmt->block_scope() != NULL) { + { Comment cmnt(masm_, "[ Extend block context"); + scope_ = stmt->block_scope(); + __ Push(scope_->GetSerializedScopeInfo()); + PushFunctionArgumentForContextAllocation(); + __ CallRuntime(Runtime::kPushBlockContext, 2); + StoreToFrameField(StandardFrameConstants::kContextOffset, + context_register()); + } + { Comment cmnt(masm_, "[ Declarations"); + VisitDeclarations(scope_->declarations()); + } + } PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); VisitStatements(stmt->statements()); - __ bind(nested_statement.break_target()); + scope_ = saved_scope; + __ bind(nested_statement.break_label()); PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); } @@ -896,19 +913,26 @@ SetStatementPosition(stmt); NestedStatement* current = nesting_stack_; int stack_depth = 0; + int context_length = 0; // When continuing, we clobber the unpredictable value in the accumulator // with one that's safe for GC. If we hit an exit from the try block of // try...finally on our way out, we will unconditionally preserve the // accumulator on the stack. ClearAccumulator(); while (!current->IsContinueTarget(stmt->target())) { - stack_depth = current->Exit(stack_depth); - current = current->outer(); + current = current->Exit(&stack_depth, &context_length); } __ Drop(stack_depth); + if (context_length > 0) { + while (context_length > 0) { + LoadContextField(context_register(), Context::PREVIOUS_INDEX); + --context_length; + } + StoreToFrameField(StandardFrameConstants::kContextOffset, + context_register()); + } - Iteration* loop = current->AsIteration(); - __ jmp(loop->continue_target()); + __ jmp(current->AsIteration()->continue_label()); } @@ -917,19 +941,26 @@ SetStatementPosition(stmt); NestedStatement* current = nesting_stack_; int stack_depth = 0; + int context_length = 0; // When breaking, we clobber the unpredictable value in the accumulator // with one that's safe for GC. If we hit an exit from the try block of // try...finally on our way out, we will unconditionally preserve the // accumulator on the stack. 
ClearAccumulator(); while (!current->IsBreakTarget(stmt->target())) { - stack_depth = current->Exit(stack_depth); - current = current->outer(); + current = current->Exit(&stack_depth, &context_length); } __ Drop(stack_depth); + if (context_length > 0) { + while (context_length > 0) { + LoadContextField(context_register(), Context::PREVIOUS_INDEX); + --context_length; + } + StoreToFrameField(StandardFrameConstants::kContextOffset, + context_register()); + } - Breakable* target = current->AsBreakable(); - __ jmp(target->break_target()); + __ jmp(current->AsBreakable()->break_label()); } @@ -942,9 +973,9 @@ // Exit all nested statements. NestedStatement* current = nesting_stack_; int stack_depth = 0; + int context_length = 0; while (current != NULL) { - stack_depth = current->Exit(stack_depth); - current = current->outer(); + current = current->Exit(&stack_depth, &context_length); } __ Drop(stack_depth); @@ -952,14 +983,23 @@ } -void FullCodeGenerator::VisitEnterWithContextStatement( - EnterWithContextStatement* stmt) { - Comment cmnt(masm_, "[ EnterWithContextStatement"); +void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) { + Comment cmnt(masm_, "[ WithStatement"); SetStatementPosition(stmt); VisitForStackValue(stmt->expression()); PushFunctionArgumentForContextAllocation(); __ CallRuntime(Runtime::kPushWithContext, 2); + decrement_stack_height(); + StoreToFrameField(StandardFrameConstants::kContextOffset, context_register()); + + { WithOrCatch body(this); + Visit(stmt->statement()); + } + + // Pop context. + LoadContextField(context_register(), Context::PREVIOUS_INDEX); + // Update local stack frame context field. StoreToFrameField(StandardFrameConstants::kContextOffset, context_register()); } @@ -988,12 +1028,12 @@ // Record the position of the do while condition and make sure it is // possible to break on the condition. - __ bind(loop_statement.continue_target()); + __ bind(loop_statement.continue_label()); PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS); SetExpressionPosition(stmt->cond(), stmt->condition_position()); VisitForControl(stmt->cond(), &stack_check, - loop_statement.break_target(), + loop_statement.break_label(), &stack_check); // Check stack before looping. @@ -1003,7 +1043,7 @@ __ jmp(&body); PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); - __ bind(loop_statement.break_target()); + __ bind(loop_statement.break_label()); decrement_loop_depth(); } @@ -1024,7 +1064,7 @@ // Emit the statement position here as this is where the while // statement code starts. - __ bind(loop_statement.continue_target()); + __ bind(loop_statement.continue_label()); SetStatementPosition(stmt); // Check stack before looping. 
@@ -1033,11 +1073,11 @@ __ bind(&test); VisitForControl(stmt->cond(), &body, - loop_statement.break_target(), - loop_statement.break_target()); + loop_statement.break_label(), + loop_statement.break_label()); PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); - __ bind(loop_statement.break_target()); + __ bind(loop_statement.break_label()); decrement_loop_depth(); } @@ -1060,7 +1100,7 @@ Visit(stmt->body()); PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS); - __ bind(loop_statement.continue_target()); + __ bind(loop_statement.continue_label()); SetStatementPosition(stmt); if (stmt->next() != NULL) { Visit(stmt->next()); @@ -1077,14 +1117,14 @@ if (stmt->cond() != NULL) { VisitForControl(stmt->cond(), &body, - loop_statement.break_target(), - loop_statement.break_target()); + loop_statement.break_label(), + loop_statement.break_label()); } else { __ jmp(&body); } PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); - __ bind(loop_statement.break_target()); + __ bind(loop_statement.break_label()); decrement_loop_depth(); } @@ -1102,7 +1142,7 @@ // to introduce a new scope to bind the catch variable and to remove // that scope again afterwards. - Label try_handler_setup, catch_entry, done; + Label try_handler_setup, done; __ Call(&try_handler_setup); // Try handler code, exception in result register. @@ -1119,17 +1159,22 @@ Scope* saved_scope = scope(); scope_ = stmt->scope(); ASSERT(scope_->declarations()->is_empty()); - Visit(stmt->catch_block()); + { WithOrCatch body(this); + Visit(stmt->catch_block()); + } scope_ = saved_scope; __ jmp(&done); // Try block code. Sets up the exception handler chain. __ bind(&try_handler_setup); { - TryCatch try_block(this, &catch_entry); + const int delta = StackHandlerConstants::kSize / kPointerSize; + TryCatch try_block(this); __ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER); + increment_stack_height(delta); Visit(stmt->try_block()); __ PopTryHandler(); + decrement_stack_height(delta); } __ bind(&done); } @@ -1161,6 +1206,7 @@ // cooked before GC. Label finally_entry; Label try_handler_setup; + const int original_stack_height = stack_height(); // Setup the try-handler chain. Use a call to // Jump to try-handler setup and try-block code. Use call to put try-handler @@ -1169,9 +1215,9 @@ // Try handler code. Return address of call is pushed on handler stack. { // This code is only executed during stack-handler traversal when an - // exception is thrown. The execption is in the result register, which + // exception is thrown. The exception is in the result register, which // is retained by the finally block. - // Call the finally block and then rethrow the exception. + // Call the finally block and then rethrow the exception if it returns. __ Call(&finally_entry); __ push(result_register()); __ CallRuntime(Runtime::kReThrow, 1); @@ -1182,6 +1228,7 @@ // Finally block implementation. Finally finally_block(this); EnterFinallyBlock(); + set_stack_height(original_stack_height + Finally::kElementCount); Visit(stmt->finally_block()); ExitFinallyBlock(); // Return to the calling code. } @@ -1189,10 +1236,13 @@ __ bind(&try_handler_setup); { // Setup try handler (stack pointer registers). + const int delta = StackHandlerConstants::kSize / kPointerSize; TryFinally try_block(this, &finally_entry); __ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER); + set_stack_height(original_stack_height + delta); Visit(stmt->try_block()); __ PopTryHandler(); + set_stack_height(original_stack_height); } // Execute the finally block on the way out. 
Clobber the unpredictable // value in the accumulator with one that's safe for GC. The finally @@ -1222,6 +1272,7 @@ __ bind(&true_case); SetExpressionPosition(expr->then_expression(), expr->then_expression_position()); + int start_stack_height = stack_height(); if (context()->IsTest()) { const TestContext* for_test = TestContext::cast(context()); VisitForControl(expr->then_expression(), @@ -1235,6 +1286,7 @@ PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS); __ bind(&false_case); + set_stack_height(start_stack_height); if (context()->IsTest()) ForwardBailoutToChild(expr); SetExpressionPosition(expr->else_expression(), expr->else_expression_position()); @@ -1275,26 +1327,23 @@ void FullCodeGenerator::VisitThrow(Throw* expr) { Comment cmnt(masm_, "[ Throw"); + // Throw has no effect on the stack height or the current expression context. + // Usually the expression context is null, because throw is a statement. VisitForStackValue(expr->exception()); __ CallRuntime(Runtime::kThrow, 1); + decrement_stack_height(); // Never returns here. } -int FullCodeGenerator::TryFinally::Exit(int stack_depth) { +FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit( + int* stack_depth, + int* context_length) { // The macros used here must preserve the result register. - __ Drop(stack_depth); - __ PopTryHandler(); - __ Call(finally_entry_); - return 0; -} - - -int FullCodeGenerator::TryCatch::Exit(int stack_depth) { - // The macros used here must preserve the result register. - __ Drop(stack_depth); + __ Drop(*stack_depth); __ PopTryHandler(); - return 0; + *stack_depth = 0; + return previous_; } diff -Nru libv8-3.4.14.21/src/full-codegen.h libv8-3.5.10.24/src/full-codegen.h --- libv8-3.4.14.21/src/full-codegen.h 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/full-codegen.h 2011-08-22 11:03:23.000000000 +0000 @@ -83,6 +83,7 @@ scope_(NULL), nesting_stack_(NULL), loop_depth_(0), + stack_height_(0), context_(NULL), bailout_entries_(0), stack_checks_(2), // There's always at least one. @@ -110,10 +111,7 @@ private: class Breakable; class Iteration; - class TryCatch; - class TryFinally; - class Finally; - class ForIn; + class TestContext; class NestedStatement BASE_EMBEDDED { @@ -131,134 +129,135 @@ virtual Breakable* AsBreakable() { return NULL; } virtual Iteration* AsIteration() { return NULL; } - virtual TryCatch* AsTryCatch() { return NULL; } - virtual TryFinally* AsTryFinally() { return NULL; } - virtual Finally* AsFinally() { return NULL; } - virtual ForIn* AsForIn() { return NULL; } virtual bool IsContinueTarget(Statement* target) { return false; } virtual bool IsBreakTarget(Statement* target) { return false; } - // Generate code to leave the nested statement. This includes - // cleaning up any stack elements in use and restoring the - // stack to the expectations of the surrounding statements. - // Takes a number of stack elements currently on top of the - // nested statement's stack, and returns a number of stack - // elements left on top of the surrounding statement's stack. - // The generated code must preserve the result register (which - // contains the value in case of a return). - virtual int Exit(int stack_depth) { - // Default implementation for the case where there is - // nothing to clean up. - return stack_depth; + // Notify the statement that we are exiting it via break, continue, or + // return and give it a chance to generate cleanup code. Return the + // next outer statement in the nesting stack. 
We accumulate in + // *stack_depth the amount to drop the stack and in *context_length the + // number of context chain links to unwind as we traverse the nesting + // stack from an exit to its target. + virtual NestedStatement* Exit(int* stack_depth, int* context_length) { + return previous_; } - NestedStatement* outer() { return previous_; } protected: MacroAssembler* masm() { return codegen_->masm(); } - private: FullCodeGenerator* codegen_; NestedStatement* previous_; DISALLOW_COPY_AND_ASSIGN(NestedStatement); }; + // A breakable statement such as a block. class Breakable : public NestedStatement { public: - Breakable(FullCodeGenerator* codegen, - BreakableStatement* break_target) - : NestedStatement(codegen), - target_(break_target) {} + Breakable(FullCodeGenerator* codegen, BreakableStatement* statement) + : NestedStatement(codegen), statement_(statement) { + } virtual ~Breakable() {} + virtual Breakable* AsBreakable() { return this; } - virtual bool IsBreakTarget(Statement* statement) { - return target_ == statement; + virtual bool IsBreakTarget(Statement* target) { + return statement() == target; } - BreakableStatement* statement() { return target_; } - Label* break_target() { return &break_target_label_; } + + BreakableStatement* statement() { return statement_; } + Label* break_label() { return &break_label_; } + private: - BreakableStatement* target_; - Label break_target_label_; - DISALLOW_COPY_AND_ASSIGN(Breakable); + BreakableStatement* statement_; + Label break_label_; }; + // An iteration statement such as a while, for, or do loop. class Iteration : public Breakable { public: - Iteration(FullCodeGenerator* codegen, - IterationStatement* iteration_statement) - : Breakable(codegen, iteration_statement) {} + Iteration(FullCodeGenerator* codegen, IterationStatement* statement) + : Breakable(codegen, statement) { + } virtual ~Iteration() {} + virtual Iteration* AsIteration() { return this; } - virtual bool IsContinueTarget(Statement* statement) { - return this->statement() == statement; + virtual bool IsContinueTarget(Statement* target) { + return statement() == target; } - Label* continue_target() { return &continue_target_label_; } + + Label* continue_label() { return &continue_label_; } + private: - Label continue_target_label_; - DISALLOW_COPY_AND_ASSIGN(Iteration); + Label continue_label_; }; - // The environment inside the try block of a try/catch statement. + // The try block of a try/catch statement. class TryCatch : public NestedStatement { public: - explicit TryCatch(FullCodeGenerator* codegen, Label* catch_entry) - : NestedStatement(codegen), catch_entry_(catch_entry) { } + explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) { + } virtual ~TryCatch() {} - virtual TryCatch* AsTryCatch() { return this; } - Label* catch_entry() { return catch_entry_; } - virtual int Exit(int stack_depth); - private: - Label* catch_entry_; - DISALLOW_COPY_AND_ASSIGN(TryCatch); + + virtual NestedStatement* Exit(int* stack_depth, int* context_length); }; - // The environment inside the try block of a try/finally statement. + // The try block of a try/finally statement. 
class TryFinally : public NestedStatement { public: - explicit TryFinally(FullCodeGenerator* codegen, Label* finally_entry) - : NestedStatement(codegen), finally_entry_(finally_entry) { } + TryFinally(FullCodeGenerator* codegen, Label* finally_entry) + : NestedStatement(codegen), finally_entry_(finally_entry) { + } virtual ~TryFinally() {} - virtual TryFinally* AsTryFinally() { return this; } - Label* finally_entry() { return finally_entry_; } - virtual int Exit(int stack_depth); + + virtual NestedStatement* Exit(int* stack_depth, int* context_length); + private: Label* finally_entry_; - DISALLOW_COPY_AND_ASSIGN(TryFinally); }; - // A FinallyEnvironment represents being inside a finally block. - // Abnormal termination of the finally block needs to clean up - // the block's parameters from the stack. + // The finally block of a try/finally statement. class Finally : public NestedStatement { public: + static const int kElementCount = 2; + explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { } virtual ~Finally() {} - virtual Finally* AsFinally() { return this; } - virtual int Exit(int stack_depth) { - return stack_depth + kFinallyStackElementCount; + + virtual NestedStatement* Exit(int* stack_depth, int* context_length) { + *stack_depth += kElementCount; + return previous_; } - private: - // Number of extra stack slots occupied during a finally block. - static const int kFinallyStackElementCount = 2; - DISALLOW_COPY_AND_ASSIGN(Finally); }; - // A ForInEnvironment represents being inside a for-in loop. - // Abnormal termination of the for-in block needs to clean up - // the block's temporary storage from the stack. + // The body of a for/in loop. class ForIn : public Iteration { public: - ForIn(FullCodeGenerator* codegen, - ForInStatement* statement) - : Iteration(codegen, statement) { } + static const int kElementCount = 5; + + ForIn(FullCodeGenerator* codegen, ForInStatement* statement) + : Iteration(codegen, statement) { + } virtual ~ForIn() {} - virtual ForIn* AsForIn() { return this; } - virtual int Exit(int stack_depth) { - return stack_depth + kForInStackElementCount; + + virtual NestedStatement* Exit(int* stack_depth, int* context_length) { + *stack_depth += kElementCount; + return previous_; + } + }; + + + // The body of a with or catch. + class WithOrCatch : public NestedStatement { + public: + explicit WithOrCatch(FullCodeGenerator* codegen) + : NestedStatement(codegen) { + } + virtual ~WithOrCatch() {} + + virtual NestedStatement* Exit(int* stack_depth, int* context_length) { + ++(*context_length); + return previous_; } - private: - static const int kForInStackElementCount = 5; - DISALLOW_COPY_AND_ASSIGN(ForIn); }; // The forward bailout stack keeps track of the expressions that can @@ -519,6 +518,35 @@ loop_depth_--; } +#if defined(V8_TARGET_ARCH_IA32) + int stack_height() { return stack_height_; } + void set_stack_height(int depth) { stack_height_ = depth; } + void increment_stack_height() { stack_height_++; } + void increment_stack_height(int delta) { stack_height_ += delta; } + void decrement_stack_height() { + if (FLAG_verify_stack_height) { + ASSERT(stack_height_ > 0); + } + stack_height_--; + } + void decrement_stack_height(int delta) { + stack_height_-= delta; + if (FLAG_verify_stack_height) { + ASSERT(stack_height_ >= 0); + } + } + // Call this function only if FLAG_verify_stack_height is true. + void verify_stack_height(); // Generates a runtime check of esp - ebp. 
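// The ia32-only stack_height_ counter introduced above keeps a compile-time
// count of the expected operand-stack depth so that, under
// FLAG_verify_stack_height, the generated code can check esp - ebp at
// expression-context boundaries. Below is a minimal standalone model of that
// bookkeeping; the class and names are invented for the sketch, and the real
// check compares machine registers rather than a std::vector size.
#include <cassert>
#include <vector>

class StackHeightTracker {
 public:
  void Push(int value) {
    frame_.push_back(value);
    ++expected_height_;
  }
  int Pop() {
    assert(expected_height_ > 0);
    --expected_height_;
    int value = frame_.back();
    frame_.pop_back();
    return value;
  }
  // Analogue of verify_stack_height(): the tracked height must agree with
  // the actual frame depth at well-defined points.
  void Verify() const {
    assert(static_cast<int>(frame_.size()) == expected_height_);
  }

 private:
  std::vector<int> frame_;
  int expected_height_ = 0;
};

int main() {
  StackHeightTracker tracker;
  tracker.Push(1);
  tracker.Push(2);
  tracker.Verify();  // heights agree after two pushes
  tracker.Pop();
  tracker.Verify();
  return 0;
}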
+#else + int stack_height() { return 0; } + void set_stack_height(int depth) {} + void increment_stack_height() {} + void increment_stack_height(int delta) {} + void decrement_stack_height() {} + void decrement_stack_height(int delta) {} + void verify_stack_height() {} +#endif // V8_TARGET_ARCH_IA32 + MacroAssembler* masm() { return masm_; } class ExpressionContext; @@ -578,6 +606,10 @@ virtual ~ExpressionContext() { codegen_->set_new_context(old_); + if (FLAG_verify_stack_height) { + ASSERT_EQ(expected_stack_height_, codegen()->stack_height()); + codegen()->verify_stack_height(); + } } Isolate* isolate() const { return codegen_->isolate(); } @@ -631,6 +663,7 @@ FullCodeGenerator* codegen() const { return codegen_; } MacroAssembler* masm() const { return masm_; } MacroAssembler* masm_; + int expected_stack_height_; // The expected stack height esp - ebp on exit. private: const ExpressionContext* old_; @@ -640,7 +673,9 @@ class AccumulatorValueContext : public ExpressionContext { public: explicit AccumulatorValueContext(FullCodeGenerator* codegen) - : ExpressionContext(codegen) { } + : ExpressionContext(codegen) { + expected_stack_height_ = codegen->stack_height(); + } virtual void Plug(bool flag) const; virtual void Plug(Register reg) const; @@ -661,7 +696,9 @@ class StackValueContext : public ExpressionContext { public: explicit StackValueContext(FullCodeGenerator* codegen) - : ExpressionContext(codegen) { } + : ExpressionContext(codegen) { + expected_stack_height_ = codegen->stack_height() + 1; + } virtual void Plug(bool flag) const; virtual void Plug(Register reg) const; @@ -690,7 +727,9 @@ condition_(condition), true_label_(true_label), false_label_(false_label), - fall_through_(fall_through) { } + fall_through_(fall_through) { + expected_stack_height_ = codegen->stack_height(); + } static const TestContext* cast(const ExpressionContext* context) { ASSERT(context->IsTest()); @@ -727,7 +766,10 @@ class EffectContext : public ExpressionContext { public: explicit EffectContext(FullCodeGenerator* codegen) - : ExpressionContext(codegen) { } + : ExpressionContext(codegen) { + expected_stack_height_ = codegen->stack_height(); + } + virtual void Plug(bool flag) const; virtual void Plug(Register reg) const; @@ -751,6 +793,7 @@ Label return_label_; NestedStatement* nesting_stack_; int loop_depth_; + int stack_height_; const ExpressionContext* context_; ZoneList bailout_entries_; ZoneList stack_checks_; diff -Nru libv8-3.4.14.21/src/func-name-inferrer.h libv8-3.5.10.24/src/func-name-inferrer.h --- libv8-3.4.14.21/src/func-name-inferrer.h 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/src/func-name-inferrer.h 2011-10-14 09:24:03.000000000 +0000 @@ -70,6 +70,12 @@ } } + void RemoveLastFunction() { + if (IsOpen() && !funcs_to_infer_.is_empty()) { + funcs_to_infer_.RemoveLast(); + } + } + // Infers a function name and leaves names collection state. void Infer() { ASSERT(IsOpen()); diff -Nru libv8-3.4.14.21/src/globals.h libv8-3.5.10.24/src/globals.h --- libv8-3.4.14.21/src/globals.h 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/globals.h 2011-09-27 07:55:59.000000000 +0000 @@ -226,6 +226,10 @@ const int kBinary32MantissaBits = 23; const int kBinary32ExponentShift = 23; +// Quiet NaNs have bits 51 to 62 set, possibly the sign bit, and no +// other bits set. +const uint64_t kQuietNaNMask = static_cast(0xfff) << 51; + // ASCII/UC16 constants // Code-point values in Unicode 4.0 are 21 bits wide. 
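// The kQuietNaNMask constant added to globals.h above (the cast presumably
// reads static_cast<uint64_t>(0xfff) << 51; the template argument appears to
// have been dropped when this diff was copied) selects bits 51..62 of an
// IEEE-754 double: the eleven exponent bits plus the top mantissa bit.
// Every quiet NaN has all of those bits set. A small standalone check of
// that claim -- an illustration, not V8 code:
#include <cassert>
#include <cstdint>
#include <cstring>
#include <limits>

static const uint64_t kQuietNaNMask = static_cast<uint64_t>(0xfff) << 51;

static uint64_t BitsOf(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));  // type-pun without UB
  return bits;
}

int main() {
  double quiet_nan = std::numeric_limits<double>::quiet_NaN();
  // All masked bits are set for a quiet NaN...
  assert((BitsOf(quiet_nan) & kQuietNaNMask) == kQuietNaNMask);
  // ...but not for finite values or infinity (infinity has a zero mantissa,
  // so bit 51 is clear).
  assert((BitsOf(1.0) & kQuietNaNMask) != kQuietNaNMask);
  assert((BitsOf(std::numeric_limits<double>::infinity()) & kQuietNaNMask) !=
         kQuietNaNMask);
  return 0;
}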
typedef uint16_t uc16; diff -Nru libv8-3.4.14.21/src/handles.cc libv8-3.5.10.24/src/handles.cc --- libv8-3.4.14.21/src/handles.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/handles.cc 2011-08-24 12:02:41.000000000 +0000 @@ -422,43 +422,18 @@ Handle GetHiddenProperties(Handle obj, - bool create_if_needed) { - Isolate* isolate = obj->GetIsolate(); - Object* holder = obj->BypassGlobalProxy(); - if (holder->IsUndefined()) return isolate->factory()->undefined_value(); - obj = Handle(JSObject::cast(holder), isolate); - - if (obj->HasFastProperties()) { - // If the object has fast properties, check whether the first slot - // in the descriptor array matches the hidden symbol. Since the - // hidden symbols hash code is zero (and no other string has hash - // code zero) it will always occupy the first entry if present. - DescriptorArray* descriptors = obj->map()->instance_descriptors(); - if ((descriptors->number_of_descriptors() > 0) && - (descriptors->GetKey(0) == isolate->heap()->hidden_symbol()) && - descriptors->IsProperty(0)) { - ASSERT(descriptors->GetType(0) == FIELD); - return Handle(obj->FastPropertyAt(descriptors->GetFieldIndex(0)), - isolate); - } - } + JSObject::HiddenPropertiesFlag flag) { + CALL_HEAP_FUNCTION(obj->GetIsolate(), + obj->GetHiddenProperties(flag), + Object); +} - // Only attempt to find the hidden properties in the local object and not - // in the prototype chain. Note that HasLocalProperty() can cause a GC in - // the general case in the presence of interceptors. - if (!obj->HasHiddenPropertiesObject()) { - // Hidden properties object not found. Allocate a new hidden properties - // object if requested. Otherwise return the undefined value. - if (create_if_needed) { - Handle hidden_obj = - isolate->factory()->NewJSObject(isolate->object_function()); - CALL_HEAP_FUNCTION(isolate, - obj->SetHiddenPropertiesObject(*hidden_obj), Object); - } else { - return isolate->factory()->undefined_value(); - } - } - return Handle(obj->GetHiddenPropertiesObject(), isolate); + +int GetIdentityHash(Handle obj) { + CALL_AND_RETRY(obj->GetIsolate(), + obj->GetIdentityHash(JSObject::ALLOW_CREATION), + return Smi::cast(__object__)->value(), + return 0); } @@ -642,15 +617,17 @@ { AssertNoAllocation no_heap_allocation; // ensure vectors stay valid. // Dispatch on type of strings. - if (src->IsAsciiRepresentation()) { + String::FlatContent content = src->GetFlatContent(); + ASSERT(content.IsFlat()); + if (content.IsAscii()) { CalculateLineEnds(isolate, &line_ends, - src->ToAsciiVector(), + content.ToAsciiVector(), with_last_line); } else { CalculateLineEnds(isolate, &line_ends, - src->ToUC16Vector(), + content.ToUC16Vector(), with_last_line); } } @@ -908,6 +885,15 @@ } +Handle PutIntoObjectHashTable(Handle table, + Handle key, + Handle value) { + CALL_HEAP_FUNCTION(table->GetIsolate(), + table->Put(*key, *value), + ObjectHashTable); +} + + bool EnsureCompiled(Handle shared, ClearExceptionFlag flag) { return shared->is_compiled() || CompileLazyShared(shared, flag); diff -Nru libv8-3.4.14.21/src/handles.h libv8-3.5.10.24/src/handles.h --- libv8-3.4.14.21/src/handles.h 2011-07-04 14:01:31.000000000 +0000 +++ libv8-3.5.10.24/src/handles.h 2011-08-01 11:41:52.000000000 +0000 @@ -264,9 +264,13 @@ Handle SetPrototype(Handle obj, Handle value); // Return the object's hidden properties object. If the object has no hidden -// properties and create_if_needed is true, then a new hidden property object -// will be allocated. Otherwise the Heap::undefined_value is returned. 
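// The handles.cc hunk above adds GetIdentityHash() and
// PutIntoObjectHashTable(), the building blocks for tables keyed by object
// *identity* (the JSWeakMap support touched elsewhere in this patch appears
// to sit on top of them). A minimal standalone model of the idea: each
// object lazily receives an identity hash the first time it is asked for
// one, and a table keys on that hash plus pointer equality. Illustration
// only; every name below is invented for the sketch.
#include <cassert>
#include <cstdint>
#include <random>
#include <unordered_map>

class HeapObjectModel {
 public:
  // Analogue of GetIdentityHash(ALLOW_CREATION): create the hash on demand,
  // then keep returning the same value for the lifetime of the object.
  uint32_t IdentityHash() {
    if (identity_hash_ == 0) {
      static std::mt19937 rng(42);
      identity_hash_ = static_cast<uint32_t>(rng()) | 1;  // 0 means "unset"
    }
    return identity_hash_;
  }

 private:
  uint32_t identity_hash_ = 0;
};

struct IdentityHasher {
  size_t operator()(HeapObjectModel* object) const {
    return object->IdentityHash();
  }
};

int main() {
  // Analogue of PutIntoObjectHashTable(table, key, value).
  std::unordered_map<HeapObjectModel*, int, IdentityHasher> table;
  HeapObjectModel a, b;
  table[&a] = 1;
  table[&b] = 2;
  assert(table[&a] == 1 && table[&b] == 2);
  assert(a.IdentityHash() == a.IdentityHash());  // stable once created
  return 0;
}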
-Handle GetHiddenProperties(Handle obj, bool create_if_needed); +// properties and HiddenPropertiesFlag::ALLOW_CREATION is passed, then a new +// hidden property object will be allocated. Otherwise Heap::undefined_value +// is returned. +Handle GetHiddenProperties(Handle obj, + JSObject::HiddenPropertiesFlag flag); + +int GetIdentityHash(Handle obj); Handle DeleteElement(Handle obj, uint32_t index); Handle DeleteProperty(Handle obj, Handle prop); @@ -343,6 +347,10 @@ Handle PreventExtensions(Handle object); +Handle PutIntoObjectHashTable(Handle table, + Handle key, + Handle value); + // Does lazy compilation of the given function. Returns true on success and // false if the compilation resulted in a stack overflow. enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION }; diff -Nru libv8-3.4.14.21/src/heap.cc libv8-3.5.10.24/src/heap.cc --- libv8-3.4.14.21/src/heap.cc 2011-08-09 12:57:00.000000000 +0000 +++ libv8-3.5.10.24/src/heap.cc 2011-09-14 13:44:32.000000000 +0000 @@ -81,14 +81,14 @@ reserved_semispace_size_(16*MB), max_semispace_size_(16*MB), initial_semispace_size_(1*MB), - max_old_generation_size_(1*GB), + max_old_generation_size_(1400*MB), max_executable_size_(256*MB), code_range_size_(512*MB), #else reserved_semispace_size_(8*MB), max_semispace_size_(8*MB), initial_semispace_size_(512*KB), - max_old_generation_size_(512*MB), + max_old_generation_size_(700*MB), max_executable_size_(128*MB), code_range_size_(0), #endif @@ -438,7 +438,9 @@ #if defined(DEBUG) ReportStatisticsAfterGC(); #endif // DEBUG +#ifdef ENABLE_DEBUGGER_SUPPORT isolate_->debug()->AfterGarbageCollection(); +#endif // ENABLE_DEBUGGER_SUPPORT } @@ -1288,10 +1290,18 @@ &ObjectEvacuationStrategy:: template VisitSpecialized); + table_.Register(kVisitSlicedString, + &ObjectEvacuationStrategy:: + template VisitSpecialized); + table_.Register(kVisitSharedFunctionInfo, &ObjectEvacuationStrategy:: template VisitSpecialized); + table_.Register(kVisitJSWeakMap, + &ObjectEvacuationStrategy:: + Visit); + table_.Register(kVisitJSRegExp, &ObjectEvacuationStrategy:: Visit); @@ -1739,6 +1749,12 @@ set_fixed_cow_array_map(Map::cast(obj)); ASSERT(fixed_array_map() != fixed_cow_array_map()); + { MaybeObject* maybe_obj = + AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); + if (!maybe_obj->ToObject(&obj)) return false; + } + set_serialized_scope_info_map(Map::cast(obj)); + { MaybeObject* maybe_obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize); if (!maybe_obj->ToObject(&obj)) return false; } @@ -1904,6 +1920,12 @@ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); if (!maybe_obj->ToObject(&obj)) return false; } + set_block_context_map(Map::cast(obj)); + + { MaybeObject* maybe_obj = + AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); + if (!maybe_obj->ToObject(&obj)) return false; + } Map* global_context_map = Map::cast(obj); global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext); set_global_context_map(global_context_map); @@ -2546,6 +2568,8 @@ // If the resulting string is small make a flat string. if (length < String::kMinNonFlatLength) { + // Note that neither of the two inputs can be a slice because: + STATIC_ASSERT(String::kMinNonFlatLength <= SlicedString::kMinLength); ASSERT(first->IsFlat()); ASSERT(second->IsFlat()); if (is_ascii) { @@ -2637,24 +2661,69 @@ // Make an attempt to flatten the buffer to reduce access time. buffer = buffer->TryFlattenGetString(); + // TODO(1626): For now slicing external strings is not supported. 
However, + // a flat cons string can have an external string as first part in some cases. + // Therefore we have to single out this case as well. + if (!FLAG_string_slices || + (buffer->IsConsString() && + (!buffer->IsFlat() || + !ConsString::cast(buffer)->first()->IsSeqString())) || + buffer->IsExternalString() || + length < SlicedString::kMinLength || + pretenure == TENURED) { + Object* result; + { MaybeObject* maybe_result = buffer->IsAsciiRepresentation() + ? AllocateRawAsciiString(length, pretenure) + : AllocateRawTwoByteString(length, pretenure); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + String* string_result = String::cast(result); + // Copy the characters into the new object. + if (buffer->IsAsciiRepresentation()) { + ASSERT(string_result->IsAsciiRepresentation()); + char* dest = SeqAsciiString::cast(string_result)->GetChars(); + String::WriteToFlat(buffer, dest, start, end); + } else { + ASSERT(string_result->IsTwoByteRepresentation()); + uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); + String::WriteToFlat(buffer, dest, start, end); + } + return result; + } + + ASSERT(buffer->IsFlat()); + ASSERT(!buffer->IsExternalString()); +#if DEBUG + buffer->StringVerify(); +#endif + Object* result; - { MaybeObject* maybe_result = buffer->IsAsciiRepresentation() - ? AllocateRawAsciiString(length, pretenure ) - : AllocateRawTwoByteString(length, pretenure); + { Map* map = buffer->IsAsciiRepresentation() + ? sliced_ascii_string_map() + : sliced_string_map(); + MaybeObject* maybe_result = Allocate(map, NEW_SPACE); if (!maybe_result->ToObject(&result)) return maybe_result; } - String* string_result = String::cast(result); - // Copy the characters into the new object. - if (buffer->IsAsciiRepresentation()) { - ASSERT(string_result->IsAsciiRepresentation()); - char* dest = SeqAsciiString::cast(string_result)->GetChars(); - String::WriteToFlat(buffer, dest, start, end); + + AssertNoAllocation no_gc; + SlicedString* sliced_string = SlicedString::cast(result); + sliced_string->set_length(length); + sliced_string->set_hash_field(String::kEmptyHashField); + if (buffer->IsConsString()) { + ConsString* cons = ConsString::cast(buffer); + ASSERT(cons->second()->length() == 0); + sliced_string->set_parent(cons->first()); + sliced_string->set_offset(start); + } else if (buffer->IsSlicedString()) { + // Prevent nesting sliced strings. + SlicedString* parent_slice = SlicedString::cast(buffer); + sliced_string->set_parent(parent_slice->parent()); + sliced_string->set_offset(start + parent_slice->offset()); } else { - ASSERT(string_result->IsTwoByteRepresentation()); - uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); - String::WriteToFlat(buffer, dest, start, end); + sliced_string->set_parent(buffer); + sliced_string->set_offset(start); } - + ASSERT(sliced_string->parent()->IsSeqString()); return result; } @@ -3389,17 +3458,22 @@ object_size); } - FixedArray* elements = FixedArray::cast(source->elements()); + FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); FixedArray* properties = FixedArray::cast(source->properties()); // Update elements if necessary. if (elements->length() > 0) { Object* elem; - { MaybeObject* maybe_elem = - (elements->map() == fixed_cow_array_map()) ? 
- elements : CopyFixedArray(elements); + { MaybeObject* maybe_elem; + if (elements->map() == fixed_cow_array_map()) { + maybe_elem = FixedArray::cast(elements); + } else if (source->HasFastDoubleElements()) { + maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); + } else { + maybe_elem = CopyFixedArray(FixedArray::cast(elements)); + } if (!maybe_elem->ToObject(&elem)) return maybe_elem; } - JSObject::cast(clone)->set_elements(FixedArray::cast(elem)); + JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem)); } // Update properties if necessary. if (properties->length() > 0) { @@ -3758,6 +3832,23 @@ } +MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, + Map* map) { + int len = src->length(); + Object* obj; + { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED); + if (!maybe_obj->ToObject(&obj)) return maybe_obj; + } + HeapObject* dst = HeapObject::cast(obj); + dst->set_map(map); + CopyBlock( + dst->address() + FixedDoubleArray::kLengthOffset, + src->address() + FixedDoubleArray::kLengthOffset, + FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); + return obj; +} + + MaybeObject* Heap::AllocateFixedArray(int length) { ASSERT(length >= 0); if (length == 0) return empty_fixed_array(); @@ -3989,6 +4080,36 @@ } +MaybeObject* Heap::AllocateBlockContext(JSFunction* function, + Context* previous, + SerializedScopeInfo* scope_info) { + Object* result; + { MaybeObject* maybe_result = + AllocateFixedArrayWithHoles(scope_info->NumberOfContextSlots()); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + Context* context = reinterpret_cast(result); + context->set_map(block_context_map()); + context->set_closure(function); + context->set_previous(previous); + context->set_extension(scope_info); + context->set_global(previous->global()); + return context; +} + + +MaybeObject* Heap::AllocateSerializedScopeInfo(int length) { + Object* result; + { MaybeObject* maybe_result = AllocateFixedArray(length, TENURED); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + SerializedScopeInfo* scope_info = + reinterpret_cast(result); + scope_info->set_map(serialized_scope_info_map()); + return scope_info; +} + + MaybeObject* Heap::AllocateStruct(InstanceType type) { Map* map; switch (type) { diff -Nru libv8-3.4.14.21/src/heap.h libv8-3.5.10.24/src/heap.h --- libv8-3.4.14.21/src/heap.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/heap.h 2011-08-29 10:41:00.000000000 +0000 @@ -65,6 +65,7 @@ V(Map, heap_number_map, HeapNumberMap) \ V(Map, global_context_map, GlobalContextMap) \ V(Map, fixed_array_map, FixedArrayMap) \ + V(Map, serialized_scope_info_map, SerializedScopeInfoMap) \ V(Map, fixed_cow_array_map, FixedCOWArrayMap) \ V(Map, fixed_double_array_map, FixedDoubleArrayMap) \ V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \ @@ -87,6 +88,8 @@ V(Map, symbol_map, SymbolMap) \ V(Map, cons_string_map, ConsStringMap) \ V(Map, cons_ascii_string_map, ConsAsciiStringMap) \ + V(Map, sliced_string_map, SlicedStringMap) \ + V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \ V(Map, ascii_symbol_map, AsciiSymbolMap) \ V(Map, cons_symbol_map, ConsSymbolMap) \ V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \ @@ -111,6 +114,7 @@ V(Map, function_context_map, FunctionContextMap) \ V(Map, catch_context_map, CatchContextMap) \ V(Map, with_context_map, WithContextMap) \ + V(Map, block_context_map, BlockContextMap) \ V(Map, code_map, CodeMap) \ V(Map, oddball_map, OddballMap) \ V(Map, 
global_property_cell_map, GlobalPropertyCellMap) \ @@ -160,6 +164,7 @@ V(length_symbol, "length") \ V(name_symbol, "name") \ V(native_symbol, "native") \ + V(null_symbol, "null") \ V(number_symbol, "number") \ V(Number_symbol, "Number") \ V(nan_symbol, "NaN") \ @@ -220,7 +225,8 @@ V(closure_symbol, "(closure)") \ V(use_strict, "use strict") \ V(dot_symbol, ".") \ - V(anonymous_function_symbol, "(anonymous function)") + V(anonymous_function_symbol, "(anonymous function)") \ + V(block_scope_symbol, ".block") // Forward declarations. class GCTracer; @@ -483,6 +489,9 @@ // Allocates an empty code cache. MUST_USE_RESULT MaybeObject* AllocateCodeCache(); + // Allocates a serialized scope info. + MUST_USE_RESULT MaybeObject* AllocateSerializedScopeInfo(int length); + // Allocates an empty PolymorphicCodeCache. MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache(); @@ -617,6 +626,16 @@ // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map); + // Make a copy of src and return it. Returns + // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. + MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray( + FixedDoubleArray* src); + + // Make a copy of src, set the map, and return the copy. Returns + // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. + MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap( + FixedDoubleArray* src, Map* map); + // Allocates a fixed array initialized with the hole values. // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation // failed. @@ -658,6 +677,11 @@ Context* previous, JSObject* extension); + // Allocate a block context. + MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function, + Context* previous, + SerializedScopeInfo* info); + // Allocates a new utility object in the old generation. MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type); @@ -1636,6 +1660,7 @@ friend class Page; friend class Isolate; friend class MarkCompactCollector; + friend class StaticMarkingVisitor; friend class MapCompact; DISALLOW_COPY_AND_ASSIGN(Heap); diff -Nru libv8-3.4.14.21/src/heap-inl.h libv8-3.5.10.24/src/heap-inl.h --- libv8-3.4.14.21/src/heap-inl.h 2011-07-21 12:28:27.000000000 +0000 +++ libv8-3.5.10.24/src/heap-inl.h 2011-08-29 10:41:00.000000000 +0000 @@ -142,6 +142,11 @@ } +MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { + return CopyFixedDoubleArrayWithMap(src, src->map()); +} + + MaybeObject* Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space) { @@ -318,10 +323,10 @@ ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE); if (type < FIRST_NONSTRING_TYPE) { - // There are three string representations: sequential strings, cons - // strings, and external strings. Only cons strings contain - // non-map-word pointers to heap objects. - return ((type & kStringRepresentationMask) == kConsStringTag) + // There are four string representations: sequential strings, external + // strings, cons strings, and sliced strings. + // Only the latter two contain non-map-word pointers to heap objects. + return ((type & kIsIndirectStringMask) == kIsIndirectStringTag) ? 
OLD_POINTER_SPACE : OLD_DATA_SPACE; } else { diff -Nru libv8-3.4.14.21/src/hydrogen.cc libv8-3.5.10.24/src/hydrogen.cc --- libv8-3.4.14.21/src/hydrogen.cc 2011-09-12 06:47:01.000000000 +0000 +++ libv8-3.5.10.24/src/hydrogen.cc 2011-10-26 11:51:05.000000000 +0000 @@ -736,6 +736,8 @@ HPhase phase("Assign dominators", this); for (int i = 0; i < blocks_.length(); ++i) { if (blocks_[i]->IsLoopHeader()) { + // Only the first predecessor of a loop header is from outside the loop. + // All others are back edges, and thus cannot dominate the loop header. blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first()); } else { for (int j = 0; j < blocks_[i]->predecessors()->length(); ++j) { @@ -743,13 +745,15 @@ } } } +} - // Propagate flag marking blocks containing unconditional deoptimize. +// Mark all blocks that are dominated by an unconditional soft deoptimize to +// prevent code motion across those blocks. +void HGraph::PropagateDeoptimizingMark() { + HPhase phase("Propagate deoptimizing mark", this); MarkAsDeoptimizingRecursively(entry_block()); } - -// Mark all blocks that are dominated by an unconditional deoptimize. void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) { for (int i = 0; i < block->dominated_blocks()->length(); ++i) { HBasicBlock* dominated = block->dominated_blocks()->at(i); @@ -836,7 +840,7 @@ } -bool HGraph::CheckPhis() { +bool HGraph::CheckArgumentsPhiUses() { int block_count = blocks_.length(); for (int i = 0; i < block_count; ++i) { for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { @@ -849,13 +853,11 @@ } -bool HGraph::CollectPhis() { +bool HGraph::CheckConstPhiUses() { int block_count = blocks_.length(); - phi_list_ = new ZoneList(block_count); for (int i = 0; i < block_count; ++i) { for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { HPhi* phi = blocks_[i]->phis()->at(j); - phi_list_->Add(phi); // Check for the hole value (from an uninitialized const). 
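// Stepping back to the AllocateSubString change in heap.cc earlier in this
// patch: a substring longer than SlicedString::kMinLength is now represented
// as a SlicedString that points into its parent instead of copying
// characters, and slicing a slice re-points at the original parent while
// adding the offsets, so slices never nest. A minimal standalone model of
// that invariant -- an illustration only, with invented names and none of
// V8's flattening or GC concerns:
#include <cassert>
#include <memory>
#include <string>

struct Slice {
  std::shared_ptr<const std::string> parent;  // always a "sequential" string
  size_t offset;
  size_t length;

  std::string Materialize() const { return parent->substr(offset, length); }
};

// Analogue of taking a substring of a sliced string: add the offsets and
// reuse the existing parent rather than building a slice of a slice.
Slice SubSlice(const Slice& s, size_t start, size_t end) {
  assert(start <= end && end <= s.length);
  return Slice{s.parent, s.offset + start, end - start};
}

int main() {
  auto backing = std::make_shared<const std::string>("the quick brown fox");
  Slice quick_brown{backing, 4, 11};           // "quick brown"
  Slice brown = SubSlice(quick_brown, 6, 11);  // "brown"
  assert(brown.parent == backing);             // no nested slices
  assert(brown.Materialize() == "brown");
  return 0;
}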
for (int k = 0; k < phi->OperandCount(); k++) { if (phi->OperandAt(k) == GetConstantHole()) return false; @@ -866,6 +868,18 @@ } +void HGraph::CollectPhis() { + int block_count = blocks_.length(); + phi_list_ = new ZoneList(block_count); + for (int i = 0; i < block_count; ++i) { + for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { + HPhi* phi = blocks_[i]->phis()->at(j); + phi_list_->Add(phi); + } + } +} + + void HGraph::InferTypes(ZoneList* worklist) { BitVector in_worklist(GetMaximumValueID()); for (int i = 0; i < worklist->length(); ++i) { @@ -1473,6 +1487,9 @@ block->block_id() < dominated->block_id() && visited_on_paths_.Add(block->block_id())) { side_effects |= block_side_effects_[block->block_id()]; + if (block->IsLoopHeader()) { + side_effects |= loop_side_effects_[block->block_id()]; + } side_effects |= CollectSideEffectsOnPathsToDominatedBlock( dominator, block); } @@ -2169,7 +2186,9 @@ } HBasicBlock* empty_true = builder->graph()->CreateBasicBlock(); HBasicBlock* empty_false = builder->graph()->CreateBasicBlock(); - HBranch* test = new(zone()) HBranch(value, empty_true, empty_false); + unsigned test_id = condition()->test_id(); + ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id)); + HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected); builder->current_block()->Finish(test); empty_true->Goto(if_true()); @@ -2304,34 +2323,27 @@ graph()->OrderBlocks(); graph()->AssignDominators(); - graph()->EliminateRedundantPhis(); - if (!graph()->CheckPhis()) { - Bailout("Unsupported phi use of arguments object"); + graph()->PropagateDeoptimizingMark(); + if (!graph()->CheckConstPhiUses()) { + Bailout("Unsupported phi use of const variable"); return NULL; } - if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis(); - if (!graph()->CollectPhis()) { - Bailout("Unsupported phi use of uninitialized constant"); + graph()->EliminateRedundantPhis(); + if (!graph()->CheckArgumentsPhiUses()) { + Bailout("Unsupported phi use of arguments"); return NULL; } + if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis(); + graph()->CollectPhis(); HInferRepresentation rep(graph()); rep.Analyze(); - if (FLAG_use_range) { - HRangeAnalysis rangeAnalysis(graph()); - rangeAnalysis.Analyze(); - } - - graph()->InitializeInferredTypes(); - graph()->Canonicalize(); graph()->MarkDeoptimizeOnUndefined(); graph()->InsertRepresentationChanges(); - graph()->ComputeMinusZeroChecks(); - // Eliminate redundant stack checks on backwards branches. - HStackCheckEliminator sce(graph()); - sce.Process(); + graph()->InitializeInferredTypes(); + graph()->Canonicalize(); // Perform common subexpression elimination and loop-invariant code motion. if (FLAG_use_gvn) { @@ -2340,6 +2352,16 @@ gvn.Analyze(); } + if (FLAG_use_range) { + HRangeAnalysis rangeAnalysis(graph()); + rangeAnalysis.Analyze(); + } + graph()->ComputeMinusZeroChecks(); + + // Eliminate redundant stack checks on backwards branches. + HStackCheckEliminator sce(graph()); + sce.Process(); + // Replace the results of check instructions with the original value, if the // result is used. This is safe now, since we don't do code motion after this // point. 
It enables better register allocation since the value produced by @@ -2481,6 +2503,9 @@ ASSERT(!HasStackOverflow()); ASSERT(current_block() != NULL); ASSERT(current_block()->HasPredecessor()); + if (stmt->block_scope() != NULL) { + return Bailout("ScopedBlock"); + } BreakAndContinueInfo break_info(stmt); { BreakAndContinueScope push(&break_info, this); CHECK_BAILOUT(VisitStatements(stmt->statements())); @@ -2632,12 +2657,11 @@ } -void HGraphBuilder::VisitEnterWithContextStatement( - EnterWithContextStatement* stmt) { +void HGraphBuilder::VisitWithStatement(WithStatement* stmt) { ASSERT(!HasStackOverflow()); ASSERT(current_block() != NULL); ASSERT(current_block()->HasPredecessor()); - return Bailout("EnterWithContextStatement"); + return Bailout("WithStatement"); } @@ -3120,6 +3144,8 @@ Variable* variable = expr->AsVariable(); if (variable == NULL) { return Bailout("reference to rewritten variable"); + } else if (variable->mode() == Variable::LET) { + return Bailout("reference to let variable"); } else if (variable->IsStackAllocated()) { HValue* value = environment()->Lookup(variable); if (variable->mode() == Variable::CONST && @@ -3297,8 +3323,8 @@ // Load the elements array before the first store. if (elements == NULL) { - elements = new(zone()) HLoadElements(literal); - AddInstruction(elements); + elements = new(zone()) HLoadElements(literal); + AddInstruction(elements); } HValue* key = AddInstruction( @@ -3394,7 +3420,7 @@ ASSERT(!name.is_null()); LookupResult lookup; - ZoneMapList* types = expr->GetReceiverTypes(); + SmallMapList* types = expr->GetReceiverTypes(); bool is_monomorphic = expr->IsMonomorphic() && ComputeStoredField(types->first(), name, &lookup); @@ -3408,7 +3434,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr, HValue* object, HValue* value, - ZoneMapList* types, + SmallMapList* types, Handle name) { // TODO(ager): We should recognize when the prototype chains for different // maps are identical. In that case we can avoid repeatedly generating the @@ -3499,7 +3525,7 @@ Handle name = Handle::cast(key->handle()); ASSERT(!name.is_null()); - ZoneMapList* types = expr->GetReceiverTypes(); + SmallMapList* types = expr->GetReceiverTypes(); LookupResult lookup; if (expr->IsMonomorphic()) { @@ -3585,8 +3611,9 @@ BinaryOperation* operation = expr->binary_operation(); if (var != NULL) { - if (var->mode() == Variable::CONST) { - return Bailout("unsupported const compound assignment"); + if (var->mode() == Variable::CONST || + var->mode() == Variable::LET) { + return Bailout("unsupported let or const compound assignment"); } CHECK_ALIVE(VisitForValue(operation)); @@ -3729,6 +3756,8 @@ // variables (e.g. initialization inside a loop). 
HValue* old_value = environment()->Lookup(var); AddInstruction(new HUseConst(old_value)); + } else if (var->mode() == Variable::LET) { + return Bailout("unsupported assignment to let"); } if (proxy->IsArguments()) return Bailout("assignment to arguments"); @@ -3931,13 +3960,17 @@ : BuildLoadKeyedGeneric(object, key); } AddInstruction(new(zone()) HCheckNonSmi(object)); - AddInstruction(new(zone()) HCheckMap(object, map)); - HInstruction* elements = new(zone()) HLoadElements(object); + HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map)); + HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object)); + bool fast_double_elements = map->has_fast_double_elements(); + if (is_store && map->has_fast_elements()) { + AddInstruction(new(zone()) HCheckMap( + elements, isolate()->factory()->fixed_array_map())); + } HInstruction* length = NULL; HInstruction* checked_key = NULL; if (map->has_external_array_elements()) { - AddInstruction(elements); - length = AddInstruction(new(zone()) HExternalArrayLength(elements)); + length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); HLoadExternalArrayPointer* external_elements = new(zone()) HLoadExternalArrayPointer(elements); @@ -3945,25 +3978,13 @@ return BuildExternalArrayElementAccess(external_elements, checked_key, val, map->elements_kind(), is_store); } - bool fast_double_elements = map->has_fast_double_elements(); ASSERT(map->has_fast_elements() || fast_double_elements); if (map->instance_type() == JS_ARRAY_TYPE) { - length = AddInstruction(new(zone()) HJSArrayLength(object)); - checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); - AddInstruction(elements); - if (is_store && !fast_double_elements) { - AddInstruction(new(zone()) HCheckMap( - elements, isolate()->factory()->fixed_array_map())); - } + length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck)); } else { - AddInstruction(elements); - if (is_store && !fast_double_elements) { - AddInstruction(new(zone()) HCheckMap( - elements, isolate()->factory()->fixed_array_map())); - } - length = AddInstruction(new(zone()) HFixedArrayLength(elements)); - checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); + length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); } + checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); if (is_store) { if (fast_double_elements) { return new(zone()) HStoreKeyedFastDoubleElement(elements, @@ -3993,7 +4014,7 @@ *has_side_effects = false; AddInstruction(new(zone()) HCheckNonSmi(object)); AddInstruction(HCheckInstanceType::NewIsSpecObject(object)); - ZoneMapList* maps = prop->GetReceiverTypes(); + SmallMapList* maps = prop->GetReceiverTypes(); bool todo_external_array = false; static const int kNumElementTypes = JSObject::kElementsKindCount; @@ -4015,7 +4036,8 @@ HInstruction* elements_kind_instr = AddInstruction(new(zone()) HElementsKind(object)); - HInstruction* elements = NULL; + HCompareConstantEqAndBranch* elements_kind_branch = NULL; + HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object)); HLoadExternalArrayPointer* external_elements = NULL; HInstruction* checked_key = NULL; @@ -4031,16 +4053,8 @@ JSObject::LAST_ELEMENTS_KIND); if (elements_kind == JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND && todo_external_array) { - elements = AddInstruction(new(zone()) HLoadElements(object)); - // We need to forcibly prevent some ElementsKind-dependent instructions - // from 
being hoisted out of any loops they might occur in, because - // the current loop-invariant-code-motion algorithm isn't clever enough - // to deal with them properly. - // There's some performance to be gained by developing a smarter - // solution for this. - elements->ClearFlag(HValue::kUseGVN); HInstruction* length = - AddInstruction(new(zone()) HExternalArrayLength(elements)); + AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); external_elements = new(zone()) HLoadExternalArrayPointer(elements); AddInstruction(external_elements); @@ -4048,18 +4062,23 @@ if (type_todo[elements_kind]) { HBasicBlock* if_true = graph()->CreateBasicBlock(); HBasicBlock* if_false = graph()->CreateBasicBlock(); - HCompareConstantEqAndBranch* compare = - new(zone()) HCompareConstantEqAndBranch(elements_kind_instr, - elements_kind, - Token::EQ_STRICT); - compare->SetSuccessorAt(0, if_true); - compare->SetSuccessorAt(1, if_false); - current_block()->Finish(compare); + elements_kind_branch = new(zone()) HCompareConstantEqAndBranch( + elements_kind_instr, elements_kind, Token::EQ_STRICT); + elements_kind_branch->SetSuccessorAt(0, if_true); + elements_kind_branch->SetSuccessorAt(1, if_false); + current_block()->Finish(elements_kind_branch); set_current_block(if_true); HInstruction* access; if (elements_kind == JSObject::FAST_ELEMENTS || elements_kind == JSObject::FAST_DOUBLE_ELEMENTS) { + bool fast_double_elements = + elements_kind == JSObject::FAST_DOUBLE_ELEMENTS; + if (is_store && elements_kind == JSObject::FAST_ELEMENTS) { + AddInstruction(new(zone()) HCheckMap( + elements, isolate()->factory()->fixed_array_map(), + elements_kind_branch)); + } HBasicBlock* if_jsarray = graph()->CreateBasicBlock(); HBasicBlock* if_fastobject = graph()->CreateBasicBlock(); HHasInstanceTypeAndBranch* typecheck = @@ -4069,14 +4088,9 @@ current_block()->Finish(typecheck); set_current_block(if_jsarray); - HInstruction* length = new(zone()) HJSArrayLength(object); + HInstruction* length = new(zone()) HJSArrayLength(object, typecheck); AddInstruction(length); - length->ClearFlag(HValue::kUseGVN); checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); - elements = AddInstruction(new(zone()) HLoadElements(object)); - elements->ClearFlag(HValue::kUseGVN); - bool fast_double_elements = - elements_kind == JSObject::FAST_DOUBLE_ELEMENTS; if (is_store) { if (fast_double_elements) { access = AddInstruction( @@ -4084,8 +4098,6 @@ checked_key, val)); } else { - AddInstruction(new(zone()) HCheckMap( - elements, isolate()->factory()->fixed_array_map())); access = AddInstruction( new(zone()) HStoreKeyedFastElement(elements, checked_key, val)); } @@ -4106,13 +4118,7 @@ if_jsarray->Goto(join); set_current_block(if_fastobject); - elements = AddInstruction(new(zone()) HLoadElements(object)); - elements->ClearFlag(HValue::kUseGVN); - if (is_store && !fast_double_elements) { - AddInstruction(new(zone()) HCheckMap( - elements, isolate()->factory()->fixed_array_map())); - } - length = AddInstruction(new(zone()) HFixedArrayLength(elements)); + length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements)); checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length)); if (is_store) { if (fast_double_elements) { @@ -4256,8 +4262,9 @@ if (expr->IsArrayLength()) { HValue* array = Pop(); AddInstruction(new(zone()) HCheckNonSmi(array)); - AddInstruction(HCheckInstanceType::NewIsJSArray(array)); - instr = new(zone()) HJSArrayLength(array); + HInstruction* 
mapcheck = + AddInstruction(HCheckInstanceType::NewIsJSArray(array)); + instr = new(zone()) HJSArrayLength(array, mapcheck); } else if (expr->IsStringLength()) { HValue* string = Pop(); @@ -4281,7 +4288,7 @@ } else if (expr->key()->IsPropertyName()) { Handle name = expr->key()->AsLiteral()->AsPropertyName(); - ZoneMapList* types = expr->GetReceiverTypes(); + SmallMapList* types = expr->GetReceiverTypes(); HValue* obj = Pop(); if (expr->IsMonomorphic()) { @@ -4342,7 +4349,7 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr, HValue* receiver, - ZoneMapList* types, + SmallMapList* types, Handle name) { // TODO(ager): We should recognize when the prototype chains for different // maps are identical. In that case we can avoid repeatedly generating the @@ -4872,13 +4879,14 @@ Handle name = prop->key()->AsLiteral()->AsPropertyName(); - ZoneMapList* types = expr->GetReceiverTypes(); + SmallMapList* types = expr->GetReceiverTypes(); HValue* receiver = environment()->ExpressionStackAt(expr->arguments()->length()); if (expr->IsMonomorphic()) { - Handle receiver_map = - (types == NULL) ? Handle::null() : types->first(); + Handle receiver_map = (types == NULL || types->is_empty()) + ? Handle::null() + : types->first(); if (TryInlineBuiltinFunction(expr, receiver, receiver_map, @@ -5095,19 +5103,13 @@ // The subexpression does not have side effects. return ast_context()->ReturnValue(graph()->GetConstantFalse()); } else if (prop != NULL) { - if (prop->is_synthetic()) { - // Result of deleting parameters is false, even when they rewrite - // to accesses on the arguments object. - return ast_context()->ReturnValue(graph()->GetConstantFalse()); - } else { - CHECK_ALIVE(VisitForValue(prop->obj())); - CHECK_ALIVE(VisitForValue(prop->key())); - HValue* key = Pop(); - HValue* obj = Pop(); - HValue* context = environment()->LookupContext(); - HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key); - return ast_context()->ReturnInstruction(instr, expr->id()); - } + CHECK_ALIVE(VisitForValue(prop->obj())); + CHECK_ALIVE(VisitForValue(prop->key())); + HValue* key = Pop(); + HValue* obj = Pop(); + HValue* context = environment()->LookupContext(); + HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key); + return ast_context()->ReturnInstruction(instr, expr->id()); } else if (var->is_global()) { Bailout("delete with global variable"); } else { @@ -5552,9 +5554,11 @@ // We need an extra block to maintain edge-split form. HBasicBlock* empty_block = graph()->CreateBasicBlock(); HBasicBlock* eval_right = graph()->CreateBasicBlock(); + unsigned test_id = expr->left()->test_id(); + ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id)); HBranch* test = is_logical_and - ? new(zone()) HBranch(Top(), eval_right, empty_block) - : new(zone()) HBranch(Top(), empty_block, eval_right); + ? new(zone()) HBranch(Top(), eval_right, empty_block, expected) + : new(zone()) HBranch(Top(), empty_block, eval_right, expected); current_block()->Finish(test); set_current_block(eval_right); @@ -5829,7 +5833,9 @@ void HGraphBuilder::VisitDeclaration(Declaration* decl) { // We support only declarations that do not require code generation. 
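// The hydrogen.cc hunks above start passing ToBooleanStub::Types -- a record
// of which kinds of values a condition has actually produced -- into
// HBranch, so the emitted truthiness test only has to handle the kinds that
// were observed. A minimal standalone model of that idea follows; the enum,
// the bitset and the fallback are invented for the sketch and only
// illustrate the "specialize to observed types, fall back otherwise" pattern.
#include <cassert>
#include <string>
#include <variant>

enum ObservedType : unsigned {
  kObservedBoolean = 1 << 0,
  kObservedSmi = 1 << 1,
  kObservedString = 1 << 2,
};

using Value = std::variant<bool, int, std::string>;

// Slow path that can convert anything (stands in for the generic stub).
bool GenericToBoolean(const Value& v) {
  if (const bool* b = std::get_if<bool>(&v)) return *b;
  if (const int* i = std::get_if<int>(&v)) return *i != 0;
  return !std::get<std::string>(v).empty();
}

// Fast path specialized to the kinds recorded in |expected|; falls back to
// the generic conversion when an unexpected kind shows up, much as the
// specialized branch falls back to the full stub.
bool SpecializedToBoolean(const Value& v, unsigned expected,
                          bool* fell_back) {
  *fell_back = false;
  if ((expected & kObservedBoolean) && std::holds_alternative<bool>(v))
    return std::get<bool>(v);
  if ((expected & kObservedSmi) && std::holds_alternative<int>(v))
    return std::get<int>(v) != 0;
  *fell_back = true;
  return GenericToBoolean(v);
}

int main() {
  unsigned expected = kObservedSmi;  // feedback: only small integers seen
  bool fell_back = false;
  assert(SpecializedToBoolean(Value{7}, expected, &fell_back) && !fell_back);
  // A string was never observed, so the fast path punts to the generic case.
  assert(SpecializedToBoolean(Value{std::string("x")}, expected, &fell_back));
  assert(fell_back);
  return 0;
}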
Variable* var = decl->proxy()->var(); - if (!var->IsStackAllocated() || decl->fun() != NULL) { + if (!var->IsStackAllocated() || + decl->fun() != NULL || + decl->mode() == Variable::LET) { return Bailout("unsupported declaration"); } @@ -6241,11 +6247,6 @@ } -void HGraphBuilder::GenerateIsNativeOrStrictMode(CallRuntime* call) { - return Bailout("inlined runtime function: IsNativeOrStrictMode"); -} - - #undef CHECK_BAILOUT #undef CHECK_ALIVE diff -Nru libv8-3.4.14.21/src/hydrogen.h libv8-3.5.10.24/src/hydrogen.h --- libv8-3.4.14.21/src/hydrogen.h 2011-08-11 16:03:29.000000000 +0000 +++ libv8-3.5.10.24/src/hydrogen.h 2011-09-21 08:38:23.000000000 +0000 @@ -238,15 +238,17 @@ void OrderBlocks(); void AssignDominators(); void ReplaceCheckedValues(); - void MarkAsDeoptimizingRecursively(HBasicBlock* block); + void PropagateDeoptimizingMark(); // Returns false if there are phi-uses of the arguments-object // which are not supported by the optimizing compiler. - bool CheckPhis(); + bool CheckArgumentsPhiUses(); + + // Returns false if there are phi-uses of an uninitialized const + // which are not supported by the optimizing compiler. + bool CheckConstPhiUses(); - // Returns false if there are phi-uses of hole values comming - // from uninitialized consts. - bool CollectPhis(); + void CollectPhis(); Handle Compile(CompilationInfo* info); @@ -297,6 +299,7 @@ HConstant* GetConstant(SetOncePointer* pointer, Object* value); + void MarkAsDeoptimizingRecursively(HBasicBlock* block); void InsertTypeConversions(HInstruction* instr); void PropagateMinusZeroChecks(HValue* value, BitVector* visited); void RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi); @@ -723,6 +726,8 @@ HBasicBlock* second, int join_id); + TypeFeedbackOracle* oracle() const { return function_state()->oracle(); } + private: // Type of a member function that generates inline code for a native function. 
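// The SmallMapList that replaces ZoneMapList in the signatures above,
// together with HLoadNamedFieldPolymorphic's kMaxLoadPolymorphism cap,
// bounds how many receiver maps a polymorphic property access specializes on
// before it stays generic. A minimal standalone model of that bounded
// dispatch -- invented names, illustration only:
#include <cassert>
#include <cstddef>

struct Map {};  // stands in for a hidden class / map pointer

struct PolymorphicAccess {
  static const size_t kMaxPolymorphism = 4;  // mirrors kMaxLoadPolymorphism

  const Map* maps[kMaxPolymorphism] = {};
  int field_index[kMaxPolymorphism] = {};
  size_t count = 0;

  // Record a (map, field offset) pair while there is room; past the cap the
  // access site simply stays generic.
  bool Add(const Map* map, int index) {
    if (count == kMaxPolymorphism) return false;
    maps[count] = map;
    field_index[count] = index;
    ++count;
    return true;
  }

  // Dispatch: compare the receiver's map against each recorded map, like the
  // chain of compare-map branches the polymorphic load emits, and report
  // "generic" when nothing matches.
  bool Lookup(const Map* receiver_map, int* index_out) const {
    for (size_t i = 0; i < count; ++i) {
      if (maps[i] == receiver_map) {
        *index_out = field_index[i];
        return true;
      }
    }
    return false;  // caller falls back to the generic load/store
  }
};

int main() {
  Map map_a, map_b;
  PolymorphicAccess site;
  site.Add(&map_a, 0);
  site.Add(&map_b, 2);
  int index = -1;
  assert(site.Lookup(&map_b, &index) && index == 2);
  Map unseen;
  assert(!site.Lookup(&unseen, &index));  // generic fallback
  return 0;
}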
typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call); @@ -751,7 +756,6 @@ CompilationInfo* info() const { return function_state()->compilation_info(); } - TypeFeedbackOracle* oracle() const { return function_state()->oracle(); } AstContext* call_context() const { return function_state()->call_context(); @@ -899,11 +903,11 @@ void HandlePolymorphicStoreNamedField(Assignment* expr, HValue* object, HValue* value, - ZoneMapList* types, + SmallMapList* types, Handle name); void HandlePolymorphicCallNamed(Call* expr, HValue* receiver, - ZoneMapList* types, + SmallMapList* types, Handle name); void HandleLiteralCompareTypeof(CompareOperation* compare_expr, Expression* expr, diff -Nru libv8-3.4.14.21/src/hydrogen-instructions.cc libv8-3.5.10.24/src/hydrogen-instructions.cc --- libv8-3.4.14.21/src/hydrogen-instructions.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/hydrogen-instructions.cc 2011-08-24 12:02:41.000000000 +0000 @@ -635,6 +635,13 @@ } +void HBoundsCheck::PrintDataTo(StringStream* stream) { + index()->PrintNameTo(stream); + stream->Add(" "); + length()->PrintNameTo(stream); +} + + void HCallConstantFunction::PrintDataTo(StringStream* stream) { if (IsApplyFunction()) { stream->Add("optimized apply "); @@ -771,7 +778,7 @@ void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) { value()->PrintNameTo(stream); stream->Add(" == "); - stream->Add(type_literal_->ToAsciiVector()); + stream->Add(type_literal_->GetFlatContent().ToAsciiVector()); } @@ -862,19 +869,25 @@ Range* HValue::InferRange() { - if (representation().IsTagged()) { - // Tagged values are always in int32 range when converted to integer, - // but they can contain -0. - Range* result = new Range(); - result->set_can_be_minus_zero(true); - return result; - } else if (representation().IsNone()) { - return NULL; - } else { - // Untagged integer32 cannot be -0 and we don't compute ranges for - // untagged doubles. - return new Range(); + // Untagged integer32 cannot be -0, all other representations can. + Range* result = new Range(); + result->set_can_be_minus_zero(!representation().IsInteger32()); + return result; +} + + +Range* HChange::InferRange() { + Range* input_range = value()->range(); + if (from().IsInteger32() && + to().IsTagged() && + input_range != NULL && input_range->IsInSmiRange()) { + set_type(HType::Smi()); } + Range* result = (input_range != NULL) + ? input_range->Copy() + : HValue::InferRange(); + if (to().IsInteger32()) result->set_can_be_minus_zero(false); + return result; } @@ -1223,6 +1236,7 @@ ? left()->range()->Copy() : new Range(); result->Sar(c->Integer32Value()); + result->set_can_be_minus_zero(false); return result; } } @@ -1230,6 +1244,31 @@ } +Range* HShr::InferRange() { + if (right()->IsConstant()) { + HConstant* c = HConstant::cast(right()); + if (c->HasInteger32Value()) { + int shift_count = c->Integer32Value() & 0x1f; + if (left()->range()->CanBeNegative()) { + // Only compute bounds if the result always fits into an int32. + return (shift_count >= 1) + ? new Range(0, static_cast(0xffffffff) >> shift_count) + : new Range(); + } else { + // For positive inputs we can use the >> operator. + Range* result = (left()->range() != NULL) + ? left()->range()->Copy() + : new Range(); + result->Sar(c->Integer32Value()); + result->set_can_be_minus_zero(false); + return result; + } + } + } + return HValue::InferRange(); +} + + Range* HShl::InferRange() { if (right()->IsConstant()) { HConstant* c = HConstant::cast(right()); @@ -1238,6 +1277,7 @@ ? 
left()->range()->Copy() : new Range(); result->Shl(c->Integer32Value()); + result->set_can_be_minus_zero(false); return result; } } @@ -1285,7 +1325,7 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context, HValue* object, - ZoneMapList* types, + SmallMapList* types, Handle name) : types_(Min(types->length(), kMaxLoadPolymorphism)), name_(name), @@ -1349,6 +1389,20 @@ } +void HLoadNamedFieldPolymorphic::PrintDataTo(StringStream* stream) { + object()->PrintNameTo(stream); + stream->Add(" ."); + stream->Add(*String::cast(*name())->ToCString()); +} + + +void HLoadNamedGeneric::PrintDataTo(StringStream* stream) { + object()->PrintNameTo(stream); + stream->Add(" ."); + stream->Add(*String::cast(*name())->ToCString()); +} + + void HLoadKeyedFastElement::PrintDataTo(StringStream* stream) { object()->PrintNameTo(stream); stream->Add("["); @@ -1798,11 +1852,6 @@ } -void HBoundsCheck::Verify() { - HInstruction::Verify(); -} - - void HCheckSmi::Verify() { HInstruction::Verify(); ASSERT(HasNoUses()); @@ -1815,18 +1864,6 @@ } -void HCheckInstanceType::Verify() { - HInstruction::Verify(); - ASSERT(HasNoUses()); -} - - -void HCheckMap::Verify() { - HInstruction::Verify(); - ASSERT(HasNoUses()); -} - - void HCheckFunction::Verify() { HInstruction::Verify(); ASSERT(HasNoUses()); diff -Nru libv8-3.4.14.21/src/hydrogen-instructions.h libv8-3.5.10.24/src/hydrogen-instructions.h --- libv8-3.4.14.21/src/hydrogen-instructions.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/hydrogen-instructions.h 2011-08-24 12:02:41.000000000 +0000 @@ -104,8 +104,7 @@ V(Div) \ V(ElementsKind) \ V(EnterInlined) \ - V(ExternalArrayLength) \ - V(FixedArrayLength) \ + V(FixedArrayBaseLength) \ V(ForceRepresentation) \ V(FunctionLiteral) \ V(GetCachedArrayIndex) \ @@ -184,6 +183,7 @@ V(InobjectFields) \ V(BackingStoreFields) \ V(ArrayElements) \ + V(DoubleArrayElements) \ V(SpecializedArrayElements) \ V(GlobalVars) \ V(Maps) \ @@ -227,14 +227,20 @@ Range* next() const { return next_; } Range* CopyClearLower() const { return new Range(kMinInt, upper_); } Range* CopyClearUpper() const { return new Range(lower_, kMaxInt); } - Range* Copy() const { return new Range(lower_, upper_); } + Range* Copy() const { + Range* result = new Range(lower_, upper_); + result->set_can_be_minus_zero(CanBeMinusZero()); + return result; + } int32_t Mask() const; void set_can_be_minus_zero(bool b) { can_be_minus_zero_ = b; } bool CanBeMinusZero() const { return CanBeZero() && can_be_minus_zero_; } bool CanBeZero() const { return upper_ >= 0 && lower_ <= 0; } bool CanBeNegative() const { return lower_ < 0; } bool Includes(int value) const { return lower_ <= value && upper_ >= value; } - bool IsMostGeneric() const { return lower_ == kMinInt && upper_ == kMaxInt; } + bool IsMostGeneric() const { + return lower_ == kMinInt && upper_ == kMaxInt && CanBeMinusZero(); + } bool IsInSmiRange() const { return lower_ >= Smi::kMinValue && upper_ <= Smi::kMaxValue; } @@ -578,9 +584,9 @@ virtual bool IsConvertibleToInteger() const { return true; } HType type() const { return type_; } - void set_type(HType type) { - ASSERT(HasNoUses()); - type_ = type; + void set_type(HType new_type) { + ASSERT(new_type.IsSubtypeOf(type_)); + type_ = new_type; } // An operation needs to override this function iff: @@ -933,8 +939,12 @@ class HBranch: public HUnaryControlInstruction { public: - HBranch(HValue* value, HBasicBlock* true_target, HBasicBlock* false_target) - : HUnaryControlInstruction(value, true_target, false_target) { + HBranch(HValue* value, + 
HBasicBlock* true_target, + HBasicBlock* false_target, + ToBooleanStub::Types expected_input_types = ToBooleanStub::no_types()) + : HUnaryControlInstruction(value, true_target, false_target), + expected_input_types_(expected_input_types) { ASSERT(true_target != NULL && false_target != NULL); } explicit HBranch(HValue* value) @@ -945,7 +955,14 @@ return Representation::None(); } + ToBooleanStub::Types expected_input_types() const { + return expected_input_types_; + } + DECLARE_CONCRETE_INSTRUCTION(Branch) + + private: + ToBooleanStub::Types expected_input_types_; }; @@ -1089,10 +1106,6 @@ set_representation(to); SetFlag(kUseGVN); if (is_truncating) SetFlag(kTruncatingToInt32); - if (from.IsInteger32() && to.IsTagged() && value->range() != NULL && - value->range()->IsInSmiRange()) { - set_type(HType::Smi()); - } } virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited); @@ -1104,6 +1117,8 @@ return from_; } + virtual Range* InferRange(); + virtual void PrintDataTo(StringStream* stream); DECLARE_CONCRETE_INSTRUCTION(Change) @@ -1663,12 +1678,14 @@ }; -class HJSArrayLength: public HUnaryOperation { +class HJSArrayLength: public HTemplateInstruction<2> { public: - explicit HJSArrayLength(HValue* value) : HUnaryOperation(value) { + HJSArrayLength(HValue* value, HValue* typecheck) { // The length of an array is stored as a tagged value in the array // object. It is guaranteed to be 32 bit integer, but it can be // represented as either a smi or heap number. + SetOperandAt(0, value); + SetOperandAt(1, typecheck); set_representation(Representation::Tagged()); SetFlag(kUseGVN); SetFlag(kDependsOnArrayLengths); @@ -1679,6 +1696,8 @@ return Representation::Tagged(); } + HValue* value() { return OperandAt(0); } + DECLARE_CONCRETE_INSTRUCTION(JSArrayLength) protected: @@ -1686,9 +1705,9 @@ }; -class HFixedArrayLength: public HUnaryOperation { +class HFixedArrayBaseLength: public HUnaryOperation { public: - explicit HFixedArrayLength(HValue* value) : HUnaryOperation(value) { + explicit HFixedArrayBaseLength(HValue* value) : HUnaryOperation(value) { set_representation(Representation::Tagged()); SetFlag(kUseGVN); SetFlag(kDependsOnArrayLengths); @@ -1698,28 +1717,7 @@ return Representation::Tagged(); } - DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength) - - protected: - virtual bool DataEquals(HValue* other) { return true; } -}; - - -class HExternalArrayLength: public HUnaryOperation { - public: - explicit HExternalArrayLength(HValue* value) : HUnaryOperation(value) { - set_representation(Representation::Integer32()); - // The result of this instruction is idempotent as long as its inputs don't - // change. The length of a pixel array cannot change once set, so it's not - // necessary to introduce a kDependsOnArrayLengths or any other dependency. - SetFlag(kUseGVN); - } - - virtual Representation RequiredInputRepresentation(int index) const { - return Representation::Tagged(); - } - - DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength) + DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength) protected: virtual bool DataEquals(HValue* other) { return true; } @@ -1894,10 +1892,14 @@ }; -class HCheckMap: public HUnaryOperation { +class HCheckMap: public HTemplateInstruction<2> { public: - HCheckMap(HValue* value, Handle map) - : HUnaryOperation(value), map_(map) { + HCheckMap(HValue* value, Handle map, HValue* typecheck = NULL) + : map_(map) { + SetOperandAt(0, value); + // If callers don't depend on a typecheck, they can pass in NULL. 
In that + // case we use a copy of the |value| argument as a dummy value. + SetOperandAt(1, typecheck != NULL ? typecheck : value); set_representation(Representation::Tagged()); SetFlag(kUseGVN); SetFlag(kDependsOnMaps); @@ -1909,10 +1911,7 @@ virtual void PrintDataTo(StringStream* stream); virtual HType CalculateInferredType(); -#ifdef DEBUG - virtual void Verify(); -#endif - + HValue* value() { return OperandAt(0); } Handle map() const { return map_; } DECLARE_CONCRETE_INSTRUCTION(CheckMap) @@ -1980,10 +1979,6 @@ return Representation::Tagged(); } -#ifdef DEBUG - virtual void Verify(); -#endif - virtual HValue* Canonicalize(); bool is_interval_check() const { return check_ <= LAST_INTERVAL_CHECK; } @@ -2458,9 +2453,7 @@ return Representation::Integer32(); } -#ifdef DEBUG - virtual void Verify(); -#endif + virtual void PrintDataTo(StringStream* stream); HValue* index() { return OperandAt(0); } HValue* length() { return OperandAt(1); } @@ -3063,6 +3056,7 @@ HShr(HValue* context, HValue* left, HValue* right) : HBitwiseBinaryOperation(context, left, right) { } + virtual Range* InferRange(); virtual HType CalculateInferredType(); DECLARE_CONCRETE_INSTRUCTION(Shr) @@ -3415,12 +3409,12 @@ public: HLoadNamedFieldPolymorphic(HValue* context, HValue* object, - ZoneMapList* types, + SmallMapList* types, Handle name); HValue* context() { return OperandAt(0); } HValue* object() { return OperandAt(1); } - ZoneMapList* types() { return &types_; } + SmallMapList* types() { return &types_; } Handle name() { return name_; } bool need_generic() { return need_generic_; } @@ -3428,6 +3422,8 @@ return Representation::Tagged(); } + virtual void PrintDataTo(StringStream* stream); + DECLARE_CONCRETE_INSTRUCTION(LoadNamedFieldPolymorphic) static const int kMaxLoadPolymorphism = 4; @@ -3436,7 +3432,7 @@ virtual bool DataEquals(HValue* value); private: - ZoneMapList types_; + SmallMapList types_; Handle name_; bool need_generic_; }; @@ -3461,6 +3457,8 @@ return Representation::Tagged(); } + virtual void PrintDataTo(StringStream* stream); + DECLARE_CONCRETE_INSTRUCTION(LoadNamedGeneric) private: @@ -3527,7 +3525,7 @@ SetOperandAt(0, elements); SetOperandAt(1, key); set_representation(Representation::Double()); - SetFlag(kDependsOnArrayElements); + SetFlag(kDependsOnDoubleArrayElements); SetFlag(kUseGVN); } @@ -3745,7 +3743,7 @@ SetOperandAt(0, elements); SetOperandAt(1, key); SetOperandAt(2, val); - SetFlag(kChangesArrayElements); + SetFlag(kChangesDoubleArrayElements); } virtual Representation RequiredInputRepresentation(int index) const { diff -Nru libv8-3.4.14.21/src/ia32/assembler-ia32.cc libv8-3.5.10.24/src/ia32/assembler-ia32.cc --- libv8-3.4.14.21/src/ia32/assembler-ia32.cc 2011-06-15 10:58:27.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/assembler-ia32.cc 2011-08-15 13:01:23.000000000 +0000 @@ -1957,6 +1957,18 @@ } +void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) { + ASSERT(CpuFeatures::IsEnabled(SSE4_1)); + EnsureSpace ensure_space(this); + EMIT(0x66); + EMIT(0x0F); + EMIT(0x3A); + EMIT(0x0B); + emit_sse_operand(dst, src); + // Mask precision exeption. 
+ EMIT(static_cast(mode) | 0x8); +} + void Assembler::movmskpd(Register dst, XMMRegister src) { ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); diff -Nru libv8-3.4.14.21/src/ia32/assembler-ia32.h libv8-3.5.10.24/src/ia32/assembler-ia32.h --- libv8-3.4.14.21/src/ia32/assembler-ia32.h 2011-07-04 14:01:31.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/assembler-ia32.h 2011-08-15 13:01:23.000000000 +0000 @@ -941,6 +941,16 @@ void andpd(XMMRegister dst, XMMRegister src); void ucomisd(XMMRegister dst, XMMRegister src); + + enum RoundingMode { + kRoundToNearest = 0x0, + kRoundDown = 0x1, + kRoundUp = 0x2, + kRoundToZero = 0x3 + }; + + void roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode); + void movmskpd(Register dst, XMMRegister src); void cmpltsd(XMMRegister dst, XMMRegister src); diff -Nru libv8-3.4.14.21/src/ia32/builtins-ia32.cc libv8-3.5.10.24/src/ia32/builtins-ia32.cc --- libv8-3.4.14.21/src/ia32/builtins-ia32.cc 2011-06-20 15:33:18.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/builtins-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -373,7 +373,7 @@ __ LeaveConstructFrame(); // Remove caller arguments from the stack and return. - ASSERT(kSmiTagSize == 1 && kSmiTag == 0); + STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); __ pop(ecx); __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver __ push(ecx); @@ -923,7 +923,7 @@ // Fill the FixedArray with the hole value. Inline the code if short. // Reconsider loop unfolding if kPreallocatedArrayElements gets changed. static const int kLoopUnfoldLimit = 4; - ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit); + STATIC_ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit); if (initial_capacity <= kLoopUnfoldLimit) { // Use a scratch register here to have only one reloc info when unfolding // the loop. @@ -975,7 +975,7 @@ // Allocate the JSArray object together with space for a FixedArray with the // requested elements. - ASSERT(kSmiTagSize == 1 && kSmiTag == 0); + STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize, times_half_pointer_size, // array_size is a smi. array_size, @@ -1100,7 +1100,7 @@ __ bind(&argc_one_or_more); __ cmp(eax, 1); __ j(not_equal, &argc_two_or_more); - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize)); __ test(ecx, Operand(ecx)); __ j(not_zero, ¬_empty_array); @@ -1155,7 +1155,7 @@ // Handle construction of an array from a list of arguments. __ bind(&argc_two_or_more); - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); __ SmiTag(eax); // Convet argc to a smi. // eax: array_size (smi) // edi: constructor @@ -1437,7 +1437,7 @@ // Preserve the number of arguments on the stack. Must preserve eax, // ebx and ecx because these registers are used when copying the // arguments and the receiver. - ASSERT(kSmiTagSize == 1); + STATIC_ASSERT(kSmiTagSize == 1); __ lea(edi, Operand(eax, eax, times_1, kSmiTag)); __ push(edi); } @@ -1451,7 +1451,7 @@ __ leave(); // Remove caller arguments from the stack. 
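[Editor's note] The builtins-ia32.cc hunks above replace runtime ASSERTs on layout constants with compile-time STATIC_ASSERTs: kSmiTag == 0 and kSmiTagSize == 1 are exactly what justify scaling a smi-tagged argc with a times_2 operand when the caller's arguments are popped. A minimal standalone sketch of that idea follows; the macro and helper names are illustrative, not V8's definitions.

    #include <cstdio>

    // A compile-time assertion in the spirit of STATIC_ASSERT: a negative
    // array size makes the build fail if the condition is false.
    #define SKETCH_STATIC_ASSERT(cond) \
      typedef char sketch_static_assert[(cond) ? 1 : -1]

    static const int kSmiTag = 0;      // low bit of a smi is always 0
    static const int kSmiTagSize = 1;  // one tag bit, so a smi is value << 1

    SKETCH_STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);

    static int SmiTag(int value) { return value << kSmiTagSize; }
    static int SmiUntag(int smi) { return smi >> kSmiTagSize; }

    int main() {
      // Because of the single tag bit, a smi-tagged count is already the
      // byte count divided by half a pointer, which is why times_2 works.
      std::printf("argc 3 as a smi: %d\n", SmiTag(3));
      std::printf("untagged again: %d\n", SmiUntag(SmiTag(3)));
      return 0;
    }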
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0); + STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); __ pop(ecx); __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver __ push(ecx); diff -Nru libv8-3.4.14.21/src/ia32/code-stubs-ia32.cc libv8-3.5.10.24/src/ia32/code-stubs-ia32.cc --- libv8-3.4.14.21/src/ia32/code-stubs-ia32.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/code-stubs-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -236,69 +236,141 @@ } -// The stub returns zero for false, and a non-zero value for true. +// The stub expects its argument on the stack and returns its result in tos_: +// zero for false, and a non-zero value for true. void ToBooleanStub::Generate(MacroAssembler* masm) { - Label false_result, true_result, not_string; + Label patch; Factory* factory = masm->isolate()->factory(); + const Register argument = eax; const Register map = edx; - __ mov(eax, Operand(esp, 1 * kPointerSize)); + if (!types_.IsEmpty()) { + __ mov(argument, Operand(esp, 1 * kPointerSize)); + } // undefined -> false - __ cmp(eax, factory->undefined_value()); - __ j(equal, &false_result); + CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); // Boolean -> its value - __ cmp(eax, factory->false_value()); - __ j(equal, &false_result); - __ cmp(eax, factory->true_value()); - __ j(equal, &true_result); - - // Smis: 0 -> false, all other -> true - __ test(eax, Operand(eax)); - __ j(zero, &false_result); - __ JumpIfSmi(eax, &true_result); + CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false); + CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true); // 'null' -> false. - __ cmp(eax, factory->null_value()); - __ j(equal, &false_result, Label::kNear); + CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false); - // Get the map of the heap object. - __ mov(map, FieldOperand(eax, HeapObject::kMapOffset)); + if (types_.Contains(SMI)) { + // Smis: 0 -> false, all other -> true + Label not_smi; + __ JumpIfNotSmi(argument, ¬_smi, Label::kNear); + // argument contains the correct return value already. + if (!tos_.is(argument)) { + __ mov(tos_, argument); + } + __ ret(1 * kPointerSize); + __ bind(¬_smi); + } else if (types_.NeedsMap()) { + // If we need a map later and have a Smi -> patch. + __ JumpIfSmi(argument, &patch, Label::kNear); + } + + if (types_.NeedsMap()) { + __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); + + if (types_.CanBeUndetectable()) { + __ test_b(FieldOperand(map, Map::kBitFieldOffset), + 1 << Map::kIsUndetectable); + // Undetectable -> false. + Label not_undetectable; + __ j(zero, ¬_undetectable, Label::kNear); + __ Set(tos_, Immediate(0)); + __ ret(1 * kPointerSize); + __ bind(¬_undetectable); + } + } - // Undetectable -> false. - __ test_b(FieldOperand(map, Map::kBitFieldOffset), - 1 << Map::kIsUndetectable); - __ j(not_zero, &false_result, Label::kNear); - - // JavaScript object -> true. - __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); - __ j(above_equal, &true_result, Label::kNear); - - // String value -> false iff empty. - __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); - __ j(above_equal, ¬_string, Label::kNear); - __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0)); - __ j(zero, &false_result, Label::kNear); - __ jmp(&true_result, Label::kNear); + if (types_.Contains(SPEC_OBJECT)) { + // spec object -> true. + Label not_js_object; + __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); + __ j(below, ¬_js_object, Label::kNear); + // argument contains the correct return value already. 
+ if (!tos_.is(argument)) { + __ Set(tos_, Immediate(1)); + } + __ ret(1 * kPointerSize); + __ bind(¬_js_object); + } - __ bind(¬_string); - // HeapNumber -> false iff +0, -0, or NaN. - __ cmp(map, factory->heap_number_map()); - __ j(not_equal, &true_result, Label::kNear); - __ fldz(); - __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); - __ FCmp(); - __ j(zero, &false_result, Label::kNear); - // Fall through to |true_result|. - - // Return 1/0 for true/false in tos_. - __ bind(&true_result); - __ mov(tos_, 1); - __ ret(1 * kPointerSize); - __ bind(&false_result); - __ mov(tos_, 0); - __ ret(1 * kPointerSize); + if (types_.Contains(STRING)) { + // String value -> false iff empty. + Label not_string; + __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); + __ j(above_equal, ¬_string, Label::kNear); + __ mov(tos_, FieldOperand(argument, String::kLengthOffset)); + __ ret(1 * kPointerSize); // the string length is OK as the return value + __ bind(¬_string); + } + + if (types_.Contains(HEAP_NUMBER)) { + // heap number -> false iff +0, -0, or NaN. + Label not_heap_number, false_result; + __ cmp(map, factory->heap_number_map()); + __ j(not_equal, ¬_heap_number, Label::kNear); + __ fldz(); + __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset)); + __ FCmp(); + __ j(zero, &false_result, Label::kNear); + // argument contains the correct return value already. + if (!tos_.is(argument)) { + __ Set(tos_, Immediate(1)); + } + __ ret(1 * kPointerSize); + __ bind(&false_result); + __ Set(tos_, Immediate(0)); + __ ret(1 * kPointerSize); + __ bind(¬_heap_number); + } + + __ bind(&patch); + GenerateTypeTransition(masm); +} + + +void ToBooleanStub::CheckOddball(MacroAssembler* masm, + Type type, + Heap::RootListIndex value, + bool result) { + const Register argument = eax; + if (types_.Contains(type)) { + // If we see an expected oddball, return its ToBoolean value tos_. + Label different_value; + __ CompareRoot(argument, value); + __ j(not_equal, &different_value, Label::kNear); + if (!result) { + // If we have to return zero, there is no way around clearing tos_. + __ Set(tos_, Immediate(0)); + } else if (!tos_.is(argument)) { + // If we have to return non-zero, we can re-use the argument if it is the + // same register as the result, because we never see Smi-zero here. + __ Set(tos_, Immediate(1)); + } + __ ret(1 * kPointerSize); + __ bind(&different_value); + } +} + + +void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { + __ pop(ecx); // Get return address, operand is now on top of stack. + __ push(Immediate(Smi::FromInt(tos_.code()))); + __ push(Immediate(Smi::FromInt(types_.ToByte()))); + __ push(ecx); // Push return address. + // Patch the caller to an appropriate specialized stub and return the + // operation result to the caller of the stub. + __ TailCallExternalReference( + ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), + 3, + 1); } @@ -421,10 +493,10 @@ __ cmp(Operand(scratch2), Immediate(non_smi_exponent)); // If we have a match of the int32-but-not-Smi exponent then skip some // logic. - __ j(equal, &right_exponent); + __ j(equal, &right_exponent, Label::kNear); // If the exponent is higher than that then go to slow case. This catches // numbers that don't fit in a signed int32, infinities and NaNs. - __ j(less, &normal_exponent); + __ j(less, &normal_exponent, Label::kNear); { // Handle a big exponent. 
The only reason we have this code is that the @@ -453,9 +525,9 @@ __ or_(ecx, Operand(scratch2)); // We have the answer in ecx, but we may need to negate it. __ test(scratch, Operand(scratch)); - __ j(positive, &done); + __ j(positive, &done, Label::kNear); __ neg(ecx); - __ jmp(&done); + __ jmp(&done, Label::kNear); } __ bind(&normal_exponent); @@ -468,7 +540,7 @@ (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift; __ sub(Operand(scratch2), Immediate(zero_exponent)); // ecx already has a Smi zero. - __ j(less, &done); + __ j(less, &done, Label::kNear); // We have a shifted exponent between 0 and 30 in scratch2. __ shr(scratch2, HeapNumber::kExponentShift); @@ -693,7 +765,7 @@ Label slow_allocate_heapnumber, heapnumber_allocated; __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); - __ jmp(&heapnumber_allocated); + __ jmp(&heapnumber_allocated, Label::kNear); __ bind(&slow_allocate_heapnumber); __ EnterInternalFrame(); @@ -1370,14 +1442,14 @@ Register right = eax; // Test if left operand is a string. - __ JumpIfSmi(left, &call_runtime); + __ JumpIfSmi(left, &call_runtime, Label::kNear); __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); - __ j(above_equal, &call_runtime); + __ j(above_equal, &call_runtime, Label::kNear); // Test if right operand is a string. - __ JumpIfSmi(right, &call_runtime); + __ JumpIfSmi(right, &call_runtime, Label::kNear); __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); - __ j(above_equal, &call_runtime); + __ j(above_equal, &call_runtime, Label::kNear); StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); GenerateRegisterArgsPush(masm); @@ -1491,7 +1563,7 @@ } else { // Check if result fits in a smi. __ cmp(eax, 0xc0000000); - __ j(negative, &non_smi_result); + __ j(negative, &non_smi_result, Label::kNear); } // Tag smi result and return. __ SmiTag(eax); @@ -1705,7 +1777,7 @@ } else { // Check if result fits in a smi. __ cmp(eax, 0xc0000000); - __ j(negative, &non_smi_result); + __ j(negative, &non_smi_result, Label::kNear); } // Tag smi result and return. __ SmiTag(eax); @@ -1904,7 +1976,7 @@ } else { // Check if result fits in a smi. __ cmp(eax, 0xc0000000); - __ j(negative, &non_smi_result); + __ j(negative, &non_smi_result, Label::kNear); } // Tag smi result and return. __ SmiTag(eax); @@ -2379,7 +2451,7 @@ Label load_arg2, done; // Test if arg1 is a Smi. - __ JumpIfNotSmi(edx, &arg1_is_object); + __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear); __ SmiUntag(edx); __ jmp(&load_arg2); @@ -2405,7 +2477,7 @@ __ bind(&load_arg2); // Test if arg2 is a Smi. - __ JumpIfNotSmi(eax, &arg2_is_object); + __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear); __ SmiUntag(eax); __ mov(ecx, eax); @@ -2795,7 +2867,7 @@ // Check that the key is a smi. Label slow; - __ JumpIfNotSmi(edx, &slow); + __ JumpIfNotSmi(edx, &slow, Label::kNear); // Check if the calling frame is an arguments adaptor frame. Label adaptor; @@ -2808,7 +2880,7 @@ // through register eax. Use unsigned comparison to get negative // check for free. __ cmp(edx, Operand(eax)); - __ j(above_equal, &slow); + __ j(above_equal, &slow, Label::kNear); // Read the argument from the stack and return it. STATIC_ASSERT(kSmiTagSize == 1); @@ -2824,7 +2896,7 @@ __ bind(&adaptor); __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); __ cmp(edx, Operand(ecx)); - __ j(above_equal, &slow); + __ j(above_equal, &slow, Label::kNear); // Read the argument from the stack and return it. 
STATIC_ASSERT(kSmiTagSize == 1); @@ -3103,11 +3175,11 @@ __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); - __ j(equal, &adaptor_frame); + __ j(equal, &adaptor_frame, Label::kNear); // Get the length from the frame. __ mov(ecx, Operand(esp, 1 * kPointerSize)); - __ jmp(&try_allocate); + __ jmp(&try_allocate, Label::kNear); // Patch the arguments.length and the parameters pointer. __ bind(&adaptor_frame); @@ -3153,7 +3225,7 @@ // If there are no actual arguments, we're done. Label done; __ test(ecx, Operand(ecx)); - __ j(zero, &done); + __ j(zero, &done, Label::kNear); // Get the parameters pointer from the stack. __ mov(edx, Operand(esp, 2 * kPointerSize)); @@ -3299,6 +3371,8 @@ __ cmp(edx, Operand(eax)); __ j(greater, &runtime); + // Reset offset for possibly sliced string. + __ Set(edi, Immediate(0)); // ecx: RegExp data (FixedArray) // Check the representation and encoding of the subject string. Label seq_ascii_string, seq_two_byte_string, check_code; @@ -3309,36 +3383,45 @@ __ and_(ebx, kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); - __ j(zero, &seq_two_byte_string); + __ j(zero, &seq_two_byte_string, Label::kNear); // Any other flat string must be a flat ascii string. - __ test(Operand(ebx), + __ and_(Operand(ebx), Immediate(kIsNotStringMask | kStringRepresentationMask)); - __ j(zero, &seq_ascii_string); + __ j(zero, &seq_ascii_string, Label::kNear); - // Check for flat cons string. + // Check for flat cons string or sliced string. // A flat cons string is a cons string where the second part is the empty // string. In that case the subject string is just the first part of the cons // string. Also in this case the first part of the cons string is known to be // a sequential string or an external string. - STATIC_ASSERT(kExternalStringTag != 0); - STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); - __ test(Operand(ebx), - Immediate(kIsNotStringMask | kExternalStringTag)); - __ j(not_zero, &runtime); - // String is a cons string. - __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset)); - __ cmp(Operand(edx), factory->empty_string()); + // In the case of a sliced string its offset has to be taken into account. + Label cons_string, check_encoding; + STATIC_ASSERT(kConsStringTag < kExternalStringTag); + STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); + __ cmp(Operand(ebx), Immediate(kExternalStringTag)); + __ j(less, &cons_string); + __ j(equal, &runtime); + + // String is sliced. + __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); + __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); + // edi: offset of sliced string, smi-tagged. + // eax: parent string. + __ jmp(&check_encoding, Label::kNear); + // String is a cons string, check whether it is flat. + __ bind(&cons_string); + __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string()); __ j(not_equal, &runtime); __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset)); + __ bind(&check_encoding); __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); - // String is a cons string with empty second part. - // eax: first part of cons string. - // ebx: map of first part of cons string. - // Is first part a flat two byte string? + // eax: first part of cons string or parent of sliced string. 
+ // ebx: map of first part of cons string or map of parent of sliced string. + // Is first part of cons or parent of slice a flat two byte string? __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset), kStringRepresentationMask | kStringEncodingMask); STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); - __ j(zero, &seq_two_byte_string); + __ j(zero, &seq_two_byte_string, Label::kNear); // Any other flat string must be ascii. __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset), kStringRepresentationMask); @@ -3348,14 +3431,14 @@ // eax: subject string (flat ascii) // ecx: RegExp data (FixedArray) __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset)); - __ Set(edi, Immediate(1)); // Type is ascii. - __ jmp(&check_code); + __ Set(ecx, Immediate(1)); // Type is ascii. + __ jmp(&check_code, Label::kNear); __ bind(&seq_two_byte_string); // eax: subject string (flat two byte) // ecx: RegExp data (FixedArray) __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset)); - __ Set(edi, Immediate(0)); // Type is two byte. + __ Set(ecx, Immediate(0)); // Type is two byte. __ bind(&check_code); // Check that the irregexp code has been generated for the actual string @@ -3365,7 +3448,7 @@ // eax: subject string // edx: code - // edi: encoding of subject string (1 if ascii, 0 if two_byte); + // ecx: encoding of subject string (1 if ascii, 0 if two_byte); // Load used arguments before starting to push arguments for call to native // RegExp code to avoid handling changing stack height. __ mov(ebx, Operand(esp, kPreviousIndexOffset)); @@ -3374,7 +3457,7 @@ // eax: subject string // ebx: previous index // edx: code - // edi: encoding of subject string (1 if ascii 0 if two_byte); + // ecx: encoding of subject string (1 if ascii 0 if two_byte); // All checks done. Now push arguments for native regexp code. Counters* counters = masm->isolate()->counters(); __ IncrementCounter(counters->regexp_entry_native(), 1); @@ -3391,23 +3474,47 @@ __ mov(Operand(esp, 6 * kPointerSize), Immediate(1)); // Argument 6: Start (high end) of backtracking stack memory area. - __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address)); - __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); - __ mov(Operand(esp, 5 * kPointerSize), ecx); + __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address)); + __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size)); + __ mov(Operand(esp, 5 * kPointerSize), esi); // Argument 5: static offsets vector buffer. __ mov(Operand(esp, 4 * kPointerSize), Immediate(ExternalReference::address_of_static_offsets_vector( masm->isolate()))); + // Argument 2: Previous index. + __ mov(Operand(esp, 1 * kPointerSize), ebx); + + // Argument 1: Original subject string. + // The original subject is in the previous stack frame. Therefore we have to + // use ebp, which points exactly to one pointer size below the previous esp. + // (Because creating a new stack frame pushes the previous ebp onto the stack + // and thereby moves up esp by one kPointerSize.) + __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize)); + __ mov(Operand(esp, 0 * kPointerSize), esi); + + // esi: original subject string + // eax: underlying subject string + // ebx: previous index + // ecx: encoding of subject string (1 if ascii 0 if two_byte); + // edx: code // Argument 4: End of string data // Argument 3: Start of string data + // Prepare start and end index of the input. + // Load the length from the original sliced string if that is the case. 
+ __ mov(esi, FieldOperand(esi, String::kLengthOffset)); + __ add(esi, Operand(edi)); // Calculate input end wrt offset. + __ SmiUntag(edi); + __ add(ebx, Operand(edi)); // Calculate input start wrt offset. + + // ebx: start index of the input string + // esi: end index of the input string Label setup_two_byte, setup_rest; - __ test(edi, Operand(edi)); - __ mov(edi, FieldOperand(eax, String::kLengthOffset)); + __ test(ecx, Operand(ecx)); __ j(zero, &setup_two_byte, Label::kNear); - __ SmiUntag(edi); - __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize)); + __ SmiUntag(esi); + __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize)); __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize)); __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. @@ -3415,20 +3522,14 @@ __ bind(&setup_two_byte); STATIC_ASSERT(kSmiTag == 0); - STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2). - __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize)); + STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2). + __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize)); __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. __ bind(&setup_rest); - // Argument 2: Previous index. - __ mov(Operand(esp, 1 * kPointerSize), ebx); - - // Argument 1: Subject string. - __ mov(Operand(esp, 0 * kPointerSize), eax); - // Locate the code entry and call it. __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); __ call(Operand(edx)); @@ -3467,7 +3568,7 @@ // by javascript code. __ cmp(eax, factory->termination_exception()); Label throw_termination_exception; - __ j(equal, &throw_termination_exception); + __ j(equal, &throw_termination_exception, Label::kNear); // Handle normal exception by following handler chain. __ Throw(eax); @@ -3750,16 +3851,16 @@ void CompareStub::Generate(MacroAssembler* masm) { ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); - Label check_unequal_objects, done; + Label check_unequal_objects; // Compare two smis if required. if (include_smi_compare_) { Label non_smi, smi_done; __ mov(ecx, Operand(edx)); __ or_(ecx, Operand(eax)); - __ JumpIfNotSmi(ecx, &non_smi); + __ JumpIfNotSmi(ecx, &non_smi, Label::kNear); __ sub(edx, Operand(eax)); // Return on the result of the subtraction. - __ j(no_overflow, &smi_done); + __ j(no_overflow, &smi_done, Label::kNear); __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. __ bind(&smi_done); __ mov(eax, edx); @@ -3881,7 +3982,7 @@ __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), Immediate(masm->isolate()->factory()->heap_number_map())); // If heap number, handle it in the slow case. - __ j(equal, &slow); + __ j(equal, &slow, Label::kNear); // Return non-equal (ebx is not zero) __ mov(eax, ebx); __ ret(0); @@ -3932,7 +4033,7 @@ __ ucomisd(xmm0, xmm1); // Don't base result on EFLAGS when a NaN is involved. - __ j(parity_even, &unordered); + __ j(parity_even, &unordered, Label::kNear); // Return a result of -1, 0, or 1, based on EFLAGS. __ mov(eax, 0); // equal __ mov(ecx, Immediate(Smi::FromInt(1))); @@ -3948,12 +4049,12 @@ __ FCmp(); // Don't base result on EFLAGS when a NaN is involved. 
- __ j(parity_even, &unordered); + __ j(parity_even, &unordered, Label::kNear); Label below_label, above_label; // Return a result of -1, 0, or 1, based on EFLAGS. - __ j(below, &below_label); - __ j(above, &above_label); + __ j(below, &below_label, Label::kNear); + __ j(above, &above_label, Label::kNear); __ Set(eax, Immediate(0)); __ ret(0); @@ -4268,7 +4369,7 @@ // If the returned exception is RETRY_AFTER_GC continue at retry label STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); - __ j(zero, &retry); + __ j(zero, &retry, Label::kNear); // Special handling of out of memory exceptions. __ cmp(eax, reinterpret_cast(Failure::OutOfMemoryException())); @@ -4388,11 +4489,11 @@ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, masm->isolate()); __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); - __ j(not_equal, ¬_outermost_js); + __ j(not_equal, ¬_outermost_js, Label::kNear); __ mov(Operand::StaticVariable(js_entry_sp), ebp); __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); Label cont; - __ jmp(&cont); + __ jmp(&cont, Label::kNear); __ bind(¬_outermost_js); __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); __ bind(&cont); @@ -4633,26 +4734,26 @@ __ bind(¬_js_object); // Before null, smi and string value checks, check that the rhs is a function // as for a non-function rhs an exception needs to be thrown. - __ JumpIfSmi(function, &slow); + __ JumpIfSmi(function, &slow, Label::kNear); __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); - __ j(not_equal, &slow); + __ j(not_equal, &slow, Label::kNear); // Null is not instance of anything. __ cmp(object, factory->null_value()); - __ j(not_equal, &object_not_null); + __ j(not_equal, &object_not_null, Label::kNear); __ Set(eax, Immediate(Smi::FromInt(1))); __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); __ bind(&object_not_null); // Smi values is not instance of anything. - __ JumpIfNotSmi(object, &object_not_null_or_smi); + __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear); __ Set(eax, Immediate(Smi::FromInt(1))); __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); __ bind(&object_not_null_or_smi); // String values is not instance of anything. Condition is_string = masm->IsObjectStringType(object, scratch, scratch); - __ j(NegateCondition(is_string), &slow); + __ j(NegateCondition(is_string), &slow, Label::kNear); __ Set(eax, Immediate(Smi::FromInt(1))); __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); @@ -4739,6 +4840,7 @@ Label flat_string; Label ascii_string; Label got_char_code; + Label sliced_string; // If the receiver is a smi trigger the non-string case. STATIC_ASSERT(kSmiTag == 0); @@ -4769,31 +4871,45 @@ __ j(zero, &flat_string); // Handle non-flat strings. - __ test(result_, Immediate(kIsConsStringMask)); - __ j(zero, &call_runtime_); + __ and_(result_, kStringRepresentationMask); + STATIC_ASSERT(kConsStringTag < kExternalStringTag); + STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); + __ cmp(result_, kExternalStringTag); + __ j(greater, &sliced_string, Label::kNear); + __ j(equal, &call_runtime_); // ConsString. // Check whether the right hand side is the empty string (i.e. if // this is really a flat string in a cons string). If that is not // the case we would rather go to the runtime system now to flatten // the string. 
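[Editor's note] The string stubs in this region start distinguishing a third representation: alongside sequential and cons strings there are now sliced strings, which record a parent string plus an offset, so index computations must add that offset before touching character data. A plain-C++ model of the three shapes and of that index adjustment follows; the type and field names are stand-ins for the V8 classes, not their real layout.

    #include <cstdio>
    #include <string>

    struct SeqString { std::string chars; };

    struct ConsString {           // first + second; flat iff second is empty
      const SeqString* first;
      const SeqString* second;
    };

    struct SlicedString {         // a (parent, offset, length) view
      const SeqString* parent;
      int offset;
      int length;
    };

    // A flat cons string forwards to its first part; a non-flat one would be
    // flattened in the runtime, as the stub's comment above explains.
    char CharAtFlatCons(const ConsString& s, int index) {
      return s.first->chars[index];
    }

    // A sliced string forwards to its parent with the slice offset added,
    // the same adjustment the stub performs with the offset field.
    char CharAtSliced(const SlicedString& s, int index) {
      return s.parent->chars[s.offset + index];
    }

    int main() {
      SeqString parent = { "hello world" };
      SeqString empty = { "" };
      ConsString cons = { &parent, &empty };
      SlicedString slice = { &parent, 6, 5 };   // the "world" suffix
      std::printf("%c %c\n", CharAtFlatCons(cons, 4), CharAtSliced(slice, 0));
      return 0;
    }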
+ Label assure_seq_string; __ cmp(FieldOperand(object_, ConsString::kSecondOffset), Immediate(masm->isolate()->factory()->empty_string())); __ j(not_equal, &call_runtime_); // Get the first of the two strings and load its instance type. __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset)); + __ jmp(&assure_seq_string, Label::kNear); + + // SlicedString, unpack and add offset. + __ bind(&sliced_string); + __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset)); + __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset)); + + // Assure that we are dealing with a sequential string. Go to runtime if not. + __ bind(&assure_seq_string); __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); - // If the first cons component is also non-flat, then go to runtime. STATIC_ASSERT(kSeqStringTag == 0); __ test(result_, Immediate(kStringRepresentationMask)); __ j(not_zero, &call_runtime_); + __ jmp(&flat_string, Label::kNear); // Check for 1-byte or 2-byte string. __ bind(&flat_string); STATIC_ASSERT(kAsciiStringTag != 0); __ test(result_, Immediate(kStringEncodingMask)); - __ j(not_zero, &ascii_string); + __ j(not_zero, &ascii_string, Label::kNear); // 2-byte string. // Load the 2-byte character code into the result register. @@ -4801,7 +4917,7 @@ __ movzx_w(result_, FieldOperand(object_, scratch_, times_1, // Scratch is smi-tagged. SeqTwoByteString::kHeaderSize)); - __ jmp(&got_char_code); + __ jmp(&got_char_code, Label::kNear); // ASCII string. // Load the byte into the result register. @@ -5113,6 +5229,8 @@ __ and_(ecx, kStringRepresentationMask); __ cmp(ecx, kExternalStringTag); __ j(equal, &string_add_runtime); + // We cannot encounter sliced strings here since: + STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); // Now check if both strings are ascii strings. // eax: first string // ebx: length of resulting flat string as a smi @@ -5585,7 +5703,83 @@ __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); __ Set(ecx, Immediate(2)); - __ bind(&result_longer_than_two); + if (FLAG_string_slices) { + Label copy_routine; + // If coming from the make_two_character_string path, the string + // is too short to be sliced anyways. + STATIC_ASSERT(2 < SlicedString::kMinLength); + __ jmp(©_routine); + __ bind(&result_longer_than_two); + + // eax: string + // ebx: instance type + // ecx: sub string length + // edx: from index (smi) + Label allocate_slice, sliced_string, seq_string; + __ cmp(ecx, SlicedString::kMinLength); + // Short slice. Copy instead of slicing. + __ j(less, ©_routine); + STATIC_ASSERT(kSeqStringTag == 0); + __ test(ebx, Immediate(kStringRepresentationMask)); + __ j(zero, &seq_string, Label::kNear); + STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); + STATIC_ASSERT(kIsIndirectStringMask != 0); + __ test(ebx, Immediate(kIsIndirectStringMask)); + // External string. Jump to runtime. + __ j(zero, &runtime); + + Factory* factory = masm->isolate()->factory(); + __ test(ebx, Immediate(kSlicedNotConsMask)); + __ j(not_zero, &sliced_string, Label::kNear); + // Cons string. Check whether it is flat, then fetch first part. + __ cmp(FieldOperand(eax, ConsString::kSecondOffset), + factory->empty_string()); + __ j(not_equal, &runtime); + __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset)); + __ jmp(&allocate_slice, Label::kNear); + + __ bind(&sliced_string); + // Sliced string. Fetch parent and correct start index by offset. 
+ __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset)); + __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset)); + __ jmp(&allocate_slice, Label::kNear); + + __ bind(&seq_string); + // Sequential string. Just move string to the right register. + __ mov(edi, eax); + + __ bind(&allocate_slice); + // edi: underlying subject string + // ebx: instance type of original subject string + // edx: offset + // ecx: length + // Allocate new sliced string. At this point we do not reload the instance + // type including the string encoding because we simply rely on the info + // provided by the original string. It does not matter if the original + // string's encoding is wrong because we always have to recheck encoding of + // the newly created string's parent anyways due to externalized strings. + Label two_byte_slice, set_slice_header; + STATIC_ASSERT(kAsciiStringTag != 0); + __ test(ebx, Immediate(kAsciiStringTag)); + __ j(zero, &two_byte_slice, Label::kNear); + __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime); + __ jmp(&set_slice_header, Label::kNear); + __ bind(&two_byte_slice); + __ AllocateSlicedString(eax, ebx, no_reg, &runtime); + __ bind(&set_slice_header); + __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx); + __ SmiTag(ecx); + __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx); + __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi); + __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset), + Immediate(String::kEmptyHashField)); + __ jmp(&return_eax); + + __ bind(©_routine); + } else { + __ bind(&result_longer_than_two); + } + // eax: string // ebx: instance type // ecx: result string length diff -Nru libv8-3.4.14.21/src/ia32/cpu-ia32.cc libv8-3.5.10.24/src/ia32/cpu-ia32.cc --- libv8-3.4.14.21/src/ia32/cpu-ia32.cc 2011-04-04 08:25:31.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/cpu-ia32.cc 2011-07-25 11:05:35.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,7 +67,8 @@ // solution is to run valgrind with --smc-check=all, but this comes at a big // performance cost. We can notify valgrind to invalidate its cache. 
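[Editor's note] The SubString stub changes above (guarded by FLAG_string_slices) only create a sliced string when the requested substring is long enough; results shorter than SlicedString::kMinLength are still copied character by character. A sketch of that copy-versus-slice policy in plain C++ follows; kMinLength is an illustrative stand-in value and the types are not V8's.

    #include <cstdio>
    #include <string>

    static const int kMinLength = 13;   // stand-in for SlicedString::kMinLength

    struct Slice {
      const std::string* parent;
      int offset;
      int length;
    };

    // Returns true if the result is a slice, false if characters were copied.
    bool MakeSubString(const std::string& parent, int from, int to,
                       std::string* copied, Slice* sliced) {
      int length = to - from;
      if (length < kMinLength) {
        *copied = parent.substr(from, length);   // short result: copy
        return false;
      }
      sliced->parent = &parent;                  // long result: share parent
      sliced->offset = from;
      sliced->length = length;
      return true;
    }

    int main() {
      std::string s = "the quick brown fox jumps over the lazy dog";
      std::string copied;
      Slice slice = { 0, 0, 0 };
      bool is_slice = MakeSubString(s, 4, 9, &copied, &slice);   // "quick"
      std::printf("short substring sliced? %d (%s)\n", is_slice, copied.c_str());
      is_slice = MakeSubString(s, 10, 43, &copied, &slice);      // long tail
      std::printf("long substring sliced? %d (offset %d, length %d)\n",
                  is_slice, slice.offset, slice.length);
      return 0;
    }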
#ifdef VALGRIND_DISCARD_TRANSLATIONS - VALGRIND_DISCARD_TRANSLATIONS(start, size); + unsigned res = VALGRIND_DISCARD_TRANSLATIONS(start, size); + USE(res); #endif } diff -Nru libv8-3.4.14.21/src/ia32/deoptimizer-ia32.cc libv8-3.5.10.24/src/ia32/deoptimizer-ia32.cc --- libv8-3.4.14.21/src/ia32/deoptimizer-ia32.cc 2011-07-04 14:01:31.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/deoptimizer-ia32.cc 2011-08-10 11:27:35.000000000 +0000 @@ -37,7 +37,7 @@ namespace v8 { namespace internal { -int Deoptimizer::table_entry_size_ = 10; +const int Deoptimizer::table_entry_size_ = 10; int Deoptimizer::patch_size() { @@ -601,8 +601,6 @@ output_frame->SetContinuation( reinterpret_cast(continuation->entry())); } - - if (output_count_ - 1 == frame_index) iterator->Done(); } diff -Nru libv8-3.4.14.21/src/ia32/disasm-ia32.cc libv8-3.5.10.24/src/ia32/disasm-ia32.cc --- libv8-3.4.14.21/src/ia32/disasm-ia32.cc 2011-05-04 12:43:48.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/disasm-ia32.cc 2011-08-15 13:01:23.000000000 +0000 @@ -54,7 +54,7 @@ }; -static ByteMnemonic two_operands_instr[] = { +static const ByteMnemonic two_operands_instr[] = { {0x03, "add", REG_OPER_OP_ORDER}, {0x09, "or", OPER_REG_OP_ORDER}, {0x0B, "or", REG_OPER_OP_ORDER}, @@ -79,7 +79,7 @@ }; -static ByteMnemonic zero_operands_instr[] = { +static const ByteMnemonic zero_operands_instr[] = { {0xC3, "ret", UNSET_OP_ORDER}, {0xC9, "leave", UNSET_OP_ORDER}, {0x90, "nop", UNSET_OP_ORDER}, @@ -98,14 +98,14 @@ }; -static ByteMnemonic call_jump_instr[] = { +static const ByteMnemonic call_jump_instr[] = { {0xE8, "call", UNSET_OP_ORDER}, {0xE9, "jmp", UNSET_OP_ORDER}, {-1, "", UNSET_OP_ORDER} }; -static ByteMnemonic short_immediate_instr[] = { +static const ByteMnemonic short_immediate_instr[] = { {0x05, "add", UNSET_OP_ORDER}, {0x0D, "or", UNSET_OP_ORDER}, {0x15, "adc", UNSET_OP_ORDER}, @@ -117,7 +117,7 @@ }; -static const char* jump_conditional_mnem[] = { +static const char* const jump_conditional_mnem[] = { /*0*/ "jo", "jno", "jc", "jnc", /*4*/ "jz", "jnz", "jna", "ja", /*8*/ "js", "jns", "jpe", "jpo", @@ -125,7 +125,7 @@ }; -static const char* set_conditional_mnem[] = { +static const char* const set_conditional_mnem[] = { /*0*/ "seto", "setno", "setc", "setnc", /*4*/ "setz", "setnz", "setna", "seta", /*8*/ "sets", "setns", "setpe", "setpo", @@ -133,7 +133,7 @@ }; -static const char* conditional_move_mnem[] = { +static const char* const conditional_move_mnem[] = { /*0*/ "cmovo", "cmovno", "cmovc", "cmovnc", /*4*/ "cmovz", "cmovnz", "cmovna", "cmova", /*8*/ "cmovs", "cmovns", "cmovpe", "cmovpo", @@ -169,7 +169,7 @@ InstructionDesc instructions_[256]; void Clear(); void Init(); - void CopyTable(ByteMnemonic bm[], InstructionType type); + void CopyTable(const ByteMnemonic bm[], InstructionType type); void SetTableRange(InstructionType type, byte start, byte end, @@ -208,7 +208,8 @@ } -void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) { +void InstructionTable::CopyTable(const ByteMnemonic bm[], + InstructionType type) { for (int i = 0; bm[i].b >= 0; i++) { InstructionDesc* id = &instructions_[bm[i].b]; id->mnem = bm[i].mnem; @@ -1140,7 +1141,17 @@ } } else if (*data == 0x3A) { data++; - if (*data == 0x16) { + if (*data == 0x0B) { + data++; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + int8_t imm8 = static_cast(data[1]); + AppendToBuffer("roundsd %s,%s,%d", + NameOfXMMRegister(regop), + NameOfXMMRegister(rm), + static_cast(imm8)); + data += 2; + } else if (*data == 0x16) { data++; int mod, regop, rm; get_modrm(*data, 
&mod, ®op, &rm); diff -Nru libv8-3.4.14.21/src/ia32/frames-ia32.h libv8-3.5.10.24/src/ia32/frames-ia32.h --- libv8-3.4.14.21/src/ia32/frames-ia32.h 2011-05-16 12:14:13.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/frames-ia32.h 2011-08-15 13:01:23.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -58,10 +58,11 @@ class StackHandlerConstants : public AllStatic { public: - static const int kNextOffset = 0 * kPointerSize; - static const int kFPOffset = 1 * kPointerSize; - static const int kStateOffset = 2 * kPointerSize; - static const int kPCOffset = 3 * kPointerSize; + static const int kNextOffset = 0 * kPointerSize; + static const int kContextOffset = 1 * kPointerSize; + static const int kFPOffset = 2 * kPointerSize; + static const int kStateOffset = 3 * kPointerSize; + static const int kPCOffset = 4 * kPointerSize; static const int kSize = kPCOffset + kPointerSize; }; diff -Nru libv8-3.4.14.21/src/ia32/full-codegen-ia32.cc libv8-3.5.10.24/src/ia32/full-codegen-ia32.cc --- libv8-3.4.14.21/src/ia32/full-codegen-ia32.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/full-codegen-ia32.cc 2011-10-18 08:10:33.000000000 +0000 @@ -46,7 +46,6 @@ static unsigned GetPropertyId(Property* property) { - if (property->is_synthetic()) return AstNode::kNoNumber; return property->id(); } @@ -166,6 +165,11 @@ } } + set_stack_height(2 + scope()->num_stack_slots()); + if (FLAG_verify_stack_height) { + verify_stack_height(); + } + bool function_in_register = true; // Possibly allocate a local context. @@ -358,6 +362,15 @@ } +void FullCodeGenerator::verify_stack_height() { + ASSERT(FLAG_verify_stack_height); + __ sub(Operand(ebp), Immediate(kPointerSize * stack_height())); + __ cmp(ebp, Operand(esp)); + __ Assert(equal, "Full codegen stack height not as expected."); + __ add(Operand(ebp), Immediate(kPointerSize * stack_height())); +} + + void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { } @@ -372,6 +385,7 @@ MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); // Memory operands can be pushed directly. __ push(slot_operand); + codegen()->increment_stack_height(); } @@ -425,6 +439,7 @@ } else { __ push(Immediate(lit)); } + codegen()->increment_stack_height(); } @@ -462,6 +477,7 @@ Register reg) const { ASSERT(count > 0); __ Drop(count); + codegen()->decrement_stack_height(count); } @@ -471,6 +487,7 @@ ASSERT(count > 0); __ Drop(count); __ Move(result_register(), reg); + codegen()->decrement_stack_height(count); } @@ -479,6 +496,7 @@ ASSERT(count > 0); if (count > 1) __ Drop(count - 1); __ mov(Operand(esp, 0), reg); + codegen()->decrement_stack_height(count - 1); } @@ -490,6 +508,7 @@ __ Move(result_register(), reg); codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); codegen()->DoTest(this); + codegen()->decrement_stack_height(count); } @@ -523,6 +542,7 @@ __ bind(materialize_false); __ push(Immediate(isolate()->factory()->false_value())); __ bind(&done); + codegen()->increment_stack_height(); } @@ -550,6 +570,7 @@ ? 
isolate()->factory()->true_value() : isolate()->factory()->false_value(); __ push(Immediate(value)); + codegen()->increment_stack_height(); } @@ -572,7 +593,7 @@ Label* fall_through) { ToBooleanStub stub(result_register()); __ push(result_register()); - __ CallStub(&stub); + __ CallStub(&stub, condition->test_id()); __ test(result_register(), Operand(result_register())); // The stub returns nonzero for true. Split(not_zero, if_true, if_false, fall_through); @@ -668,97 +689,73 @@ Comment cmnt(masm_, "[ Declaration"); ASSERT(variable != NULL); // Must have been resolved. Slot* slot = variable->AsSlot(); - Property* prop = variable->AsProperty(); - - if (slot != NULL) { - switch (slot->type()) { - case Slot::PARAMETER: - case Slot::LOCAL: - if (mode == Variable::CONST) { - __ mov(Operand(ebp, SlotOffset(slot)), - Immediate(isolate()->factory()->the_hole_value())); - } else if (function != NULL) { - VisitForAccumulatorValue(function); - __ mov(Operand(ebp, SlotOffset(slot)), result_register()); - } - break; - - case Slot::CONTEXT: - // We bypass the general EmitSlotSearch because we know more about - // this specific context. - - // The variable in the decl always resides in the current function - // context. - ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); - if (FLAG_debug_code) { - // Check that we're not inside a with or catch context. - __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset)); - __ cmp(ebx, isolate()->factory()->with_context_map()); - __ Check(not_equal, "Declaration in with context."); - __ cmp(ebx, isolate()->factory()->catch_context_map()); - __ Check(not_equal, "Declaration in catch context."); - } - if (mode == Variable::CONST) { - __ mov(ContextOperand(esi, slot->index()), - Immediate(isolate()->factory()->the_hole_value())); - // No write barrier since the hole value is in old space. - } else if (function != NULL) { - VisitForAccumulatorValue(function); - __ mov(ContextOperand(esi, slot->index()), result_register()); - int offset = Context::SlotOffset(slot->index()); - __ mov(ebx, esi); - __ RecordWrite(ebx, offset, result_register(), ecx); - } - break; - - case Slot::LOOKUP: { - __ push(esi); - __ push(Immediate(variable->name())); - // Declaration nodes are always introduced in one of two modes. - ASSERT(mode == Variable::VAR || mode == Variable::CONST); - PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY; - __ push(Immediate(Smi::FromInt(attr))); - // Push initial value, if any. - // Note: For variables we must not push an initial value (such as - // 'undefined') because we may have a (legal) redeclaration and we - // must not destroy the current value. - if (mode == Variable::CONST) { - __ push(Immediate(isolate()->factory()->the_hole_value())); - } else if (function != NULL) { - VisitForStackValue(function); - } else { - __ push(Immediate(Smi::FromInt(0))); // No initial value! - } - __ CallRuntime(Runtime::kDeclareContextSlot, 4); - break; + ASSERT(slot != NULL); + switch (slot->type()) { + case Slot::PARAMETER: + case Slot::LOCAL: + if (function != NULL) { + VisitForAccumulatorValue(function); + __ mov(Operand(ebp, SlotOffset(slot)), result_register()); + } else if (mode == Variable::CONST || mode == Variable::LET) { + __ mov(Operand(ebp, SlotOffset(slot)), + Immediate(isolate()->factory()->the_hole_value())); } - } + break; - } else if (prop != NULL) { - // A const declaration aliasing a parameter is an illegal redeclaration. 
- ASSERT(mode != Variable::CONST); - if (function != NULL) { - // We are declaring a function that rewrites to a property. - // Use (keyed) IC to set the initial value. We cannot visit the - // rewrite because it's shared and we risk recording duplicate AST - // IDs for bailouts from optimized code. - ASSERT(prop->obj()->AsVariableProxy() != NULL); - { AccumulatorValueContext for_object(this); - EmitVariableLoad(prop->obj()->AsVariableProxy()); + case Slot::CONTEXT: + // We bypass the general EmitSlotSearch because we know more about + // this specific context. + + // The variable in the decl always resides in the current function + // context. + ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); + if (FLAG_debug_code) { + // Check that we're not inside a with or catch context. + __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset)); + __ cmp(ebx, isolate()->factory()->with_context_map()); + __ Check(not_equal, "Declaration in with context."); + __ cmp(ebx, isolate()->factory()->catch_context_map()); + __ Check(not_equal, "Declaration in catch context."); } + if (function != NULL) { + VisitForAccumulatorValue(function); + __ mov(ContextOperand(esi, slot->index()), result_register()); + int offset = Context::SlotOffset(slot->index()); + __ mov(ebx, esi); + __ RecordWrite(ebx, offset, result_register(), ecx); + } else if (mode == Variable::CONST || mode == Variable::LET) { + __ mov(ContextOperand(esi, slot->index()), + Immediate(isolate()->factory()->the_hole_value())); + // No write barrier since the hole value is in old space. + } + break; - __ push(eax); - VisitForAccumulatorValue(function); - __ pop(edx); - - ASSERT(prop->key()->AsLiteral() != NULL && - prop->key()->AsLiteral()->handle()->IsSmi()); - __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle())); - - Handle ic = is_strict_mode() - ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() - : isolate()->builtins()->KeyedStoreIC_Initialize(); - __ call(ic); + case Slot::LOOKUP: { + __ push(esi); + __ push(Immediate(variable->name())); + // Declaration nodes are always introduced in one of two modes. + ASSERT(mode == Variable::VAR || + mode == Variable::CONST || + mode == Variable::LET); + PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; + __ push(Immediate(Smi::FromInt(attr))); + // Push initial value, if any. + // Note: For variables we must not push an initial value (such as + // 'undefined') because we may have a (legal) redeclaration and we + // must not destroy the current value. + increment_stack_height(3); + if (function != NULL) { + VisitForStackValue(function); + } else if (mode == Variable::CONST || mode == Variable::LET) { + __ push(Immediate(isolate()->factory()->the_hole_value())); + increment_stack_height(); + } else { + __ push(Immediate(Smi::FromInt(0))); // No initial value! + increment_stack_height(); + } + __ CallRuntime(Runtime::kDeclareContextSlot, 4); + decrement_stack_height(4); + break; } } } @@ -785,6 +782,7 @@ Breakable nested_statement(this, stmt); SetStatementPosition(stmt); + int switch_clause_stack_height = stack_height(); // Keep the switch value on the stack until a case matches. VisitForStackValue(stmt->tag()); PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); @@ -844,11 +842,12 @@ __ bind(&next_test); __ Drop(1); // Switch value is no longer needed. 
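[Editor's note] Throughout the full-codegen-ia32.cc hunks, the diff threads increment_stack_height()/decrement_stack_height() calls next to every push and drop, and verify_stack_height() compares the tracked count against the real frame when FLAG_verify_stack_height is set. A tiny standalone model of that bookkeeping follows; the class and method names are illustrative, not V8's.

    #include <cassert>
    #include <cstdio>
    #include <vector>

    class StackTracker {
     public:
      StackTracker() : height_(0) {}
      void Push(int value) {
        stack_.push_back(value);
        ++height_;                       // increment_stack_height()
      }
      int Pop() {
        int value = stack_.back();
        stack_.pop_back();
        --height_;                       // decrement_stack_height()
        return value;
      }
      void VerifyHeight() const {
        // Analogue of verify_stack_height(): tracked vs. actual depth.
        assert(height_ == static_cast<int>(stack_.size()));
      }
     private:
      int height_;
      std::vector<int> stack_;
    };

    int main() {
      StackTracker tracker;
      tracker.Push(1);
      tracker.Push(2);
      tracker.VerifyHeight();
      int a = tracker.Pop();
      int b = tracker.Pop();
      tracker.VerifyHeight();
      std::printf("popped %d, then %d\n", a, b);
      return 0;
    }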
if (default_clause == NULL) { - __ jmp(nested_statement.break_target()); + __ jmp(nested_statement.break_label()); } else { __ jmp(default_clause->body_target()); } + set_stack_height(switch_clause_stack_height); // Compile all the case bodies. for (int i = 0; i < clauses->length(); i++) { Comment cmnt(masm_, "[ Case body"); @@ -858,7 +857,7 @@ VisitStatements(clause->statements()); } - __ bind(nested_statement.break_target()); + __ bind(nested_statement.break_label()); PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); } @@ -890,6 +889,7 @@ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); __ bind(&done_convert); __ push(eax); + increment_stack_height(); // Check cache validity in generated code. This is a fast case for // the JSObject::IsSimpleEnum cache validity checks. If we cannot @@ -973,11 +973,13 @@ __ push(eax); // Fixed array length (as smi). __ push(Immediate(Smi::FromInt(0))); // Initial index. + // 1 ~ The object has already been pushed. + increment_stack_height(ForIn::kElementCount - 1); // Generate code for doing the condition check. __ bind(&loop); __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index. __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length. - __ j(above_equal, loop_statement.break_target()); + __ j(above_equal, loop_statement.break_label()); // Get the current entry of the array into register ebx. __ mov(ebx, Operand(esp, 2 * kPointerSize)); @@ -1001,7 +1003,7 @@ __ push(ebx); // Current entry. __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); __ test(eax, Operand(eax)); - __ j(equal, loop_statement.continue_target()); + __ j(equal, loop_statement.continue_label()); __ mov(ebx, Operand(eax)); // Update the 'each' property or variable from the possibly filtered @@ -1018,16 +1020,17 @@ // Generate code for going to the next element by incrementing the // index (smi) stored on top of the stack. - __ bind(loop_statement.continue_target()); + __ bind(loop_statement.continue_label()); __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1))); EmitStackCheck(stmt); __ jmp(&loop); // Remove the pointers stored on the stack. - __ bind(loop_statement.break_target()); + __ bind(loop_statement.break_label()); __ add(Operand(esp), Immediate(5 * kPointerSize)); + decrement_stack_height(ForIn::kElementCount); // Exit and decrement the loop depth. __ bind(&exit); decrement_loop_depth(); @@ -1265,6 +1268,18 @@ __ mov(eax, isolate()->factory()->undefined_value()); __ bind(&done); context()->Plug(eax); + } else if (var->mode() == Variable::LET) { + // Let bindings may be the hole value if they have not been initialized. + // Throw a type error in this case. + Label done; + MemOperand slot_operand = EmitSlotSearch(slot, eax); + __ mov(eax, slot_operand); + __ cmp(eax, isolate()->factory()->the_hole_value()); + __ j(not_equal, &done, Label::kNear); + __ push(Immediate(var->name())); + __ CallRuntime(Runtime::kThrowReferenceError, 1); + __ bind(&done); + context()->Plug(eax); } else { context()->Plug(slot); } @@ -1363,6 +1378,7 @@ if (!result_saved) { __ push(eax); // Save result on the stack result_saved = true; + increment_stack_height(); } switch (property->kind()) { case ObjectLiteral::Property::MATERIALIZED_LITERAL: @@ -1387,6 +1403,7 @@ // Fall through. case ObjectLiteral::Property::PROTOTYPE: __ push(Operand(esp, 0)); // Duplicate receiver. 
+ increment_stack_height(); VisitForStackValue(key); VisitForStackValue(value); if (property->emit_store()) { @@ -1395,16 +1412,20 @@ } else { __ Drop(3); } + decrement_stack_height(3); break; case ObjectLiteral::Property::SETTER: case ObjectLiteral::Property::GETTER: __ push(Operand(esp, 0)); // Duplicate receiver. + increment_stack_height(); VisitForStackValue(key); __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ? Smi::FromInt(1) : Smi::FromInt(0))); + increment_stack_height(); VisitForStackValue(value); __ CallRuntime(Runtime::kDefineAccessor, 4); + decrement_stack_height(4); break; default: UNREACHABLE(); } @@ -1467,6 +1488,7 @@ if (!result_saved) { __ push(eax); result_saved = true; + increment_stack_height(); } VisitForAccumulatorValue(subexpr); @@ -1495,7 +1517,9 @@ // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' // on the left-hand side. if (!expr->target()->IsValidLeftHandSide()) { - VisitForEffect(expr->target()); + ASSERT(expr->target()->AsThrow() != NULL); + VisitInCurrentContext(expr->target()); // Throw does not plug the context + context()->Plug(eax); return; } @@ -1520,6 +1544,7 @@ // We need the receiver both on the stack and in the accumulator. VisitForAccumulatorValue(property->obj()); __ push(result_register()); + increment_stack_height(); } else { VisitForStackValue(property->obj()); } @@ -1530,6 +1555,7 @@ VisitForAccumulatorValue(property->key()); __ mov(edx, Operand(esp, 0)); __ push(eax); + increment_stack_height(); } else { VisitForStackValue(property->obj()); VisitForStackValue(property->key()); @@ -1541,7 +1567,8 @@ // For compound assignments we need another deoptimization point after the // variable/property load. if (expr->is_compound()) { - { AccumulatorValueContext context(this); + AccumulatorValueContext result_context(this); + { AccumulatorValueContext left_operand_context(this); switch (assign_type) { case VARIABLE: EmitVariableLoad(expr->target()->AsVariableProxy()); @@ -1560,13 +1587,13 @@ Token::Value op = expr->binary_op(); __ push(eax); // Left operand goes on the stack. + increment_stack_height(); VisitForAccumulatorValue(expr->value()); OverwriteMode mode = expr->value()->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE; SetSourcePosition(expr->position() + 1); - AccumulatorValueContext context(this); if (ShouldInlineSmiCase(op)) { EmitInlineSmiBinaryOp(expr->binary_operation(), op, @@ -1630,6 +1657,7 @@ // stack. Right operand is in eax. Label smi_case, done, stub_call; __ pop(edx); + decrement_stack_height(); __ mov(ecx, eax); __ or_(eax, Operand(edx)); JumpPatchSite patch_site(masm_); @@ -1721,6 +1749,7 @@ Token::Value op, OverwriteMode mode) { __ pop(edx); + decrement_stack_height(); BinaryOpStub stub(op, mode); JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); @@ -1733,7 +1762,9 @@ // Invalid left-hand sides are rewritten to have a 'throw // ReferenceError' on the left-hand side. if (!expr->IsValidLeftHandSide()) { - VisitForEffect(expr); + ASSERT(expr->AsThrow() != NULL); + VisitInCurrentContext(expr); // Throw does not plug the context + context()->Plug(eax); return; } @@ -1757,9 +1788,11 @@ } case NAMED_PROPERTY: { __ push(eax); // Preserve value. + increment_stack_height(); VisitForAccumulatorValue(prop->obj()); __ mov(edx, eax); __ pop(eax); // Restore value. + decrement_stack_height(); __ mov(ecx, prop->key()->AsLiteral()->handle()); Handle ic = is_strict_mode() ? 
isolate()->builtins()->StoreIC_Initialize_Strict() @@ -1769,21 +1802,14 @@ } case KEYED_PROPERTY: { __ push(eax); // Preserve value. - if (prop->is_synthetic()) { - ASSERT(prop->obj()->AsVariableProxy() != NULL); - ASSERT(prop->key()->AsLiteral() != NULL); - { AccumulatorValueContext for_object(this); - EmitVariableLoad(prop->obj()->AsVariableProxy()); - } - __ mov(edx, eax); - __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle())); - } else { - VisitForStackValue(prop->obj()); - VisitForAccumulatorValue(prop->key()); - __ mov(ecx, eax); - __ pop(edx); - } + increment_stack_height(); + VisitForStackValue(prop->obj()); + VisitForAccumulatorValue(prop->key()); + __ mov(ecx, eax); + __ pop(edx); + decrement_stack_height(); __ pop(eax); // Restore value. + decrement_stack_height(); Handle ic = is_strict_mode() ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() : isolate()->builtins()->KeyedStoreIC_Initialize(); @@ -1841,6 +1867,57 @@ } __ bind(&skip); + } else if (var->mode() == Variable::LET && op != Token::INIT_LET) { + // Perform the assignment for non-const variables. Const assignments + // are simply skipped. + Slot* slot = var->AsSlot(); + switch (slot->type()) { + case Slot::PARAMETER: + case Slot::LOCAL: { + Label assign; + // Check for an initialized let binding. + __ mov(edx, Operand(ebp, SlotOffset(slot))); + __ cmp(edx, isolate()->factory()->the_hole_value()); + __ j(not_equal, &assign); + __ push(Immediate(var->name())); + __ CallRuntime(Runtime::kThrowReferenceError, 1); + // Perform the assignment. + __ bind(&assign); + __ mov(Operand(ebp, SlotOffset(slot)), eax); + break; + } + + case Slot::CONTEXT: { + // Let variables may be the hole value if they have not been + // initialized. Throw a type error in this case. + Label assign; + MemOperand target = EmitSlotSearch(slot, ecx); + // Check for an initialized let binding. + __ mov(edx, target); + __ cmp(edx, isolate()->factory()->the_hole_value()); + __ j(not_equal, &assign, Label::kNear); + __ push(Immediate(var->name())); + __ CallRuntime(Runtime::kThrowReferenceError, 1); + // Perform the assignment. + __ bind(&assign); + __ mov(target, eax); + // The value of the assignment is in eax. RecordWrite clobbers its + // register arguments. + __ mov(edx, eax); + int offset = Context::SlotOffset(slot->index()); + __ RecordWrite(ecx, offset, edx, ebx); + break; + } + + case Slot::LOOKUP: + // Call the runtime for the assignment. + __ push(eax); // Value. + __ push(esi); // Context. + __ push(Immediate(var->name())); + __ push(Immediate(Smi::FromInt(strict_mode_flag()))); + __ CallRuntime(Runtime::kStoreContextSlot, 4); + break; + } } else if (var->mode() != Variable::CONST) { // Perform the assignment for non-const variables. Const assignments // are simply skipped. @@ -1900,6 +1977,7 @@ __ mov(edx, Operand(esp, 0)); } else { __ pop(edx); + decrement_stack_height(); } Handle ic = is_strict_mode() ? isolate()->builtins()->StoreIC_Initialize_Strict() @@ -1913,6 +1991,7 @@ __ CallRuntime(Runtime::kToFastProperties, 1); __ pop(eax); __ Drop(1); + decrement_stack_height(); } PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); context()->Plug(eax); @@ -1934,10 +2013,12 @@ } __ pop(ecx); + decrement_stack_height(); if (expr->ends_initialization_block()) { __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later. } else { __ pop(edx); + decrement_stack_height(); } // Record source code position before IC call. 
SetSourcePosition(expr->position()); @@ -1953,6 +2034,7 @@ __ push(edx); __ CallRuntime(Runtime::kToFastProperties, 1); __ pop(eax); + decrement_stack_height(); } PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); @@ -1972,6 +2054,7 @@ VisitForStackValue(expr->obj()); VisitForAccumulatorValue(expr->key()); __ pop(edx); + decrement_stack_height(); EmitKeyedPropertyLoad(expr); context()->Plug(eax); } @@ -1999,6 +2082,7 @@ RecordJSReturnSite(expr); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + decrement_stack_height(arg_count + 1); context()->Plug(eax); } @@ -2013,6 +2097,7 @@ __ pop(ecx); __ push(eax); __ push(ecx); + increment_stack_height(); // Load the arguments. ZoneList* args = expr->arguments(); @@ -2032,6 +2117,7 @@ RecordJSReturnSite(expr); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + decrement_stack_height(arg_count + 1); context()->DropAndPlug(1, eax); // Drop the key still on the stack. } @@ -2053,6 +2139,8 @@ RecordJSReturnSite(expr); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + + decrement_stack_height(arg_count + 1); context()->DropAndPlug(1, eax); } @@ -2100,7 +2188,7 @@ VisitForStackValue(fun); // Reserved receiver slot. __ push(Immediate(isolate()->factory()->undefined_value())); - + increment_stack_height(); // Push the arguments. for (int i = 0; i < arg_count; i++) { VisitForStackValue(args->at(i)); @@ -2144,10 +2232,12 @@ RecordJSReturnSite(expr); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + decrement_stack_height(arg_count + 1); // Function is left on the stack. context()->DropAndPlug(1, eax); } else if (var != NULL && !var->is_this() && var->is_global()) { // Push global object as receiver for the call IC. __ push(GlobalObjectOperand()); + increment_stack_height(); EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); } else if (var != NULL && var->AsSlot() != NULL && var->AsSlot()->type() == Slot::LOOKUP) { @@ -2170,7 +2260,9 @@ __ push(Immediate(var->name())); __ CallRuntime(Runtime::kLoadContextSlot, 2); __ push(eax); // Function. + increment_stack_height(); __ push(edx); // Receiver. + increment_stack_height(); // If fast case code has been generated, emit code to push the // function and receiver and have the slow path jump around this @@ -2179,7 +2271,7 @@ Label call; __ jmp(&call); __ bind(&done); - // Push function. + // Push function. Stack height already incremented in slow case above. __ push(eax); // The receiver is implicitly the global receiver. Indicate this // by passing the hole to the call function stub. @@ -2203,38 +2295,10 @@ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); } else { // Call to a keyed property. - // For a synthetic property use keyed load IC followed by function call, - // for a regular property use EmitKeyedCallWithIC. - if (prop->is_synthetic()) { - // Do not visit the object and key subexpressions (they are shared - // by all occurrences of the same rewritten parameter). 
- ASSERT(prop->obj()->AsVariableProxy() != NULL); - ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL); - Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot(); - MemOperand operand = EmitSlotSearch(slot, edx); - __ mov(edx, operand); - - ASSERT(prop->key()->AsLiteral() != NULL); - ASSERT(prop->key()->AsLiteral()->handle()->IsSmi()); - __ mov(eax, prop->key()->AsLiteral()->handle()); - - // Record source code position for IC call. - SetSourcePosition(prop->position()); - - Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop)); - // Push result (function). - __ push(eax); - // Push Global receiver. - __ mov(ecx, GlobalObjectOperand()); - __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset)); - EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); - } else { - { PreservePositionScope scope(masm()->positions_recorder()); - VisitForStackValue(prop->obj()); - } - EmitKeyedCallWithIC(expr, prop->key()); + { PreservePositionScope scope(masm()->positions_recorder()); + VisitForStackValue(prop->obj()); } + EmitKeyedCallWithIC(expr, prop->key()); } } else { { PreservePositionScope scope(masm()->positions_recorder()); @@ -2243,6 +2307,7 @@ // Load global receiver object. __ mov(ebx, GlobalObjectOperand()); __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); + increment_stack_height(); // Emit function call. EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); } @@ -2283,6 +2348,8 @@ Handle construct_builtin = isolate()->builtins()->JSConstructCall(); __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL); + + decrement_stack_height(arg_count + 1); context()->Plug(eax); } @@ -2595,6 +2662,7 @@ &if_true, &if_false, &fall_through); __ pop(ebx); + decrement_stack_height(); __ cmp(eax, Operand(ebx)); PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); Split(equal, if_true, if_false, fall_through); @@ -2709,6 +2777,7 @@ VisitForStackValue(args->at(1)); VisitForStackValue(args->at(2)); __ CallRuntime(Runtime::kLog, 2); + decrement_stack_height(2); } // Finally, we're expected to leave a value on the top of the stack. __ mov(eax, isolate()->factory()->undefined_value()); @@ -2774,6 +2843,7 @@ VisitForStackValue(args->at(1)); VisitForStackValue(args->at(2)); __ CallStub(&stub); + decrement_stack_height(3); context()->Plug(eax); } @@ -2787,6 +2857,7 @@ VisitForStackValue(args->at(2)); VisitForStackValue(args->at(3)); __ CallStub(&stub); + decrement_stack_height(4); context()->Plug(eax); } @@ -2821,6 +2892,7 @@ } else { __ CallRuntime(Runtime::kMath_pow, 2); } + decrement_stack_height(2); context()->Plug(eax); } @@ -2831,6 +2903,7 @@ VisitForStackValue(args->at(0)); // Load the object. VisitForAccumulatorValue(args->at(1)); // Load the value. __ pop(ebx); // eax = value. ebx = object. + decrement_stack_height(); Label done; // If the object is a smi, return the value. 
@@ -2860,6 +2933,7 @@ NumberToStringStub stub; __ CallStub(&stub); + decrement_stack_height(); context()->Plug(eax); } @@ -2894,6 +2968,7 @@ Register result = edx; __ pop(object); + decrement_stack_height(); Label need_conversion; Label index_out_of_range; @@ -2942,6 +3017,7 @@ Register result = eax; __ pop(object); + decrement_stack_height(); Label need_conversion; Label index_out_of_range; @@ -2986,6 +3062,7 @@ StringAddStub stub(NO_STRING_ADD_FLAGS); __ CallStub(&stub); + decrement_stack_height(2); context()->Plug(eax); } @@ -2998,6 +3075,7 @@ StringCompareStub stub; __ CallStub(&stub); + decrement_stack_height(2); context()->Plug(eax); } @@ -3009,6 +3087,7 @@ ASSERT(args->length() == 1); VisitForStackValue(args->at(0)); __ CallStub(&stub); + decrement_stack_height(); context()->Plug(eax); } @@ -3020,6 +3099,7 @@ ASSERT(args->length() == 1); VisitForStackValue(args->at(0)); __ CallStub(&stub); + decrement_stack_height(); context()->Plug(eax); } @@ -3031,6 +3111,7 @@ ASSERT(args->length() == 1); VisitForStackValue(args->at(0)); __ CallStub(&stub); + decrement_stack_height(); context()->Plug(eax); } @@ -3040,6 +3121,7 @@ ASSERT(args->length() == 1); VisitForStackValue(args->at(0)); __ CallRuntime(Runtime::kMath_sqrt, 1); + decrement_stack_height(); context()->Plug(eax); } @@ -3059,6 +3141,7 @@ __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + decrement_stack_height(arg_count + 1); context()->Plug(eax); } @@ -3071,6 +3154,7 @@ VisitForStackValue(args->at(1)); VisitForStackValue(args->at(2)); __ CallStub(&stub); + decrement_stack_height(3); context()->Plug(eax); } @@ -3144,6 +3228,7 @@ __ CallRuntime(Runtime::kSwapElements, 3); __ bind(&done); + decrement_stack_height(3); context()->Plug(eax); } @@ -3177,7 +3262,7 @@ Label done, not_found; // tmp now holds finger offset as a smi. - ASSERT(kSmiTag == 0 && kSmiTagSize == 1); + STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp)); __ j(not_equal, &not_found); @@ -3229,6 +3314,7 @@ __ mov(eax, Immediate(isolate()->factory()->true_value())); __ bind(&done); + decrement_stack_height(); context()->Plug(eax); } @@ -3532,43 +3618,11 @@ __ add(Operand(esp), Immediate(3 * kPointerSize)); __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + decrement_stack_height(); context()->Plug(eax); } -void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) { - ASSERT(args->length() == 1); - - // Load the function into eax. - VisitForAccumulatorValue(args->at(0)); - - // Prepare for the test. - Label materialize_true, materialize_false; - Label* if_true = NULL; - Label* if_false = NULL; - Label* fall_through = NULL; - context()->PrepareTest(&materialize_true, &materialize_false, - &if_true, &if_false, &fall_through); - - // Test for strict mode function. - __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset)); - __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), - 1 << SharedFunctionInfo::kStrictModeBitWithinByte); - __ j(not_equal, if_true); - - // Test for native function. - __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), - 1 << SharedFunctionInfo::kNativeBitWithinByte); - __ j(not_equal, if_true); - - // Not native or strict-mode function.
- __ jmp(if_false); - - PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); - context()->Plug(if_true, if_false); -} - - void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { Handle name = expr->name(); if (name->length() > 0 && name->Get(0) == '_') { @@ -3584,6 +3638,7 @@ // Prepare for calling JS runtime function. __ mov(eax, GlobalObjectOperand()); __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset)); + increment_stack_height(); } // Push the arguments ("left-to-right"). @@ -3606,6 +3661,11 @@ // Call the C runtime function. __ CallRuntime(expr->function(), arg_count); } + decrement_stack_height(arg_count); + if (expr->is_jsruntime()) { + decrement_stack_height(); + } + context()->Plug(eax); } @@ -3618,17 +3678,12 @@ Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); if (prop != NULL) { - if (prop->is_synthetic()) { - // Result of deleting parameters is false, even when they rewrite - // to accesses on the arguments object. - context()->Plug(false); - } else { - VisitForStackValue(prop->obj()); - VisitForStackValue(prop->key()); - __ push(Immediate(Smi::FromInt(strict_mode_flag()))); - __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); - context()->Plug(eax); - } + VisitForStackValue(prop->obj()); + VisitForStackValue(prop->key()); + __ push(Immediate(Smi::FromInt(strict_mode_flag()))); + __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); + decrement_stack_height(2); + context()->Plug(eax); } else if (var != NULL) { // Delete of an unqualified identifier is disallowed in strict mode // but "delete this" is. @@ -3696,6 +3751,7 @@ VisitForTypeofValue(expr->expression()); } __ CallRuntime(Runtime::kTypeof, 1); + decrement_stack_height(); context()->Plug(eax); break; } @@ -3728,7 +3784,6 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, const char* comment) { - // TODO(svenpanne): Allowing format strings in Comment would be nice here... Comment cmt(masm_, comment); bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); UnaryOverwriteMode overwrite = @@ -3750,7 +3805,10 @@ // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' // as the left-hand side. if (!expr->expression()->IsValidLeftHandSide()) { - VisitForEffect(expr->expression()); + ASSERT(expr->expression()->AsThrow() != NULL); + VisitInCurrentContext(expr->expression()); + // Visiting Throw does not plug the context. + context()->Plug(eax); return; } @@ -3775,17 +3833,20 @@ // Reserve space for result of postfix operation. if (expr->is_postfix() && !context()->IsEffect()) { __ push(Immediate(Smi::FromInt(0))); + increment_stack_height(); } if (assign_type == NAMED_PROPERTY) { // Put the object both on the stack and in the accumulator. VisitForAccumulatorValue(prop->obj()); __ push(eax); + increment_stack_height(); EmitNamedPropertyLoad(prop); } else { VisitForStackValue(prop->obj()); VisitForAccumulatorValue(prop->key()); __ mov(edx, Operand(esp, 0)); __ push(eax); + increment_stack_height(); EmitKeyedPropertyLoad(prop); } } @@ -3816,6 +3877,7 @@ switch (assign_type) { case VARIABLE: __ push(eax); + increment_stack_height(); break; case NAMED_PROPERTY: __ mov(Operand(esp, kPointerSize), eax); @@ -3889,6 +3951,7 @@ case NAMED_PROPERTY: { __ mov(ecx, prop->key()->AsLiteral()->handle()); __ pop(edx); + decrement_stack_height(); Handle ic = is_strict_mode() ? 
isolate()->builtins()->StoreIC_Initialize_Strict() : isolate()->builtins()->StoreIC_Initialize(); @@ -3906,6 +3969,8 @@ case KEYED_PROPERTY: { __ pop(ecx); __ pop(edx); + decrement_stack_height(); + decrement_stack_height(); Handle ic = is_strict_mode() ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() : isolate()->builtins()->KeyedStoreIC_Initialize(); @@ -3993,6 +4058,10 @@ __ j(equal, if_true); __ cmp(eax, isolate()->factory()->false_value()); Split(equal, if_true, if_false, fall_through); + } else if (FLAG_harmony_typeof && + check->Equals(isolate()->heap()->null_symbol())) { + __ cmp(eax, isolate()->factory()->null_value()); + Split(equal, if_true, if_false, fall_through); } else if (check->Equals(isolate()->heap()->undefined_symbol())) { __ cmp(eax, isolate()->factory()->undefined_value()); __ j(equal, if_true); @@ -4008,8 +4077,10 @@ Split(above_equal, if_true, if_false, fall_through); } else if (check->Equals(isolate()->heap()->object_symbol())) { __ JumpIfSmi(eax, if_false); - __ cmp(eax, isolate()->factory()->null_value()); - __ j(equal, if_true); + if (!FLAG_harmony_typeof) { + __ cmp(eax, isolate()->factory()->null_value()); + __ j(equal, if_true); + } __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx); __ j(below, if_false); __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); @@ -4063,6 +4134,7 @@ case Token::IN: VisitForStackValue(expr->right()); __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); + decrement_stack_height(2); PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); __ cmp(eax, isolate()->factory()->true_value()); Split(equal, if_true, if_false, fall_through); @@ -4072,6 +4144,7 @@ VisitForStackValue(expr->right()); InstanceofStub stub(InstanceofStub::kNoFlags); __ CallStub(&stub); + decrement_stack_height(2); PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); __ test(eax, Operand(eax)); // The stub returns 0 for true. @@ -4082,11 +4155,8 @@ default: { VisitForAccumulatorValue(expr->right()); Condition cc = no_condition; - bool strict = false; switch (op) { case Token::EQ_STRICT: - strict = true; - // Fall through case Token::EQ: cc = equal; __ pop(edx); @@ -4116,6 +4186,7 @@ default: UNREACHABLE(); } + decrement_stack_height(); bool inline_smi_code = ShouldInlineSmiCase(op); JumpPatchSite patch_site(masm_); @@ -4231,8 +4302,8 @@ ASSERT(!result_register().is(edx)); __ pop(edx); __ sub(Operand(edx), Immediate(masm_->CodeObject())); - ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); - ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); + STATIC_ASSERT(kSmiTag == 0); __ SmiTag(edx); __ push(edx); // Store result register while executing finally block. @@ -4253,6 +4324,34 @@ #undef __ +#define __ ACCESS_MASM(masm()) + +FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( + int* stack_depth, + int* context_length) { + // The macros used here must preserve the result register. + + // Because the handler block contains the context of the finally + // code, we can restore it directly from there for the finally code + // rather than iteratively unwinding contexts via their previous + // links. + __ Drop(*stack_depth); // Down to the handler block. + if (*context_length > 0) { + // Restore the context to its dedicated register and the stack. 
+ __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset)); + __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); + } + __ PopTryHandler(); + __ call(finally_entry_); + + *stack_depth = 0; + *context_length = 0; + return previous_; +} + + +#undef __ + } } // namespace v8::internal #endif // V8_TARGET_ARCH_IA32 diff -Nru libv8-3.4.14.21/src/ia32/ic-ia32.cc libv8-3.5.10.24/src/ia32/ic-ia32.cc --- libv8-3.4.14.21/src/ia32/ic-ia32.cc 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/ic-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -324,7 +324,7 @@ __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset)); __ j(above_equal, out_of_range); // Fast case: Do the load. - ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0)); + STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0)); __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize)); __ cmp(Operand(scratch), Immediate(FACTORY->the_hole_value())); // In case the loaded value is the_hole we have to consult GetProperty @@ -358,7 +358,7 @@ __ j(zero, index_string); // Is the string a symbol? - ASSERT(kSymbolTag != 0); + STATIC_ASSERT(kSymbolTag != 0); __ test_b(FieldOperand(map, Map::kInstanceTypeOffset), kIsSymbolMask); __ j(zero, not_symbol); } diff -Nru libv8-3.4.14.21/src/ia32/lithium-codegen-ia32.cc libv8-3.5.10.24/src/ia32/lithium-codegen-ia32.cc --- libv8-3.4.14.21/src/ia32/lithium-codegen-ia32.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/lithium-codegen-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1211,17 +1211,11 @@ } -void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { +void LCodeGen::DoFixedArrayBaseLength( + LFixedArrayBaseLength* instr) { Register result = ToRegister(instr->result()); Register array = ToRegister(instr->InputAt(0)); - __ mov(result, FieldOperand(array, FixedArray::kLengthOffset)); -} - - -void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) { - Register result = ToRegister(instr->result()); - Register array = ToRegister(instr->InputAt(0)); - __ mov(result, FieldOperand(array, ExternalArray::kLengthOffset)); + __ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset)); } @@ -1393,44 +1387,98 @@ } else { ASSERT(r.IsTagged()); Register reg = ToRegister(instr->InputAt(0)); - if (instr->hydrogen()->value()->type().IsBoolean()) { + HType type = instr->hydrogen()->value()->type(); + if (type.IsBoolean()) { __ cmp(reg, factory()->true_value()); EmitBranch(true_block, false_block, equal); + } else if (type.IsSmi()) { + __ test(reg, Operand(reg)); + EmitBranch(true_block, false_block, not_equal); } else { Label* true_label = chunk_->GetAssemblyLabel(true_block); Label* false_label = chunk_->GetAssemblyLabel(false_block); - __ cmp(reg, factory()->undefined_value()); - __ j(equal, false_label); - __ cmp(reg, factory()->true_value()); - __ j(equal, true_label); - __ cmp(reg, factory()->false_value()); - __ j(equal, false_label); - __ test(reg, Operand(reg)); - __ j(equal, false_label); - __ JumpIfSmi(reg, true_label); + ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); + // Avoid deopts in the case where we've never executed this path before. + if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); + + if (expected.Contains(ToBooleanStub::UNDEFINED)) { + // undefined -> false. + __ cmp(reg, factory()->undefined_value()); + __ j(equal, false_label); + } + if (expected.Contains(ToBooleanStub::BOOLEAN)) { + // true -> true. 
+ __ cmp(reg, factory()->true_value()); + __ j(equal, true_label); + // false -> false. + __ cmp(reg, factory()->false_value()); + __ j(equal, false_label); + } + if (expected.Contains(ToBooleanStub::NULL_TYPE)) { + // 'null' -> false. + __ cmp(reg, factory()->null_value()); + __ j(equal, false_label); + } + + if (expected.Contains(ToBooleanStub::SMI)) { + // Smis: 0 -> false, all other -> true. + __ test(reg, Operand(reg)); + __ j(equal, false_label); + __ JumpIfSmi(reg, true_label); + } else if (expected.NeedsMap()) { + // If we need a map later and have a Smi -> deopt. + __ test(reg, Immediate(kSmiTagMask)); + DeoptimizeIf(zero, instr->environment()); + } + + Register map = no_reg; // Keep the compiler happy. + if (expected.NeedsMap()) { + map = ToRegister(instr->TempAt(0)); + ASSERT(!map.is(reg)); + __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); + + if (expected.CanBeUndetectable()) { + // Undetectable -> false. + __ test_b(FieldOperand(map, Map::kBitFieldOffset), + 1 << Map::kIsUndetectable); + __ j(not_zero, false_label); + } + } - // Test for double values. Zero is false. - Label call_stub; - __ cmp(FieldOperand(reg, HeapObject::kMapOffset), - factory()->heap_number_map()); - __ j(not_equal, &call_stub, Label::kNear); - __ fldz(); - __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset)); - __ FCmp(); - __ j(zero, false_label); - __ jmp(true_label); - - // The conversion stub doesn't cause garbage collections so it's - // safe to not record a safepoint after the call. - __ bind(&call_stub); - ToBooleanStub stub(eax); - __ pushad(); - __ push(reg); - __ CallStub(&stub); - __ test(eax, Operand(eax)); - __ popad(); - EmitBranch(true_block, false_block, not_zero); + if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { + // spec object -> true. + __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); + __ j(above_equal, true_label); + } + + if (expected.Contains(ToBooleanStub::STRING)) { + // String value -> false iff empty. + Label not_string; + __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); + __ j(above_equal, &not_string, Label::kNear); + __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0)); + __ j(not_zero, true_label); + __ jmp(false_label); + __ bind(&not_string); + } + + if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { + // heap number -> false iff +0, -0, or NaN. + Label not_heap_number; + __ cmp(FieldOperand(reg, HeapObject::kMapOffset), + factory()->heap_number_map()); + __ j(not_equal, &not_heap_number, Label::kNear); + __ fldz(); + __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset)); + __ FCmp(); + __ j(zero, false_label); + __ jmp(true_label); + __ bind(&not_heap_number); + } + + // We've seen something for the first time -> deopt. + DeoptimizeIf(no_condition, instr->environment()); } } } @@ -2211,16 +2259,13 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { - Register elements = ToRegister(instr->elements()); - Register key = ToRegister(instr->key()); Register result = ToRegister(instr->result()); - ASSERT(result.is(elements)); // Load the result. - __ mov(result, FieldOperand(elements, - key, - times_pointer_size, - FixedArray::kHeaderSize)); + __ mov(result, + BuildFastArrayOperand(instr->elements(), instr->key(), + JSObject::FAST_ELEMENTS, + FixedArray::kHeaderSize - kHeapObjectTag)); // Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) { @@ -2232,7 +2277,6 @@ void LCodeGen::DoLoadKeyedFastDoubleElement( LLoadKeyedFastDoubleElement* instr) { - Register elements = ToRegister(instr->elements()); XMMRegister result = ToDoubleRegister(instr->result()); if (instr->hydrogen()->RequiresHoleCheck()) { @@ -2254,22 +2298,22 @@ Operand LCodeGen::BuildFastArrayOperand( - LOperand* external_pointer, + LOperand* elements_pointer, LOperand* key, JSObject::ElementsKind elements_kind, uint32_t offset) { - Register external_pointer_reg = ToRegister(external_pointer); + Register elements_pointer_reg = ToRegister(elements_pointer); int shift_size = ElementsKindToShiftSize(elements_kind); if (key->IsConstantOperand()) { int constant_value = ToInteger32(LConstantOperand::cast(key)); if (constant_value & 0xF0000000) { Abort("array index constant value too big"); } - return Operand(external_pointer_reg, + return Operand(elements_pointer_reg, constant_value * (1 << shift_size) + offset); } else { ScaleFactor scale_factor = static_cast(shift_size); - return Operand(external_pointer_reg, ToRegister(key), scale_factor, offset); + return Operand(elements_pointer_reg, ToRegister(key), scale_factor, offset); } } @@ -2666,23 +2710,53 @@ XMMRegister xmm_scratch = xmm0; Register output_reg = ToRegister(instr->result()); XMMRegister input_reg = ToDoubleRegister(instr->value()); - __ xorps(xmm_scratch, xmm_scratch); // Zero the register. - __ ucomisd(input_reg, xmm_scratch); - if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { - DeoptimizeIf(below_equal, instr->environment()); + if (CpuFeatures::IsSupported(SSE4_1)) { + CpuFeatures::Scope scope(SSE4_1); + if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { + // Deoptimize on negative zero. + Label non_zero; + __ xorps(xmm_scratch, xmm_scratch); // Zero the register. + __ ucomisd(input_reg, xmm_scratch); + __ j(not_equal, &non_zero, Label::kNear); + __ movmskpd(output_reg, input_reg); + __ test(output_reg, Immediate(1)); + DeoptimizeIf(not_zero, instr->environment()); + __ bind(&non_zero); + } + __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown); + __ cvttsd2si(output_reg, Operand(xmm_scratch)); + // Overflow is signalled with minint. + __ cmp(output_reg, 0x80000000u); + DeoptimizeIf(equal, instr->environment()); } else { + Label done; + // Deoptimize on negative numbers. + __ xorps(xmm_scratch, xmm_scratch); // Zero the register. + __ ucomisd(input_reg, xmm_scratch); DeoptimizeIf(below, instr->environment()); - } - // Use truncating instruction (OK because input is positive). - __ cvttsd2si(output_reg, Operand(input_reg)); + if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { + // Check for negative zero. + Label positive_sign; + __ j(above, &positive_sign, Label::kNear); + __ movmskpd(output_reg, input_reg); + __ test(output_reg, Immediate(1)); + DeoptimizeIf(not_zero, instr->environment()); + __ Set(output_reg, Immediate(0)); + __ jmp(&done, Label::kNear); + __ bind(&positive_sign); + } - // Overflow is signalled with minint. - __ cmp(output_reg, 0x80000000u); - DeoptimizeIf(equal, instr->environment()); -} + // Use truncating instruction (OK because input is positive). + __ cvttsd2si(output_reg, Operand(input_reg)); + // Overflow is signalled with minint. 
+ __ cmp(output_reg, 0x80000000u); + DeoptimizeIf(equal, instr->environment()); + __ bind(&done); + } +} void LCodeGen::DoMathRound(LUnaryMathOperation* instr) { XMMRegister xmm_scratch = xmm0; @@ -2693,13 +2767,11 @@ // xmm_scratch = 0.5 ExternalReference one_half = ExternalReference::address_of_one_half(); __ movdbl(xmm_scratch, Operand::StaticVariable(one_half)); - __ ucomisd(xmm_scratch, input_reg); __ j(above, &below_half); // input = input + 0.5 __ addsd(input_reg, xmm_scratch); - // Compute Math.floor(value + 0.5). // Use truncating instruction (OK because input is positive). __ cvttsd2si(output_reg, Operand(input_reg)); @@ -3018,8 +3090,14 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { - __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); - DeoptimizeIf(above_equal, instr->environment()); + if (instr->index()->IsConstantOperand()) { + __ cmp(ToOperand(instr->length()), + ToImmediate(LConstantOperand::cast(instr->index()))); + DeoptimizeIf(below_equal, instr->environment()); + } else { + __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); + DeoptimizeIf(above_equal, instr->environment()); + } } @@ -3097,7 +3175,6 @@ void LCodeGen::DoStoreKeyedFastDoubleElement( LStoreKeyedFastDoubleElement* instr) { XMMRegister value = ToDoubleRegister(instr->value()); - Register elements = ToRegister(instr->elements()); Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; Label have_value; @@ -3140,95 +3217,79 @@ }; Register string = ToRegister(instr->string()); - Register index = no_reg; - int const_index = -1; - if (instr->index()->IsConstantOperand()) { - const_index = ToInteger32(LConstantOperand::cast(instr->index())); - STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); - if (!Smi::IsValid(const_index)) { - // Guaranteed to be out of bounds because of the assert above. - // So the bounds check that must dominate this instruction must - // have deoptimized already. - if (FLAG_debug_code) { - __ Abort("StringCharCodeAt: out of bounds index."); - } - // No code needs to be generated. - return; - } - } else { - index = ToRegister(instr->index()); - } + Register index = ToRegister(instr->index()); Register result = ToRegister(instr->result()); DeferredStringCharCodeAt* deferred = new DeferredStringCharCodeAt(this, instr); - Label flat_string, ascii_string, done; - // Fetch the instance type of the receiver into result register. __ mov(result, FieldOperand(string, HeapObject::kMapOffset)); __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset)); - // We need special handling for non-flat strings. - STATIC_ASSERT(kSeqStringTag == 0); - __ test(result, Immediate(kStringRepresentationMask)); - __ j(zero, &flat_string, Label::kNear); - - // Handle non-flat strings. - __ test(result, Immediate(kIsConsStringMask)); - __ j(zero, deferred->entry()); + // We need special handling for indirect strings. + Label check_sequential; + __ test(result, Immediate(kIsIndirectStringMask)); + __ j(zero, &check_sequential, Label::kNear); + + // Dispatch on the indirect string shape: slice or cons. + Label cons_string; + __ test(result, Immediate(kSlicedNotConsMask)); + __ j(zero, &cons_string, Label::kNear); + + // Handle slices. + Label indirect_string_loaded; + __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset)); + __ SmiUntag(result); + __ add(index, Operand(result)); + __ mov(string, FieldOperand(string, SlicedString::kParentOffset)); + __ jmp(&indirect_string_loaded, Label::kNear); - // ConsString. + // Handle conses. 
// Check whether the right hand side is the empty string (i.e. if // this is really a flat string in a cons string). If that is not // the case we would rather go to the runtime system now to flatten // the string. + __ bind(&cons_string); __ cmp(FieldOperand(string, ConsString::kSecondOffset), Immediate(factory()->empty_string())); __ j(not_equal, deferred->entry()); - // Get the first of the two strings and load its instance type. __ mov(string, FieldOperand(string, ConsString::kFirstOffset)); + + __ bind(&indirect_string_loaded); __ mov(result, FieldOperand(string, HeapObject::kMapOffset)); __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset)); - // If the first cons component is also non-flat, then go to runtime. + + // Check whether the string is sequential. The only non-sequential + // shapes we support have just been unwrapped above. + __ bind(&check_sequential); STATIC_ASSERT(kSeqStringTag == 0); __ test(result, Immediate(kStringRepresentationMask)); __ j(not_zero, deferred->entry()); - // Check for ASCII or two-byte string. - __ bind(&flat_string); + // Dispatch on the encoding: ASCII or two-byte. + Label ascii_string; STATIC_ASSERT(kAsciiStringTag != 0); __ test(result, Immediate(kStringEncodingMask)); __ j(not_zero, &ascii_string, Label::kNear); // Two-byte string. // Load the two-byte character code into the result register. + Label done; STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); - if (instr->index()->IsConstantOperand()) { - __ movzx_w(result, - FieldOperand(string, - SeqTwoByteString::kHeaderSize + - (kUC16Size * const_index))); - } else { - __ movzx_w(result, FieldOperand(string, - index, - times_2, - SeqTwoByteString::kHeaderSize)); - } + __ movzx_w(result, FieldOperand(string, + index, + times_2, + SeqTwoByteString::kHeaderSize)); __ jmp(&done, Label::kNear); // ASCII string. // Load the byte into the result register. 
__ bind(&ascii_string); - if (instr->index()->IsConstantOperand()) { - __ movzx_b(result, FieldOperand(string, - SeqAsciiString::kHeaderSize + const_index)); - } else { - __ movzx_b(result, FieldOperand(string, - index, - times_1, - SeqAsciiString::kHeaderSize)); - } + __ movzx_b(result, FieldOperand(string, + index, + times_1, + SeqAsciiString::kHeaderSize)); __ bind(&done); __ bind(deferred->exit()); } @@ -4111,6 +4172,10 @@ __ cmp(input, factory()->false_value()); final_branch_condition = equal; + } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) { + __ cmp(input, factory()->null_value()); + final_branch_condition = equal; + } else if (type_name->Equals(heap()->undefined_symbol())) { __ cmp(input, factory()->undefined_value()); __ j(equal, true_label); @@ -4129,8 +4194,10 @@ } else if (type_name->Equals(heap()->object_symbol())) { __ JumpIfSmi(input, false_label); - __ cmp(input, factory()->null_value()); - __ j(equal, true_label); + if (!FLAG_harmony_typeof) { + __ cmp(input, factory()->null_value()); + __ j(equal, true_label); + } __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input); __ j(below, false_label); __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); diff -Nru libv8-3.4.14.21/src/ia32/lithium-codegen-ia32.h libv8-3.5.10.24/src/ia32/lithium-codegen-ia32.h --- libv8-3.4.14.21/src/ia32/lithium-codegen-ia32.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/lithium-codegen-ia32.h 2011-08-17 14:33:23.000000000 +0000 @@ -222,7 +222,7 @@ Register ToRegister(int index) const; XMMRegister ToDoubleRegister(int index) const; int ToInteger32(LConstantOperand* op) const; - Operand BuildFastArrayOperand(LOperand* external_pointer, + Operand BuildFastArrayOperand(LOperand* elements_pointer, LOperand* key, JSObject::ElementsKind elements_kind, uint32_t offset); diff -Nru libv8-3.4.14.21/src/ia32/lithium-ia32.cc libv8-3.5.10.24/src/ia32/lithium-ia32.cc --- libv8-3.4.14.21/src/ia32/lithium-ia32.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/lithium-ia32.cc 2011-09-20 11:34:48.000000000 +0000 @@ -706,7 +706,9 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { HEnvironment* hydrogen_env = current_block_->last_environment(); - instr->set_environment(CreateEnvironment(hydrogen_env)); + int argument_index_accumulator = 0; + instr->set_environment(CreateEnvironment(hydrogen_env, + &argument_index_accumulator)); return instr; } @@ -993,10 +995,13 @@ } -LEnvironment* LChunkBuilder::CreateEnvironment(HEnvironment* hydrogen_env) { +LEnvironment* LChunkBuilder::CreateEnvironment( + HEnvironment* hydrogen_env, + int* argument_index_accumulator) { if (hydrogen_env == NULL) return NULL; - LEnvironment* outer = CreateEnvironment(hydrogen_env->outer()); + LEnvironment* outer = + CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator); int ast_id = hydrogen_env->ast_id(); ASSERT(ast_id != AstNode::kNoNumber); int value_count = hydrogen_env->length(); @@ -1006,7 +1011,6 @@ argument_count_, value_count, outer); - int argument_index = 0; for (int i = 0; i < value_count; ++i) { if (hydrogen_env->is_special_index(i)) continue; @@ -1015,7 +1019,7 @@ if (value->IsArgumentsObject()) { op = NULL; } else if (value->IsPushArgument()) { - op = new LArgument(argument_index++); + op = new LArgument((*argument_index_accumulator)++); } else { op = UseAny(value); } @@ -1041,7 +1045,13 @@ : instr->SecondSuccessor(); return new LGoto(successor->block_id()); } - return new LBranch(UseRegisterAtStart(v)); + 
ToBooleanStub::Types expected = instr->expected_input_types(); + // We need a temporary register when we have to access the map *or* we have + // no type info yet, in which case we handle all cases (including the ones + // involving maps). + bool needs_temp = expected.NeedsMap() || expected.IsEmpty(); + LOperand* temp = needs_temp ? TempRegister() : NULL; + return AssignEnvironment(new LBranch(UseRegister(v), temp)); } @@ -1532,16 +1542,10 @@ } -LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) { +LInstruction* LChunkBuilder::DoFixedArrayBaseLength( + HFixedArrayBaseLength* instr) { LOperand* array = UseRegisterAtStart(instr->value()); - return DefineAsRegister(new LFixedArrayLength(array)); -} - - -LInstruction* LChunkBuilder::DoExternalArrayLength( - HExternalArrayLength* instr) { - LOperand* array = UseRegisterAtStart(instr->value()); - return DefineAsRegister(new LExternalArrayLength(array)); + return DefineAsRegister(new LFixedArrayBaseLength(array)); } @@ -1559,8 +1563,9 @@ LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) { - return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()), - UseAtStart(instr->length()))); + return AssignEnvironment(new LBoundsCheck( + UseRegisterOrConstantAtStart(instr->index()), + UseAtStart(instr->length()))); } @@ -1881,9 +1886,9 @@ ASSERT(instr->representation().IsTagged()); ASSERT(instr->key()->representation().IsInteger32()); LOperand* obj = UseRegisterAtStart(instr->object()); - LOperand* key = UseRegisterAtStart(instr->key()); + LOperand* key = UseRegisterOrConstantAtStart(instr->key()); LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key); - return AssignEnvironment(DefineSameAsFirst(result)); + return AssignEnvironment(DefineAsRegister(result)); } @@ -2057,8 +2062,8 @@ LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) { - LOperand* string = UseRegister(instr->string()); - LOperand* index = UseRegisterOrConstant(instr->index()); + LOperand* string = UseTempRegister(instr->string()); + LOperand* index = UseTempRegister(instr->index()); LOperand* context = UseAny(instr->context()); LStringCharCodeAt* result = new LStringCharCodeAt(context, string, index); return AssignEnvironment(AssignPointerMap(DefineAsRegister(result))); diff -Nru libv8-3.4.14.21/src/ia32/lithium-ia32.h libv8-3.5.10.24/src/ia32/lithium-ia32.h --- libv8-3.4.14.21/src/ia32/lithium-ia32.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/lithium-ia32.h 2011-09-20 11:34:48.000000000 +0000 @@ -86,8 +86,7 @@ V(DivI) \ V(DoubleToI) \ V(ElementsKind) \ - V(ExternalArrayLength) \ - V(FixedArrayLength) \ + V(FixedArrayBaseLength) \ V(FunctionLiteral) \ V(GetCachedArrayIndex) \ V(GlobalObject) \ @@ -876,10 +875,11 @@ }; -class LBranch: public LControlInstruction<1, 0> { +class LBranch: public LControlInstruction<1, 1> { public: - explicit LBranch(LOperand* value) { + explicit LBranch(LOperand* value, LOperand* temp) { inputs_[0] = value; + temps_[0] = temp; } DECLARE_CONCRETE_INSTRUCTION(Branch, "branch") @@ -921,25 +921,15 @@ }; -class LExternalArrayLength: public LTemplateInstruction<1, 1, 0> { - public: - explicit LExternalArrayLength(LOperand* value) { - inputs_[0] = value; - } - - DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength, "external-array-length") - DECLARE_HYDROGEN_ACCESSOR(ExternalArrayLength) -}; - - -class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> { +class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> { public: - explicit 
LFixedArrayLength(LOperand* value) { + explicit LFixedArrayBaseLength(LOperand* value) { inputs_[0] = value; } - DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength, "fixed-array-length") - DECLARE_HYDROGEN_ACCESSOR(FixedArrayLength) + DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength, + "fixed-array-base-length") + DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength) }; @@ -2247,14 +2237,18 @@ template LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr, XMMRegister reg); + // Assigns an environment to an instruction. An instruction which can + // deoptimize must have an environment. LInstruction* AssignEnvironment(LInstruction* instr); + // Assigns a pointer map to an instruction. An instruction which can + // trigger a GC or a lazy deoptimization must have a pointer map. LInstruction* AssignPointerMap(LInstruction* instr); enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY }; - // By default we assume that instruction sequences generated for calls - // cannot deoptimize eagerly and we do not attach environment to this - // instruction. + // Marks a call for the register allocator. Assigns a pointer map to + // support GC and lazy deoptimization. Assigns an environment to support + // eager deoptimization if CAN_DEOPTIMIZE_EAGERLY. LInstruction* MarkAsCall( LInstruction* instr, HInstruction* hinstr, @@ -2265,7 +2259,8 @@ LInstruction* instr, int ast_id); void ClearInstructionPendingDeoptimizationEnvironment(); - LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env); + LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env, + int* argument_index_accumulator); void VisitInstruction(HInstruction* current); diff -Nru libv8-3.4.14.21/src/ia32/macro-assembler-ia32.cc libv8-3.5.10.24/src/ia32/macro-assembler-ia32.cc --- libv8-3.4.14.21/src/ia32/macro-assembler-ia32.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/macro-assembler-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -69,8 +69,8 @@ // Compute number of region covering addr. See Page::GetRegionNumberForAddress // method for more details. - and_(addr, Page::kPageAlignmentMask); shr(addr, Page::kRegionSizeLog2); + and_(addr, Page::kPageAlignmentMask >> Page::kRegionSizeLog2); // Set dirty mark for region. // Bit tests with a memory operand should be avoided on Intel processors, @@ -148,7 +148,7 @@ Label done; // Skip barrier if writing a smi. - ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTag == 0); JumpIfSmi(value, &done, Label::kNear); InNewSpace(object, value, equal, &done, Label::kNear); @@ -166,8 +166,8 @@ // Array access: calculate the destination address in the same manner as // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset // into an array of words. - ASSERT_EQ(1, kSmiTagSize); - ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTagSize == 1); + STATIC_ASSERT(kSmiTag == 0); lea(dst, Operand(object, dst, times_half_pointer_size, FixedArray::kHeaderSize - kHeapObjectTag)); } @@ -193,7 +193,7 @@ Label done; // Skip barrier if writing a smi. 
- ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTag == 0); JumpIfSmi(value, &done, Label::kNear); InNewSpace(object, value, equal, &done); @@ -263,6 +263,13 @@ } +void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { + // see ROOT_ACCESSOR macro in factory.h + Handle value(&isolate()->heap()->roots_address()[index]); + cmp(with, value); +} + + void MacroAssembler::CmpObjectType(Register heap_object, InstanceType type, Register map) { @@ -319,7 +326,7 @@ Register instance_type) { mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)); movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); - ASSERT(kNotStringTag != 0); + STATIC_ASSERT(kNotStringTag != 0); test(instance_type, Immediate(kIsNotStringMask)); return zero; } @@ -535,7 +542,12 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, HandlerType type) { // Adjust this code if not the case. - ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // The pc (return address) is already on TOS. if (try_location == IN_JAVASCRIPT) { if (type == TRY_CATCH_HANDLER) { @@ -544,6 +556,7 @@ push(Immediate(StackHandler::TRY_FINALLY)); } push(ebp); + push(esi); } else { ASSERT(try_location == IN_JS_ENTRY); // The frame pointer does not point to a JS frame so we save NULL @@ -551,6 +564,7 @@ // before dereferencing it to restore the context. push(Immediate(StackHandler::ENTRY)); push(Immediate(0)); // NULL frame pointer. + push(Immediate(Smi::FromInt(0))); // No context. } // Save the current handler as the next handler. push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address, @@ -563,7 +577,7 @@ void MacroAssembler::PopTryHandler() { - ASSERT_EQ(0, StackHandlerConstants::kNextOffset); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address, isolate()))); add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); @@ -572,8 +586,12 @@ void MacroAssembler::Throw(Register value) { // Adjust this code if not the case. - STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); - + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // eax must hold the exception. if (!value.is(eax)) { mov(eax, value); @@ -584,24 +602,21 @@ isolate()); mov(esp, Operand::StaticVariable(handler_address)); - // Restore next handler and frame pointer, discard handler state. - STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); + // Restore next handler, context, and frame pointer; discard handler state. pop(Operand::StaticVariable(handler_address)); - STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); - pop(ebp); - pop(edx); // Remove state. + pop(esi); // Context. + pop(ebp); // Frame pointer. + pop(edx); // State. 
- // Before returning we restore the context from the frame pointer if - // not NULL. The frame pointer is NULL in the exception handler of - // a JS entry frame. - Set(esi, Immediate(0)); // Tentatively set context pointer to NULL. + // If the handler is a JS frame, restore the context to the frame. + // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any + // of them. Label skip; - cmp(ebp, 0); + cmp(Operand(edx), Immediate(StackHandler::ENTRY)); j(equal, &skip, Label::kNear); - mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); bind(&skip); - STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); ret(0); } @@ -609,7 +624,12 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, Register value) { // Adjust this code if not the case. - STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // eax must hold the exception. if (!value.is(eax)) { @@ -635,7 +655,6 @@ bind(&done); // Set the top handler address to next handler past the current ENTRY handler. - STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); pop(Operand::StaticVariable(handler_address)); if (type == OUT_OF_MEMORY) { @@ -653,15 +672,14 @@ mov(Operand::StaticVariable(pending_exception), eax); } - // Clear the context pointer. + // Discard the context saved in the handler and clear the context pointer. + pop(edx); Set(esi, Immediate(0)); // Restore fp from handler and discard handler state. - STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); pop(ebp); pop(edx); // State. - STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); ret(0); } @@ -1190,6 +1208,42 @@ } +void MacroAssembler::AllocateSlicedString(Register result, + Register scratch1, + Register scratch2, + Label* gc_required) { + // Allocate heap number in new space. + AllocateInNewSpace(SlicedString::kSize, + result, + scratch1, + scratch2, + gc_required, + TAG_OBJECT); + + // Set the map. The other fields are left uninitialized. + mov(FieldOperand(result, HeapObject::kMapOffset), + Immediate(isolate()->factory()->sliced_string_map())); +} + + +void MacroAssembler::AllocateAsciiSlicedString(Register result, + Register scratch1, + Register scratch2, + Label* gc_required) { + // Allocate heap number in new space. + AllocateInNewSpace(SlicedString::kSize, + result, + scratch1, + scratch2, + gc_required, + TAG_OBJECT); + + // Set the map. The other fields are left uninitialized. + mov(FieldOperand(result, HeapObject::kMapOffset), + Immediate(isolate()->factory()->sliced_ascii_string_map())); +} + + // Copy memory, byte-by-byte, from source to destination. Not optimized for // long or aligned copies. The contents of scratch and length are destroyed. // Source and destination are incremented by length. @@ -2148,7 +2202,7 @@ Register scratch2, Label* failure) { // Check that both objects are not smis. 
- ASSERT_EQ(0, kSmiTag); + STATIC_ASSERT(kSmiTag == 0); mov(scratch1, Operand(object1)); and_(scratch1, Operand(object2)); JumpIfSmi(scratch1, failure); diff -Nru libv8-3.4.14.21/src/ia32/macro-assembler-ia32.h libv8-3.5.10.24/src/ia32/macro-assembler-ia32.h --- libv8-3.4.14.21/src/ia32/macro-assembler-ia32.h 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/macro-assembler-ia32.h 2011-08-31 09:03:56.000000000 +0000 @@ -209,6 +209,9 @@ void SafeSet(Register dst, const Immediate& x); void SafePush(const Immediate& x); + // Compare a register against a known root, e.g. undefined, null, true, ... + void CompareRoot(Register with, Heap::RootListIndex index); + // Compare object type for heap object. // Incoming register is heap_object and outgoing register is map. void CmpObjectType(Register heap_object, InstanceType type, Register map); @@ -272,8 +275,8 @@ // Smi tagging support. void SmiTag(Register reg) { - ASSERT(kSmiTag == 0); - ASSERT(kSmiTagSize == 1); + STATIC_ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTagSize == 1); add(reg, Operand(reg)); } void SmiUntag(Register reg) { @@ -282,9 +285,9 @@ // Modifies the register even if it does not contain a Smi! void SmiUntag(Register reg, Label* is_smi) { - ASSERT(kSmiTagSize == 1); + STATIC_ASSERT(kSmiTagSize == 1); sar(reg, kSmiTagSize); - ASSERT(kSmiTag == 0); + STATIC_ASSERT(kSmiTag == 0); j(not_carry, is_smi); } @@ -443,6 +446,17 @@ Register scratch2, Label* gc_required); + // Allocate a raw sliced string object. Only the map field of the result is + // initialized. + void AllocateSlicedString(Register result, + Register scratch1, + Register scratch2, + Label* gc_required); + void AllocateAsciiSlicedString(Register result, + Register scratch1, + Register scratch2, + Label* gc_required); + // Copy memory, byte-by-byte, from source to destination. Not optimized for // long or aligned copies. // The contents of index and scratch are destroyed. diff -Nru libv8-3.4.14.21/src/ia32/regexp-macro-assembler-ia32.cc libv8-3.5.10.24/src/ia32/regexp-macro-assembler-ia32.cc --- libv8-3.4.14.21/src/ia32/regexp-macro-assembler-ia32.cc 2011-05-16 12:14:13.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/regexp-macro-assembler-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1065,12 +1065,13 @@ } // Prepare for possible GC. - HandleScope handles; + HandleScope handles(isolate); Handle<Code> code_handle(re_code); Handle<String> subject(frame_entry<String*>(re_frame, kInputString)); + // Current string. - bool is_ascii = subject->IsAsciiRepresentation(); + bool is_ascii = subject->IsAsciiRepresentationUnderneath(); ASSERT(re_code->instruction_start() <= *return_address); ASSERT(*return_address <= @@ -1079,7 +1080,7 @@ MaybeObject* result = Execution::HandleStackGuardInterrupt(); if (*code_handle != re_code) { // Return address no longer valid - int delta = *code_handle - re_code; + int delta = code_handle->address() - re_code->address(); // Overwrite the return address on the stack. *return_address += delta; } @@ -1088,8 +1089,20 @@ return EXCEPTION; } + Handle<String> subject_tmp = subject; + int slice_offset = 0; + + // Extract the underlying string and the slice offset. + if (StringShape(*subject_tmp).IsCons()) { + subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first()); + } else if (StringShape(*subject_tmp).IsSliced()) { + SlicedString* slice = SlicedString::cast(*subject_tmp); + subject_tmp = Handle<String>(slice->parent()); + slice_offset = slice->offset(); + } + // String might have changed.
- if (subject->IsAsciiRepresentation() != is_ascii) { + if (subject_tmp->IsAsciiRepresentation() != is_ascii) { // If we changed between an ASCII and an UC16 string, the specialized // code cannot be used, and we need to restart regexp matching from // scratch (including, potentially, compiling a new version of the code). @@ -1100,8 +1113,8 @@ // be a sequential or external string with the same content. // Update the start and end pointers in the stack frame to the current // location (whether it has actually moved or not). - ASSERT(StringShape(*subject).IsSequential() || - StringShape(*subject).IsExternal()); + ASSERT(StringShape(*subject_tmp).IsSequential() || + StringShape(*subject_tmp).IsExternal()); // The original start address of the characters to match. const byte* start_address = frame_entry(re_frame, kInputStart); @@ -1109,13 +1122,14 @@ // Find the current start address of the same character at the current string // position. int start_index = frame_entry(re_frame, kStartIndex); - const byte* new_address = StringCharacterPosition(*subject, start_index); + const byte* new_address = StringCharacterPosition(*subject_tmp, + start_index + slice_offset); if (start_address != new_address) { // If there is a difference, update the object pointer and start and end // addresses in the RegExp stack frame to match the new value. const byte* end_address = frame_entry(re_frame, kInputEnd); - int byte_length = end_address - start_address; + int byte_length = static_cast(end_address - start_address); frame_entry(re_frame, kInputString) = *subject; frame_entry(re_frame, kInputStart) = new_address; frame_entry(re_frame, kInputEnd) = new_address + byte_length; diff -Nru libv8-3.4.14.21/src/ia32/stub-cache-ia32.cc libv8-3.5.10.24/src/ia32/stub-cache-ia32.cc --- libv8-3.4.14.21/src/ia32/stub-cache-ia32.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/ia32/stub-cache-ia32.cc 2011-08-31 09:03:56.000000000 +0000 @@ -273,7 +273,7 @@ // Check that the object is a string. __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); - ASSERT(kNotStringTag != 0); + STATIC_ASSERT(kNotStringTag != 0); __ test(scratch, Immediate(kNotStringTag)); __ j(not_zero, non_string_object); } @@ -3400,37 +3400,37 @@ __ JumpIfNotSmi(eax, &miss_force_generic); // Check that the index is in range. - __ mov(ecx, eax); - __ SmiUntag(ecx); // Untag the index. __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset)); - __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset)); + __ cmp(eax, FieldOperand(ebx, ExternalArray::kLengthOffset)); // Unsigned comparison catches both negative and too-large values. __ j(above_equal, &miss_force_generic); __ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset)); // ebx: base pointer of external storage switch (elements_kind) { case JSObject::EXTERNAL_BYTE_ELEMENTS: - __ movsx_b(eax, Operand(ebx, ecx, times_1, 0)); + __ SmiUntag(eax); // Untag the index. + __ movsx_b(eax, Operand(ebx, eax, times_1, 0)); break; case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: case JSObject::EXTERNAL_PIXEL_ELEMENTS: - __ movzx_b(eax, Operand(ebx, ecx, times_1, 0)); + __ SmiUntag(eax); // Untag the index. 
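The scale-factor changes in this stub follow from keeping the key smi-tagged: an ia32 smi stores the integer shifted left by one (see the SmiTag/SmiUntag asserts earlier in this patch), so the tagged key is already twice the element index and can address 2-, 4- and 8-byte elements with times_1, times_2 and times_4 respectively. A small self-contained check of that arithmetic, nothing V8-specific beyond the one-bit tag:

#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;  // ia32 smis hold the value shifted left by one
  for (int32_t index = 0; index < 1000; ++index) {
    int32_t tagged = index << kSmiTagSize;  // what SmiTag() produces
    // 16-bit elements: base + index * 2 == base + tagged * 1  (times_1)
    assert(index * 2 == tagged * 1);
    // 32-bit elements: base + index * 4 == base + tagged * 2  (times_2)
    assert(index * 4 == tagged * 2);
    // 64-bit elements: base + index * 8 == base + tagged * 4  (times_4)
    assert(index * 8 == tagged * 4);
  }
  return 0;
}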
+ __ movzx_b(eax, Operand(ebx, eax, times_1, 0)); break; case JSObject::EXTERNAL_SHORT_ELEMENTS: - __ movsx_w(eax, Operand(ebx, ecx, times_2, 0)); + __ movsx_w(eax, Operand(ebx, eax, times_1, 0)); break; case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - __ movzx_w(eax, Operand(ebx, ecx, times_2, 0)); + __ movzx_w(eax, Operand(ebx, eax, times_1, 0)); break; case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: case JSObject::EXTERNAL_INT_ELEMENTS: - __ mov(ecx, Operand(ebx, ecx, times_4, 0)); + __ mov(ecx, Operand(ebx, eax, times_2, 0)); break; case JSObject::EXTERNAL_FLOAT_ELEMENTS: - __ fld_s(Operand(ebx, ecx, times_4, 0)); + __ fld_s(Operand(ebx, eax, times_2, 0)); break; case JSObject::EXTERNAL_DOUBLE_ELEMENTS: - __ fld_d(Operand(ebx, ecx, times_8, 0)); + __ fld_d(Operand(ebx, eax, times_4, 0)); break; default: UNREACHABLE(); @@ -3556,9 +3556,7 @@ // Check that the index is in range. __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); - __ mov(ebx, ecx); - __ SmiUntag(ebx); - __ cmp(ebx, FieldOperand(edi, ExternalArray::kLengthOffset)); + __ cmp(ecx, FieldOperand(edi, ExternalArray::kLengthOffset)); // Unsigned comparison catches both negative and too-large values. __ j(above_equal, &slow); @@ -3568,7 +3566,6 @@ // edx: receiver // ecx: key // edi: elements array - // ebx: untagged index if (elements_kind == JSObject::EXTERNAL_PIXEL_ELEMENTS) { __ JumpIfNotSmi(eax, &slow); } else { @@ -3576,44 +3573,39 @@ } // smi case - __ mov(ecx, eax); // Preserve the value in eax. Key is no longer needed. - __ SmiUntag(ecx); + __ mov(ebx, eax); // Preserve the value in eax as the return value. + __ SmiUntag(ebx); __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset)); - // ecx: base pointer of external storage + // edi: base pointer of external storage switch (elements_kind) { case JSObject::EXTERNAL_PIXEL_ELEMENTS: - { // Clamp the value to [0..255]. - Label done; - __ test(ecx, Immediate(0xFFFFFF00)); - __ j(zero, &done, Label::kNear); - __ setcc(negative, ecx); // 1 if negative, 0 if positive. - __ dec_b(ecx); // 0 if negative, 255 if positive. - __ bind(&done); - } - __ mov_b(Operand(edi, ebx, times_1, 0), ecx); + __ ClampUint8(ebx); + __ SmiUntag(ecx); + __ mov_b(Operand(edi, ecx, times_1, 0), ebx); break; case JSObject::EXTERNAL_BYTE_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - __ mov_b(Operand(edi, ebx, times_1, 0), ecx); + __ SmiUntag(ecx); + __ mov_b(Operand(edi, ecx, times_1, 0), ebx); break; case JSObject::EXTERNAL_SHORT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - __ mov_w(Operand(edi, ebx, times_2, 0), ecx); + __ mov_w(Operand(edi, ecx, times_1, 0), ebx); break; case JSObject::EXTERNAL_INT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: - __ mov(Operand(edi, ebx, times_4, 0), ecx); + __ mov(Operand(edi, ecx, times_2, 0), ebx); break; case JSObject::EXTERNAL_FLOAT_ELEMENTS: case JSObject::EXTERNAL_DOUBLE_ELEMENTS: // Need to perform int-to-float conversion. - __ push(ecx); + __ push(ebx); __ fild_s(Operand(esp, 0)); - __ pop(ecx); + __ pop(ebx); if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) { - __ fstp_s(Operand(edi, ebx, times_4, 0)); + __ fstp_s(Operand(edi, ecx, times_2, 0)); } else { // elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS. 
- __ fstp_d(Operand(edi, ebx, times_8, 0)); + __ fstp_d(Operand(edi, ecx, times_4, 0)); } break; default: @@ -3629,7 +3621,6 @@ // edx: receiver // ecx: key // edi: elements array - // ebx: untagged index __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Immediate(masm->isolate()->factory()->heap_number_map())); __ j(not_equal, &slow); @@ -3638,15 +3629,14 @@ // +/-Infinity into integer arrays basically undefined. For more // reproducible behavior, convert these to zero. __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset)); - // ebx: untagged index // edi: base pointer of external storage if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) { __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); - __ fstp_s(Operand(edi, ebx, times_4, 0)); + __ fstp_s(Operand(edi, ecx, times_2, 0)); __ ret(0); } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); - __ fstp_d(Operand(edi, ebx, times_8, 0)); + __ fstp_d(Operand(edi, ecx, times_4, 0)); __ ret(0); } else { // Perform float-to-int conversion with truncation (round-to-zero) @@ -3661,27 +3651,20 @@ elements_kind != JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS) { ASSERT(CpuFeatures::IsSupported(SSE2)); CpuFeatures::Scope scope(SSE2); - __ cvttsd2si(ecx, FieldOperand(eax, HeapNumber::kValueOffset)); + __ cvttsd2si(ebx, FieldOperand(eax, HeapNumber::kValueOffset)); // ecx: untagged integer value switch (elements_kind) { case JSObject::EXTERNAL_PIXEL_ELEMENTS: - { // Clamp the value to [0..255]. - Label done; - __ test(ecx, Immediate(0xFFFFFF00)); - __ j(zero, &done, Label::kNear); - __ setcc(negative, ecx); // 1 if negative, 0 if positive. - __ dec_b(ecx); // 0 if negative, 255 if positive. - __ bind(&done); - } - __ mov_b(Operand(edi, ebx, times_1, 0), ecx); - break; + __ ClampUint8(ebx); + // Fall through. case JSObject::EXTERNAL_BYTE_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - __ mov_b(Operand(edi, ebx, times_1, 0), ecx); + __ SmiUntag(ecx); + __ mov_b(Operand(edi, ecx, times_1, 0), ebx); break; case JSObject::EXTERNAL_SHORT_ELEMENTS: case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - __ mov_w(Operand(edi, ebx, times_2, 0), ecx); + __ mov_w(Operand(edi, ecx, times_1, 0), ebx); break; default: UNREACHABLE(); @@ -3698,7 +3681,7 @@ __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); __ sub(Operand(esp), Immediate(2 * kPointerSize)); __ fisttp_d(Operand(esp, 0)); - __ pop(ecx); + __ pop(ebx); __ add(Operand(esp), Immediate(kPointerSize)); } else { ASSERT(CpuFeatures::IsSupported(SSE2)); @@ -3709,15 +3692,15 @@ // Note: we could do better for signed int arrays. __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset)); // We will need the key if we have to make the slow runtime call. - __ push(ecx); - __ LoadPowerOf2(xmm1, ecx, 31); - __ pop(ecx); + __ push(ebx); + __ LoadPowerOf2(xmm1, ebx, 31); + __ pop(ebx); __ ucomisd(xmm1, xmm0); __ j(above_equal, &slow); - __ cvttsd2si(ecx, Operand(xmm0)); + __ cvttsd2si(ebx, Operand(xmm0)); } - // ecx: untagged integer value - __ mov(Operand(edi, ebx, times_4, 0), ecx); + // ebx: untagged integer value + __ mov(Operand(edi, ecx, times_2, 0), ebx); } __ ret(0); // Return original value. } @@ -3981,10 +3964,12 @@ __ bind(&smi_value); // Value is a smi. convert to a double and store. - __ SmiUntag(eax); - __ push(eax); + // Preserve original value. 
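The ClampUint8 macro instruction adopted in this stub replaces the open-coded test/setcc/dec_b sequence that the hunks above delete; both saturate a 32-bit integer into [0, 255]. A plain C++ rendering of that behaviour, as a reference for what the removed assembly computed (the function name mirrors the macro, but this is a sketch, not V8 code):

#include <cassert>
#include <cstdint>

// In-range values pass through, negatives become 0, values above 255
// become 255, matching the removed test/setcc/dec_b sequence.
uint8_t ClampUint8(int32_t value) {
  if ((value & ~0xFF) == 0) return static_cast<uint8_t>(value);  // 0..255
  return value < 0 ? 0 : 255;
}

int main() {
  assert(ClampUint8(-5) == 0);
  assert(ClampUint8(0) == 0);
  assert(ClampUint8(128) == 128);
  assert(ClampUint8(255) == 255);
  assert(ClampUint8(300) == 255);
  return 0;
}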
+ __ mov(edx, eax); + __ SmiUntag(edx); + __ push(edx); __ fild_s(Operand(esp, 0)); - __ pop(eax); + __ pop(edx); __ fstp_d(FieldOperand(edi, ecx, times_4, FixedDoubleArray::kHeaderSize)); __ ret(0); diff -Nru libv8-3.4.14.21/src/ic.cc libv8-3.5.10.24/src/ic.cc --- libv8-3.4.14.21/src/ic.cc 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/ic.cc 2011-08-01 11:41:52.000000000 +0000 @@ -88,7 +88,8 @@ // function and the original code. JSFunction* function = JSFunction::cast(frame->function()); function->PrintName(); - int code_offset = address() - js_code->instruction_start(); + int code_offset = + static_cast(address() - js_code->instruction_start()); PrintF("+%d", code_offset); } else { PrintF(""); @@ -309,6 +310,7 @@ case Code::UNARY_OP_IC: case Code::BINARY_OP_IC: case Code::COMPARE_IC: + case Code::TO_BOOLEAN_IC: // Clearing these is tricky and does not // make any performance difference. return; @@ -842,14 +844,6 @@ } -#ifdef DEBUG -#define TRACE_IC_NAMED(msg, name) \ - if (FLAG_trace_ic) PrintF(msg, *(name)->ToCString()) -#else -#define TRACE_IC_NAMED(msg, name) -#endif - - MaybeObject* LoadIC::Load(State state, Handle object, Handle name) { @@ -2506,6 +2500,31 @@ } +RUNTIME_FUNCTION(MaybeObject*, ToBoolean_Patch) { + ASSERT(args.length() == 3); + + HandleScope scope(isolate); + Handle object = args.at(0); + Register tos = Register::from_code(args.smi_at(1)); + ToBooleanStub::Types old_types(args.smi_at(2)); + + ToBooleanStub::Types new_types(old_types); + bool to_boolean_value = new_types.Record(object); + old_types.TraceTransition(new_types); + + ToBooleanStub stub(tos, new_types); + Handle code = stub.GetCode(); + ToBooleanIC ic(isolate); + ic.patch(*code); + return Smi::FromInt(to_boolean_value ? 1 : 0); +} + + +void ToBooleanIC::patch(Code* code) { + set_target(code); +} + + static const Address IC_utilities[] = { #define ADDR(name) FUNCTION_ADDR(name), IC_UTIL_LIST(ADDR) diff -Nru libv8-3.4.14.21/src/ic.h libv8-3.5.10.24/src/ic.h --- libv8-3.4.14.21/src/ic.h 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/ic.h 2011-07-25 11:05:35.000000000 +0000 @@ -59,7 +59,8 @@ ICU(StoreInterceptorProperty) \ ICU(UnaryOp_Patch) \ ICU(BinaryOp_Patch) \ - ICU(CompareIC_Miss) + ICU(CompareIC_Miss) \ + ICU(ToBoolean_Patch) // // IC is the base class for LoadIC, StoreIC, CallIC, KeyedLoadIC, // and KeyedStoreIC. @@ -720,6 +721,15 @@ Token::Value op_; }; + +class ToBooleanIC: public IC { + public: + explicit ToBooleanIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { } + + void patch(Code* code); +}; + + // Helper for BinaryOpIC and CompareIC. void PatchInlinedSmiCode(Address address); diff -Nru libv8-3.4.14.21/src/interpreter-irregexp.cc libv8-3.5.10.24/src/interpreter-irregexp.cc --- libv8-3.4.14.21/src/interpreter-irregexp.cc 2011-03-23 11:19:56.000000000 +0000 +++ libv8-3.5.10.24/src/interpreter-irregexp.cc 2011-08-24 12:02:41.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -635,8 +635,9 @@ AssertNoAllocation a; const byte* code_base = code_array->GetDataStartAddress(); uc16 previous_char = '\n'; - if (subject->IsAsciiRepresentation()) { - Vector subject_vector = subject->ToAsciiVector(); + String::FlatContent subject_content = subject->GetFlatContent(); + if (subject_content.IsAscii()) { + Vector subject_vector = subject_content.ToAsciiVector(); if (start_position != 0) previous_char = subject_vector[start_position - 1]; return RawMatch(isolate, code_base, @@ -645,7 +646,8 @@ start_position, previous_char); } else { - Vector subject_vector = subject->ToUC16Vector(); + ASSERT(subject_content.IsTwoByte()); + Vector subject_vector = subject_content.ToUC16Vector(); if (start_position != 0) previous_char = subject_vector[start_position - 1]; return RawMatch(isolate, code_base, diff -Nru libv8-3.4.14.21/src/isolate.cc libv8-3.5.10.24/src/isolate.cc --- libv8-3.4.14.21/src/isolate.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/isolate.cc 2011-08-22 11:03:23.000000000 +0000 @@ -76,6 +76,10 @@ ThreadLocalTop::ThreadLocalTop() { InitializeInternal(); + // This flag may be set using v8::V8::IgnoreOutOfMemoryException() + // before an isolate is initialized. The initialize methods below do + // not touch it to preserve its value. + ignore_out_of_memory_ = false; } @@ -382,7 +386,6 @@ if (Thread::GetThreadLocal(isolate_key_) == NULL) { Thread::SetThreadLocal(isolate_key_, default_isolate_); } - CHECK(default_isolate_->PreInit()); } @@ -654,6 +657,7 @@ incomplete_message_ = &accumulator; PrintStack(&accumulator); accumulator.OutputToStdOut(); + InitializeLoggingAndCounters(); accumulator.Log(); incomplete_message_ = NULL; stack_trace_nesting_level_ = 0; @@ -1331,6 +1335,7 @@ if (list_ == data) list_ = data->next_; if (data->next_ != NULL) data->next_->prev_ = data->prev_; if (data->prev_ != NULL) data->prev_->next_ = data->next_; + delete data; } @@ -1375,11 +1380,15 @@ bootstrapper_(NULL), runtime_profiler_(NULL), compilation_cache_(NULL), - counters_(new Counters()), + counters_(NULL), code_range_(NULL), + // Must be initialized early to allow v8::SetResourceConstraints calls. break_access_(OS::CreateMutex()), - logger_(new Logger()), - stats_table_(new StatsTable()), + debugger_initialized_(false), + // Must be initialized early to allow v8::Debug calls. + debugger_access_(OS::CreateMutex()), + logger_(NULL), + stats_table_(NULL), stub_cache_(NULL), deoptimizer_data_(NULL), capture_stack_trace_for_uncaught_exceptions_(false), @@ -1510,7 +1519,7 @@ logger_->TearDown(); // The default isolate is re-initializable due to legacy API. - state_ = PREINITIALIZED; + state_ = UNINITIALIZED; } } @@ -1525,6 +1534,12 @@ Isolate::~Isolate() { TRACE_ISOLATE(destructor); + // Has to be called while counters_ are still alive. 
+ zone_.DeleteKeptSegment(); + + delete[] assembler_spare_buffer_; + assembler_spare_buffer_ = NULL; + delete unicode_cache_; unicode_cache_ = NULL; @@ -1558,6 +1573,8 @@ handle_scope_implementer_ = NULL; delete break_access_; break_access_ = NULL; + delete debugger_access_; + debugger_access_ = NULL; delete compilation_cache_; compilation_cache_ = NULL; @@ -1583,6 +1600,9 @@ delete global_handles_; global_handles_ = NULL; + delete external_reference_table_; + external_reference_table_ = NULL; + #ifdef ENABLE_DEBUGGER_SUPPORT delete debugger_; debugger_ = NULL; @@ -1592,58 +1612,6 @@ } -bool Isolate::PreInit() { - if (state_ != UNINITIALIZED) return true; - - TRACE_ISOLATE(preinit); - - ASSERT(Isolate::Current() == this); -#ifdef ENABLE_DEBUGGER_SUPPORT - debug_ = new Debug(this); - debugger_ = new Debugger(this); -#endif - - memory_allocator_ = new MemoryAllocator(); - memory_allocator_->isolate_ = this; - code_range_ = new CodeRange(); - code_range_->isolate_ = this; - - // Safe after setting Heap::isolate_, initializing StackGuard and - // ensuring that Isolate::Current() == this. - heap_.SetStackLimits(); - -#ifdef DEBUG - DisallowAllocationFailure disallow_allocation_failure; -#endif - -#define C(name) isolate_addresses_[Isolate::k_##name] = \ - reinterpret_cast
(name()); - ISOLATE_ADDRESS_LIST(C) -#undef C - - string_tracker_ = new StringTracker(); - string_tracker_->isolate_ = this; - compilation_cache_ = new CompilationCache(this); - transcendental_cache_ = new TranscendentalCache(); - keyed_lookup_cache_ = new KeyedLookupCache(); - context_slot_cache_ = new ContextSlotCache(); - descriptor_lookup_cache_ = new DescriptorLookupCache(); - unicode_cache_ = new UnicodeCache(); - pc_to_code_cache_ = new PcToCodeCache(this); - write_input_buffer_ = new StringInputBuffer(); - global_handles_ = new GlobalHandles(this); - bootstrapper_ = new Bootstrapper(); - handle_scope_implementer_ = new HandleScopeImplementer(this); - stub_cache_ = new StubCache(this); - ast_sentinels_ = new AstSentinels(); - regexp_stack_ = new RegExpStack(); - regexp_stack_->isolate_ = this; - - state_ = PREINITIALIZED; - return true; -} - - void Isolate::InitializeThreadLocal() { thread_local_top_.isolate_ = this; thread_local_top_.Initialize(); @@ -1680,19 +1648,71 @@ } +void Isolate::InitializeLoggingAndCounters() { + if (logger_ == NULL) { + logger_ = new Logger; + } + if (counters_ == NULL) { + counters_ = new Counters; + } +} + + +void Isolate::InitializeDebugger() { +#ifdef ENABLE_DEBUGGER_SUPPORT + ScopedLock lock(debugger_access_); + if (NoBarrier_Load(&debugger_initialized_)) return; + InitializeLoggingAndCounters(); + debug_ = new Debug(this); + debugger_ = new Debugger(this); + Release_Store(&debugger_initialized_, true); +#endif +} + + bool Isolate::Init(Deserializer* des) { ASSERT(state_ != INITIALIZED); - + ASSERT(Isolate::Current() == this); TRACE_ISOLATE(init); - bool create_heap_objects = des == NULL; - #ifdef DEBUG // The initialization process does not handle memory exhaustion. DisallowAllocationFailure disallow_allocation_failure; #endif - if (state_ == UNINITIALIZED && !PreInit()) return false; + InitializeLoggingAndCounters(); + + InitializeDebugger(); + + memory_allocator_ = new MemoryAllocator(this); + code_range_ = new CodeRange(this); + + // Safe after setting Heap::isolate_, initializing StackGuard and + // ensuring that Isolate::Current() == this. + heap_.SetStackLimits(); + +#define C(name) isolate_addresses_[Isolate::k_##name] = \ + reinterpret_cast
(name()); + ISOLATE_ADDRESS_LIST(C) +#undef C + + string_tracker_ = new StringTracker(); + string_tracker_->isolate_ = this; + compilation_cache_ = new CompilationCache(this); + transcendental_cache_ = new TranscendentalCache(); + keyed_lookup_cache_ = new KeyedLookupCache(); + context_slot_cache_ = new ContextSlotCache(); + descriptor_lookup_cache_ = new DescriptorLookupCache(); + unicode_cache_ = new UnicodeCache(); + pc_to_code_cache_ = new PcToCodeCache(this); + write_input_buffer_ = new StringInputBuffer(); + global_handles_ = new GlobalHandles(this); + bootstrapper_ = new Bootstrapper(); + handle_scope_implementer_ = new HandleScopeImplementer(this); + stub_cache_ = new StubCache(this); + ast_sentinels_ = new AstSentinels(); + regexp_stack_ = new RegExpStack(); + regexp_stack_->isolate_ = this; // Enable logging before setting up the heap logger_->Setup(); @@ -1715,7 +1735,8 @@ stack_guard_.InitThread(lock); } - // Setup the object heap + // Setup the object heap. + const bool create_heap_objects = (des == NULL); ASSERT(!heap_.HasBeenSetup()); if (!heap_.Setup(create_heap_objects)) { V8::SetFatalError(); @@ -1775,6 +1796,16 @@ } +// Initialized lazily to allow early +// v8::V8::SetAddHistogramSampleFunction calls. +StatsTable* Isolate::stats_table() { + if (stats_table_ == NULL) { + stats_table_ = new StatsTable; + } + return stats_table_; +} + + void Isolate::Enter() { Isolate* current_isolate = NULL; PerIsolateThreadData* current_data = CurrentPerIsolateThreadData(); @@ -1814,8 +1845,6 @@ SetIsolateThreadLocals(this, data); - CHECK(PreInit()); - // In case it's the first time some thread enters the isolate. set_thread_id(data->thread_id()); } diff -Nru libv8-3.4.14.21/src/isolate.h libv8-3.5.10.24/src/isolate.h --- libv8-3.4.14.21/src/isolate.h 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/isolate.h 2011-08-10 11:27:35.000000000 +0000 @@ -256,6 +256,9 @@ // Call back function to report unsafe JS accesses. v8::FailedAccessCheckCallback failed_access_check_callback_; + // Whether out of memory exceptions should be ignored. + bool ignore_out_of_memory_; + private: void InitializeInternal(); @@ -446,6 +449,13 @@ return reinterpret_cast(Thread::GetThreadLocal(isolate_key_)); } + // Usually called by Init(), but can be called early e.g. to allow + // testing components that require logging but not the whole + // isolate. + // + // Safe to call more than once. + void InitializeLoggingAndCounters(); + bool Init(Deserializer* des); bool IsInitialized() { return state_ == INITIALIZED; } @@ -498,10 +508,12 @@ // switched to non-legacy behavior). static void EnterDefaultIsolate(); - // Debug. // Mutex for serializing access to break control structures. Mutex* break_access() { return break_access_; } + // Mutex for serializing access to debugger. + Mutex* debugger_access() { return debugger_access_; } + Address get_address_from_id(AddressId id); // Access to top context (where the current function object was created). @@ -661,6 +673,12 @@ // Tells whether the current context has experienced an out of memory // exception. 
bool is_out_of_memory(); + bool ignore_out_of_memory() { + return thread_local_top_.ignore_out_of_memory_; + } + void set_ignore_out_of_memory(bool value) { + thread_local_top_.ignore_out_of_memory_ = value; + } void PrintCurrentStackTrace(FILE* out); void PrintStackTrace(FILE* out, char* thread_data); @@ -769,14 +787,24 @@ #undef GLOBAL_CONTEXT_FIELD_ACCESSOR Bootstrapper* bootstrapper() { return bootstrapper_; } - Counters* counters() { return counters_; } + Counters* counters() { + // Call InitializeLoggingAndCounters() if logging is needed before + // the isolate is fully initialized. + ASSERT(counters_ != NULL); + return counters_; + } CodeRange* code_range() { return code_range_; } RuntimeProfiler* runtime_profiler() { return runtime_profiler_; } CompilationCache* compilation_cache() { return compilation_cache_; } - Logger* logger() { return logger_; } + Logger* logger() { + // Call InitializeLoggingAndCounters() if logging is needed before + // the isolate is fully initialized. + ASSERT(logger_ != NULL); + return logger_; + } StackGuard* stack_guard() { return &stack_guard_; } Heap* heap() { return &heap_; } - StatsTable* stats_table() { return stats_table_; } + StatsTable* stats_table(); StubCache* stub_cache() { return stub_cache_; } DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; } ThreadLocalTop* thread_local_top() { return &thread_local_top_; } @@ -877,8 +905,14 @@ void PreallocatedStorageInit(size_t size); #ifdef ENABLE_DEBUGGER_SUPPORT - Debugger* debugger() { return debugger_; } - Debug* debug() { return debug_; } + Debugger* debugger() { + if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger(); + return debugger_; + } + Debug* debug() { + if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger(); + return debug_; + } #endif inline bool DebuggerHasBreakPoints(); @@ -1010,8 +1044,6 @@ static Isolate* default_isolate_; static ThreadDataTable* thread_data_table_; - bool PreInit(); - void Deinit(); static void SetIsolateThreadLocals(Isolate* isolate, @@ -1019,7 +1051,6 @@ enum State { UNINITIALIZED, // Some components may not have been allocated. - PREINITIALIZED, // Components have been allocated but not initialized. INITIALIZED // All components are fully initialized. }; @@ -1063,6 +1094,8 @@ void PropagatePendingExceptionToExternalTryCatch(); + void InitializeDebugger(); + int stack_trace_nesting_level_; StringStream* incomplete_message_; // The preallocated memory thread singleton. 
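The debugger_initialized_/debugger_access_ pair introduced in these isolate hunks is a lazy, thread-safe construction guard: a lock-free flag check on the fast path, and a mutex-protected initializer that publishes the flag only after the components exist. A minimal sketch of that pattern using standard C++ primitives in place of V8's Mutex/ScopedLock and NoBarrier_Load/Release_Store (acquire/release ordering is used here for safety; the real fast path uses a plain NoBarrier_Load):

#include <atomic>
#include <mutex>

class Debugger {};

class DebuggerHolder {
 public:
  ~DebuggerHolder() { delete debugger_; }

  Debugger* debugger() {
    // Fast path: no lock once the flag has been published.
    if (!initialized_.load(std::memory_order_acquire)) InitializeDebugger();
    return debugger_;
  }

 private:
  void InitializeDebugger() {
    std::lock_guard<std::mutex> lock(mutex_);
    if (initialized_.load(std::memory_order_relaxed)) return;  // lost the race
    debugger_ = new Debugger;
    // Publish only after the component is fully constructed.
    initialized_.store(true, std::memory_order_release);
  }

  std::mutex mutex_;
  std::atomic<bool> initialized_{false};
  Debugger* debugger_ = nullptr;
};

int main() {
  DebuggerHolder holder;
  return holder.debugger() != nullptr ? 0 : 1;
}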
@@ -1076,6 +1109,8 @@ Counters* counters_; CodeRange* code_range_; Mutex* break_access_; + Atomic32 debugger_initialized_; + Mutex* debugger_access_; Heap heap_; Logger* logger_; StackGuard stack_guard_; @@ -1165,6 +1200,7 @@ friend class Simulator; friend class StackGuard; friend class ThreadId; + friend class TestMemoryAllocatorScope; friend class v8::Isolate; friend class v8::Locker; friend class v8::Unlocker; diff -Nru libv8-3.4.14.21/src/json.js libv8-3.5.10.24/src/json.js --- libv8-3.4.14.21/src/json.js 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/json.js 2011-08-29 10:41:00.000000000 +0000 @@ -237,7 +237,7 @@ } } stack.pop(); - builder.push("]"); + builder.push("]"); } diff -Nru libv8-3.4.14.21/src/json-parser.h libv8-3.5.10.24/src/json-parser.h --- libv8-3.4.14.21/src/json-parser.h 2011-08-08 06:50:42.000000000 +0000 +++ libv8-3.5.10.24/src/json-parser.h 2011-08-10 11:27:35.000000000 +0000 @@ -166,7 +166,8 @@ template Handle JsonParser::ParseJson(Handle source) { isolate_ = source->map()->isolate(); - source_ = Handle(source->TryFlattenGetString()); + FlattenString(source); + source_ = source; source_length_ = source_->length(); // Optimized fast case where we only have ASCII characters. diff -Nru libv8-3.4.14.21/src/jsregexp.cc libv8-3.5.10.24/src/jsregexp.cc --- libv8-3.4.14.21/src/jsregexp.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/jsregexp.cc 2011-08-29 17:44:42.000000000 +0000 @@ -212,19 +212,7 @@ RegExpImpl::SetCapture(array, 1, to); } - /* template , typename PatternChar> -static int ReStringMatch(Vector sub_vector, - Vector pat_vector, - int start_index) { - - int pattern_length = pat_vector.length(); - if (pattern_length == 0) return start_index; - - int subject_length = sub_vector.length(); - if (start_index + pattern_length > subject_length) return -1; - return SearchString(sub_vector, pat_vector, start_index); -} - */ + Handle RegExpImpl::AtomExec(Handle re, Handle subject, int index, @@ -236,38 +224,41 @@ if (!subject->IsFlat()) FlattenString(subject); AssertNoAllocation no_heap_allocation; // ensure vectors stay valid - // Extract flattened substrings of cons strings before determining asciiness. - String* seq_sub = *subject; - if (seq_sub->IsConsString()) seq_sub = ConsString::cast(seq_sub)->first(); String* needle = String::cast(re->DataAt(JSRegExp::kAtomPatternIndex)); int needle_len = needle->length(); + ASSERT(needle->IsFlat()); if (needle_len != 0) { - if (index + needle_len > subject->length()) - return isolate->factory()->null_value(); + if (index + needle_len > subject->length()) { + return isolate->factory()->null_value(); + } + String::FlatContent needle_content = needle->GetFlatContent(); + String::FlatContent subject_content = subject->GetFlatContent(); + ASSERT(needle_content.IsFlat()); + ASSERT(subject_content.IsFlat()); // dispatch on type of strings - index = (needle->IsAsciiRepresentation() - ? (seq_sub->IsAsciiRepresentation() + index = (needle_content.IsAscii() + ? (subject_content.IsAscii() ? SearchString(isolate, - seq_sub->ToAsciiVector(), - needle->ToAsciiVector(), + subject_content.ToAsciiVector(), + needle_content.ToAsciiVector(), index) : SearchString(isolate, - seq_sub->ToUC16Vector(), - needle->ToAsciiVector(), + subject_content.ToUC16Vector(), + needle_content.ToAsciiVector(), index)) - : (seq_sub->IsAsciiRepresentation() + : (subject_content.IsAscii() ? 
SearchString(isolate, - seq_sub->ToAsciiVector(), - needle->ToUC16Vector(), + subject_content.ToAsciiVector(), + needle_content.ToUC16Vector(), index) : SearchString(isolate, - seq_sub->ToUC16Vector(), - needle->ToUC16Vector(), + subject_content.ToUC16Vector(), + needle_content.ToUC16Vector(), index))); - if (index == -1) return FACTORY->null_value(); + if (index == -1) return isolate->factory()->null_value(); } ASSERT(last_match_info->HasFastElements()); @@ -355,10 +346,7 @@ JSRegExp::Flags flags = re->GetFlags(); Handle pattern(re->Pattern()); - if (!pattern->IsFlat()) { - FlattenString(pattern); - } - + if (!pattern->IsFlat()) FlattenString(pattern); RegExpCompileData compile_data; FlatStringReader reader(isolate, pattern); if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(), @@ -442,22 +430,12 @@ int RegExpImpl::IrregexpPrepare(Handle regexp, Handle subject) { - if (!subject->IsFlat()) { - FlattenString(subject); - } + if (!subject->IsFlat()) FlattenString(subject); + // Check the asciiness of the underlying storage. - bool is_ascii; - { - AssertNoAllocation no_gc; - String* sequential_string = *subject; - if (subject->IsConsString()) { - sequential_string = ConsString::cast(*subject)->first(); - } - is_ascii = sequential_string->IsAsciiRepresentation(); - } - if (!EnsureCompiledIrregexp(regexp, is_ascii)) { - return -1; - } + bool is_ascii = subject->IsAsciiRepresentationUnderneath(); + if (!EnsureCompiledIrregexp(regexp, is_ascii)) return -1; + #ifdef V8_INTERPRETED_REGEXP // Byte-code regexp needs space allocated for all its registers. return IrregexpNumberOfRegisters(FixedArray::cast(regexp->data())); @@ -482,15 +460,11 @@ ASSERT(index <= subject->length()); ASSERT(subject->IsFlat()); - // A flat ASCII string might have a two-byte first part. - if (subject->IsConsString()) { - subject = Handle(ConsString::cast(*subject)->first(), isolate); - } + bool is_ascii = subject->IsAsciiRepresentationUnderneath(); #ifndef V8_INTERPRETED_REGEXP ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2); do { - bool is_ascii = subject->IsAsciiRepresentation(); EnsureCompiledIrregexp(regexp, is_ascii); Handle code(IrregexpNativeCode(*irregexp, is_ascii), isolate); NativeRegExpMacroAssembler::Result res = @@ -518,13 +492,13 @@ // being internal and external, and even between being ASCII and UC16, // but the characters are always the same). IrregexpPrepare(regexp, subject); + is_ascii = subject->IsAsciiRepresentationUnderneath(); } while (true); UNREACHABLE(); return RE_EXCEPTION; #else // V8_INTERPRETED_REGEXP ASSERT(output.length() >= IrregexpNumberOfRegisters(*irregexp)); - bool is_ascii = subject->IsAsciiRepresentation(); // We must have done EnsureCompiledIrregexp, so we can get the number of // registers. 
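The String::FlatContent-based code above replaces direct ToAsciiVector/ToUC16Vector calls on the subject: after flattening, the caller asks once whether the character data is one-byte or two-byte and then dispatches to a search specialized for each subject/needle combination. A compact stand-alone model of that dispatch; FlatContent and SearchString here are simplified stand-ins under assumed shapes, not the V8 API:

#include <cassert>
#include <cstdint>
#include <string>

// Simplified stand-in: after flattening, the data is either one byte per
// character or two, and callers branch on which.
class FlatContent {
 public:
  static FlatContent Ascii(std::string s) {
    return FlatContent(true, std::move(s), std::u16string());
  }
  static FlatContent TwoByte(std::u16string s) {
    return FlatContent(false, std::string(), std::move(s));
  }
  bool IsAscii() const { return is_ascii_; }
  const std::string& ToAsciiVector() const { return ascii_; }
  const std::u16string& ToUC16Vector() const { return two_byte_; }

 private:
  FlatContent(bool is_ascii, std::string a, std::u16string t)
      : is_ascii_(is_ascii), ascii_(std::move(a)), two_byte_(std::move(t)) {}
  bool is_ascii_;
  std::string ascii_;
  std::u16string two_byte_;
};

// Search specialized on the character types of subject and pattern; the
// IsAscii() checks pick one of four instantiations, as in AtomExec above.
template <typename SubjectString, typename PatternString>
int SearchString(const SubjectString& subject, const PatternString& pattern,
                 int index) {
  for (size_t i = index; i + pattern.size() <= subject.size(); ++i) {
    size_t j = 0;
    while (j < pattern.size() &&
           static_cast<uint32_t>(subject[i + j]) ==
               static_cast<uint32_t>(pattern[j])) {
      ++j;
    }
    if (j == pattern.size()) return static_cast<int>(i);
  }
  return -1;
}

int main() {
  FlatContent subject = FlatContent::Ascii("needle in a haystack");
  FlatContent needle = FlatContent::Ascii("hay");
  int index = -1;
  if (subject.IsAscii() && needle.IsAscii()) {  // one of the four branches
    index = SearchString(subject.ToAsciiVector(), needle.ToAsciiVector(), 0);
  }
  assert(index == 12);
  return 0;
}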
int* register_vector = output.start(); diff -Nru libv8-3.4.14.21/src/liveobjectlist.cc libv8-3.5.10.24/src/liveobjectlist.cc --- libv8-3.4.14.21/src/liveobjectlist.cc 2011-07-06 11:27:02.000000000 +0000 +++ libv8-3.5.10.24/src/liveobjectlist.cc 2011-08-17 14:33:23.000000000 +0000 @@ -36,11 +36,12 @@ #include "global-handles.h" #include "heap.h" #include "inspector.h" +#include "isolate.h" #include "list-inl.h" #include "liveobjectlist-inl.h" #include "string-stream.h" -#include "top.h" #include "v8utils.h" +#include "v8conversions.h" namespace v8 { namespace internal { @@ -109,7 +110,7 @@ \ v(Context, "meta: Context") \ v(ByteArray, "meta: ByteArray") \ - v(PixelArray, "meta: PixelArray") \ + v(ExternalPixelArray, "meta: PixelArray") \ v(ExternalArray, "meta: ExternalArray") \ v(FixedArray, "meta: FixedArray") \ v(String, "String") \ @@ -211,8 +212,9 @@ static bool InSpace(AllocationSpace space, HeapObject *heap_obj) { + Heap* heap = ISOLATE->heap(); if (space != LO_SPACE) { - return Heap::InSpace(heap_obj, space); + return heap->InSpace(heap_obj, space); } // This is an optimization to speed up the check for an object in the LO @@ -224,11 +226,11 @@ int first_space = static_cast(FIRST_SPACE); int last_space = static_cast(LO_SPACE); for (int sp = first_space; sp < last_space; sp++) { - if (Heap::InSpace(heap_obj, static_cast(sp))) { + if (heap->InSpace(heap_obj, static_cast(sp))) { return false; } } - SLOW_ASSERT(Heap::InSpace(heap_obj, LO_SPACE)); + SLOW_ASSERT(heap->InSpace(heap_obj, LO_SPACE)); return true; } @@ -285,7 +287,7 @@ void LolFilter::InitTypeFilter(Handle filter_obj) { - Handle type_sym = Factory::LookupAsciiSymbol("type"); + Handle type_sym = FACTORY->LookupAsciiSymbol("type"); MaybeObject* maybe_result = filter_obj->GetProperty(*type_sym); Object* type_obj; if (maybe_result->ToObject(&type_obj)) { @@ -301,7 +303,7 @@ void LolFilter::InitSpaceFilter(Handle filter_obj) { - Handle space_sym = Factory::LookupAsciiSymbol("space"); + Handle space_sym = FACTORY->LookupAsciiSymbol("space"); MaybeObject* maybe_result = filter_obj->GetProperty(*space_sym); Object* space_obj; if (maybe_result->ToObject(&space_obj)) { @@ -317,7 +319,7 @@ void LolFilter::InitPropertyFilter(Handle filter_obj) { - Handle prop_sym = Factory::LookupAsciiSymbol("prop"); + Handle prop_sym = FACTORY->LookupAsciiSymbol("prop"); MaybeObject* maybe_result = filter_obj->GetProperty(*prop_sym); Object* prop_obj; if (maybe_result->ToObject(&prop_obj)) { @@ -571,7 +573,9 @@ Handle detail, Handle desc, Handle error) { - detail = Factory::NewJSObject(Top::object_function()); + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + detail = factory->NewJSObject(isolate->object_function()); if (detail->IsFailure()) { error = detail; return false; @@ -586,7 +590,7 @@ desc_str = buffer; size = obj->Size(); } - desc = Factory::NewStringFromAscii(CStrVector(desc_str)); + desc = factory->NewStringFromAscii(CStrVector(desc_str)); if (desc->IsFailure()) { error = desc; return false; @@ -663,10 +667,13 @@ int index = 0; int count = 0; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + // Prefetch some needed symbols. 
- Handle id_sym = Factory::LookupAsciiSymbol("id"); - Handle desc_sym = Factory::LookupAsciiSymbol("desc"); - Handle size_sym = Factory::LookupAsciiSymbol("size"); + Handle id_sym = factory->LookupAsciiSymbol("id"); + Handle desc_sym = factory->LookupAsciiSymbol("desc"); + Handle size_sym = factory->LookupAsciiSymbol("size"); // Fill the array with the lol object details. Handle detail; @@ -1089,7 +1096,9 @@ // Captures a current snapshot of all objects in the heap. MaybeObject* LiveObjectList::Capture() { - HandleScope scope; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + HandleScope scope(isolate); // Count the number of objects in the heap. int total_count = CountHeapObjects(); @@ -1139,11 +1148,11 @@ #endif } - Handle id_sym = Factory::LookupAsciiSymbol("id"); - Handle count_sym = Factory::LookupAsciiSymbol("count"); - Handle size_sym = Factory::LookupAsciiSymbol("size"); + Handle id_sym = factory->LookupAsciiSymbol("id"); + Handle count_sym = factory->LookupAsciiSymbol("count"); + Handle size_sym = factory->LookupAsciiSymbol("size"); - Handle result = Factory::NewJSObject(Top::object_function()); + Handle result = factory->NewJSObject(isolate->object_function()); if (result->IsFailure()) return Object::cast(*result); { MaybeObject* maybe_result = result->SetProperty(*id_sym, @@ -1259,7 +1268,10 @@ int start, int dump_limit, LolFilter* filter) { - HandleScope scope; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + + HandleScope scope(isolate); // Calculate the number of entries of the dump. int count = -1; @@ -1277,7 +1289,7 @@ } // Allocate an array to hold the result. - Handle elements_arr = Factory::NewFixedArray(dump_limit); + Handle elements_arr = factory->NewFixedArray(dump_limit); if (elements_arr->IsFailure()) return Object::cast(*elements_arr); // Fill in the dump. @@ -1292,11 +1304,11 @@ MaybeObject* maybe_result; // Allocate the result body. - Handle body = Factory::NewJSObject(Top::object_function()); + Handle body = factory->NewJSObject(isolate->object_function()); if (body->IsFailure()) return Object::cast(*body); // Set the updated body.count. - Handle count_sym = Factory::LookupAsciiSymbol("count"); + Handle count_sym = factory->LookupAsciiSymbol("count"); maybe_result = body->SetProperty(*count_sym, Smi::FromInt(count), NONE, @@ -1305,7 +1317,7 @@ // Set the updated body.size if appropriate. if (size >= 0) { - Handle size_sym = Factory::LookupAsciiSymbol("size"); + Handle size_sym = factory->LookupAsciiSymbol("size"); maybe_result = body->SetProperty(*size_sym, Smi::FromInt(size), NONE, @@ -1314,7 +1326,7 @@ } // Set body.first_index. - Handle first_sym = Factory::LookupAsciiSymbol("first_index"); + Handle first_sym = factory->LookupAsciiSymbol("first_index"); maybe_result = body->SetProperty(*first_sym, Smi::FromInt(start), NONE, @@ -1322,12 +1334,12 @@ if (maybe_result->IsFailure()) return maybe_result; // Allocate the JSArray of the elements. - Handle elements = Factory::NewJSObject(Top::array_function()); + Handle elements = factory->NewJSObject(isolate->array_function()); if (elements->IsFailure()) return Object::cast(*elements); Handle::cast(elements)->SetContent(*elements_arr); // Set body.elements. 
- Handle elements_sym = Factory::LookupAsciiSymbol("elements"); + Handle elements_sym = factory->LookupAsciiSymbol("elements"); maybe_result = body->SetProperty(*elements_sym, *elements, NONE, @@ -1381,6 +1393,9 @@ LiveObjectSummary summary(filter); writer->Write(&summary); + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + // The result body will look like this: // body: { // count: , @@ -1398,21 +1413,21 @@ // } // Prefetch some needed symbols. - Handle desc_sym = Factory::LookupAsciiSymbol("desc"); - Handle count_sym = Factory::LookupAsciiSymbol("count"); - Handle size_sym = Factory::LookupAsciiSymbol("size"); - Handle summary_sym = Factory::LookupAsciiSymbol("summary"); + Handle desc_sym = factory->LookupAsciiSymbol("desc"); + Handle count_sym = factory->LookupAsciiSymbol("count"); + Handle size_sym = factory->LookupAsciiSymbol("size"); + Handle summary_sym = factory->LookupAsciiSymbol("summary"); // Allocate the summary array. int entries_count = summary.GetNumberOfEntries(); Handle summary_arr = - Factory::NewFixedArray(entries_count); + factory->NewFixedArray(entries_count); if (summary_arr->IsFailure()) return Object::cast(*summary_arr); int idx = 0; for (int i = 0; i < LiveObjectSummary::kNumberOfEntries; i++) { // Allocate the summary record. - Handle detail = Factory::NewJSObject(Top::object_function()); + Handle detail = factory->NewJSObject(isolate->object_function()); if (detail->IsFailure()) return Object::cast(*detail); // Fill in the summary record. @@ -1420,7 +1435,7 @@ int count = summary.Count(type); if (count) { const char* desc_cstr = GetObjectTypeDesc(type); - Handle desc = Factory::LookupAsciiSymbol(desc_cstr); + Handle desc = factory->LookupAsciiSymbol(desc_cstr); int size = summary.Size(type); maybe_result = detail->SetProperty(*desc_sym, @@ -1444,12 +1459,13 @@ } // Wrap the summary fixed array in a JS array. - Handle summary_obj = Factory::NewJSObject(Top::array_function()); + Handle summary_obj = + factory->NewJSObject(isolate->array_function()); if (summary_obj->IsFailure()) return Object::cast(*summary_obj); Handle::cast(summary_obj)->SetContent(*summary_arr); // Create the body object. - Handle body = Factory::NewJSObject(Top::object_function()); + Handle body = factory->NewJSObject(isolate->object_function()); if (body->IsFailure()) return Object::cast(*body); // Fill out the body object. @@ -1470,9 +1486,9 @@ if (is_tracking_roots) { int found_root = summary.found_root(); int found_weak_root = summary.found_weak_root(); - Handle root_sym = Factory::LookupAsciiSymbol("found_root"); + Handle root_sym = factory->LookupAsciiSymbol("found_root"); Handle weak_root_sym = - Factory::LookupAsciiSymbol("found_weak_root"); + factory->LookupAsciiSymbol("found_weak_root"); maybe_result = body->SetProperty(*root_sym, Smi::FromInt(found_root), NONE, @@ -1499,7 +1515,10 @@ // Note: only dumps the section starting at start_idx and only up to // dump_limit entries. MaybeObject* LiveObjectList::Info(int start_idx, int dump_limit) { - HandleScope scope; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + + HandleScope scope(isolate); MaybeObject* maybe_result; int total_count = LiveObjectList::list_count(); @@ -1519,13 +1538,13 @@ } // Allocate an array to hold the result. - Handle list = Factory::NewFixedArray(dump_count); + Handle list = factory->NewFixedArray(dump_count); if (list->IsFailure()) return Object::cast(*list); // Prefetch some needed symbols. 
- Handle id_sym = Factory::LookupAsciiSymbol("id"); - Handle count_sym = Factory::LookupAsciiSymbol("count"); - Handle size_sym = Factory::LookupAsciiSymbol("size"); + Handle id_sym = factory->LookupAsciiSymbol("id"); + Handle count_sym = factory->LookupAsciiSymbol("count"); + Handle size_sym = factory->LookupAsciiSymbol("size"); // Fill the array with the lol details. int idx = 0; @@ -1543,7 +1562,8 @@ int size; count = lol->GetTotalObjCountAndSize(&size); - Handle detail = Factory::NewJSObject(Top::object_function()); + Handle detail = + factory->NewJSObject(isolate->object_function()); if (detail->IsFailure()) return Object::cast(*detail); maybe_result = detail->SetProperty(*id_sym, @@ -1568,10 +1588,10 @@ } // Return the result as a JS array. - Handle lols = Factory::NewJSObject(Top::array_function()); + Handle lols = factory->NewJSObject(isolate->array_function()); Handle::cast(lols)->SetContent(*list); - Handle result = Factory::NewJSObject(Top::object_function()); + Handle result = factory->NewJSObject(isolate->object_function()); if (result->IsFailure()) return Object::cast(*result); maybe_result = result->SetProperty(*count_sym, @@ -1580,14 +1600,14 @@ kNonStrictMode); if (maybe_result->IsFailure()) return maybe_result; - Handle first_sym = Factory::LookupAsciiSymbol("first_index"); + Handle first_sym = factory->LookupAsciiSymbol("first_index"); maybe_result = result->SetProperty(*first_sym, Smi::FromInt(start_idx), NONE, kNonStrictMode); if (maybe_result->IsFailure()) return maybe_result; - Handle lists_sym = Factory::LookupAsciiSymbol("lists"); + Handle lists_sym = factory->LookupAsciiSymbol("lists"); maybe_result = result->SetProperty(*lists_sym, *lols, NONE, @@ -1618,7 +1638,7 @@ if (element != NULL) { return Object::cast(element->obj_); } - return Heap::undefined_value(); + return HEAP->undefined_value(); } @@ -1639,8 +1659,11 @@ SmartPointer addr_str = address->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); + Isolate* isolate = Isolate::Current(); + // Extract the address value from the string. - int value = static_cast(StringToInt(*address, 16)); + int value = + static_cast(StringToInt(isolate->unicode_cache(), *address, 16)); Object* obj = reinterpret_cast(value); return Smi::FromInt(GetObjId(obj)); } @@ -1760,10 +1783,13 @@ Handle desc; Handle retainer; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + // Prefetch some needed symbols. - Handle id_sym = Factory::LookupAsciiSymbol("id"); - Handle desc_sym = Factory::LookupAsciiSymbol("desc"); - Handle size_sym = Factory::LookupAsciiSymbol("size"); + Handle id_sym = factory->LookupAsciiSymbol("id"); + Handle desc_sym = factory->LookupAsciiSymbol("desc"); + Handle size_sym = factory->LookupAsciiSymbol("size"); NoHandleAllocation ha; int count = 0; @@ -1774,7 +1800,7 @@ // Iterate roots. LolVisitor lol_visitor(*target, target); - Heap::IterateStrongRoots(&lol_visitor, VISIT_ALL); + isolate->heap()->IterateStrongRoots(&lol_visitor, VISIT_ALL); if (!AddRootRetainerIfFound(lol_visitor, filter, summary, @@ -1794,7 +1820,7 @@ } lol_visitor.reset(); - Heap::IterateWeakRoots(&lol_visitor, VISIT_ALL); + isolate->heap()->IterateWeakRoots(&lol_visitor, VISIT_ALL); if (!AddRootRetainerIfFound(lol_visitor, filter, summary, @@ -1903,11 +1929,15 @@ int start, int dump_limit, Handle filter_obj) { - HandleScope scope; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + Heap* heap = isolate->heap(); + + HandleScope scope(isolate); // Get the target object. 
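The recurring edit in this file swaps process-wide statics (Factory::, Heap::, Top::) for accessors on the current Isolate, so the live-object-list code keeps working when several isolates share a process. A toy illustration of that shape, with stand-in classes rather than V8's real Factory/Heap/Isolate:

#include <string>

// Stand-ins for per-isolate components.
class Factory {
 public:
  std::string LookupAsciiSymbol(const char* name) { return std::string(name); }
};
class Heap {};

class Isolate {
 public:
  static Isolate* Current() {
    static thread_local Isolate instance;  // stand-in for the TLS lookup
    return &instance;
  }
  Factory* factory() { return &factory_; }
  Heap* heap() { return &heap_; }

 private:
  Factory factory_;
  Heap heap_;
};

int main() {
  // Before: Factory::LookupAsciiSymbol("id"), a process-wide static.
  // After: fetch the current isolate once, then use its own factory.
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  return factory->LookupAsciiSymbol("id") == "id" ? 0 : 1;
}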
HeapObject* heap_obj = HeapObject::cast(GetObj(obj_id)); - if (heap_obj == Heap::undefined_value()) { + if (heap_obj == heap->undefined_value()) { return heap_obj; } @@ -1915,7 +1945,7 @@ // Get the constructor function for context extension and arguments array. JSObject* arguments_boilerplate = - Top::context()->global_context()->arguments_boilerplate(); + isolate->context()->global_context()->arguments_boilerplate(); JSFunction* arguments_function = JSFunction::cast(arguments_boilerplate->map()->constructor()); @@ -1937,7 +1967,7 @@ // Set body.id. Handle body = Handle(JSObject::cast(body_obj)); - Handle id_sym = Factory::LookupAsciiSymbol("id"); + Handle id_sym = factory->LookupAsciiSymbol("id"); maybe_result = body->SetProperty(*id_sym, Smi::FromInt(obj_id), NONE, @@ -1952,13 +1982,17 @@ Object* LiveObjectList::PrintObj(int obj_id) { Object* obj = GetObj(obj_id); if (!obj) { - return Heap::undefined_value(); + return HEAP->undefined_value(); } EmbeddedVector temp_filename; static int temp_count = 0; const char* path_prefix = "."; + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + Heap* heap = isolate->heap(); + if (FLAG_lol_workdir) { path_prefix = FLAG_lol_workdir; } @@ -1987,13 +2021,13 @@ if (resource->exists() && !resource->is_empty()) { ASSERT(resource->IsAscii()); Handle dump_string = - Factory::NewExternalStringFromAscii(resource); - ExternalStringTable::AddString(*dump_string); + factory->NewExternalStringFromAscii(resource); + heap->external_string_table()->AddString(*dump_string); return *dump_string; } else { delete resource; } - return Heap::undefined_value(); + return HEAP->undefined_value(); } @@ -2081,6 +2115,10 @@ FILE* f = OS::FOpen(temp_filename.start(), "w+"); + Isolate* isolate = Isolate::Current(); + Factory* factory = isolate->factory(); + Heap* heap = isolate->heap(); + // Save the previous verbosity. bool prev_verbosity = FLAG_use_verbose_printer; FLAG_use_verbose_printer = false; @@ -2096,15 +2134,14 @@ // Check for ObjectGroups that references this object. // TODO(mlam): refactor this to be more modular. { - List* groups = GlobalHandles::ObjectGroups(); + List* groups = isolate->global_handles()->object_groups(); for (int i = 0; i < groups->length(); i++) { ObjectGroup* group = groups->at(i); if (group == NULL) continue; bool found_group = false; - List& objects = group->objects_; - for (int j = 0; j < objects.length(); j++) { - Object* object = *objects[j]; + for (size_t j = 0; j < group->length_; j++) { + Object* object = *(group->objects_[j]); HeapObject* hobj = HeapObject::cast(object); if (obj2 == hobj) { found_group = true; @@ -2117,8 +2154,8 @@ "obj %p is a member of object group %p {\n", reinterpret_cast(obj2), reinterpret_cast(group)); - for (int j = 0; j < objects.length(); j++) { - Object* object = *objects[j]; + for (size_t j = 0; j < group->length_; j++) { + Object* object = *(group->objects_[j]); if (!object->IsHeapObject()) continue; HeapObject* hobj = HeapObject::cast(object); @@ -2143,12 +2180,12 @@ } PrintF(f, "path from roots to obj %p\n", reinterpret_cast(obj2)); - Heap::IterateRoots(&tracer, VISIT_ONLY_STRONG); + heap->IterateRoots(&tracer, VISIT_ONLY_STRONG); found = tracer.found(); if (!found) { PrintF(f, " No paths found. 
Checking symbol tables ...\n"); - SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); + SymbolTable* symbol_table = HEAP->raw_unchecked_symbol_table(); tracer.VisitPointers(reinterpret_cast(&symbol_table), reinterpret_cast(&symbol_table)+1); found = tracer.found(); @@ -2161,7 +2198,7 @@ if (!found) { PrintF(f, " No paths found. Checking weak roots ...\n"); // Check weak refs next. - GlobalHandles::IterateWeakRoots(&tracer); + isolate->global_handles()->IterateWeakRoots(&tracer); found = tracer.found(); } @@ -2191,13 +2228,13 @@ if (resource->exists() && !resource->is_empty()) { ASSERT(resource->IsAscii()); Handle path_string = - Factory::NewExternalStringFromAscii(resource); - ExternalStringTable::AddString(*path_string); + factory->NewExternalStringFromAscii(resource); + heap->external_string_table()->AddString(*path_string); return *path_string; } else { delete resource; } - return Heap::undefined_value(); + return heap->undefined_value(); } @@ -2210,13 +2247,13 @@ HeapObject* obj1 = NULL; if (obj_id1 != 0) { obj1 = HeapObject::cast(GetObj(obj_id1)); - if (obj1 == Heap::undefined_value()) { + if (obj1 == HEAP->undefined_value()) { return obj1; } } HeapObject* obj2 = HeapObject::cast(GetObj(obj_id2)); - if (obj2 == Heap::undefined_value()) { + if (obj2 == HEAP->undefined_value()) { return obj2; } @@ -2570,12 +2607,13 @@ void LiveObjectList::VerifyNotInFromSpace() { OS::Print("VerifyNotInFromSpace() ...\n"); LolIterator it(NULL, last()); + Heap* heap = ISOLATE->heap(); int i = 0; for (it.Init(); !it.Done(); it.Next()) { HeapObject* heap_obj = it.Obj(); - if (Heap::InFromSpace(heap_obj)) { + if (heap->InFromSpace(heap_obj)) { OS::Print(" ERROR: VerifyNotInFromSpace: [%d] obj %p in From space %p\n", - i++, heap_obj, Heap::new_space()->FromSpaceLow()); + i++, heap_obj, heap->new_space()->FromSpaceLow()); } } } diff -Nru libv8-3.4.14.21/src/liveobjectlist.h libv8-3.5.10.24/src/liveobjectlist.h --- libv8-3.4.14.21/src/liveobjectlist.h 2011-03-23 11:19:56.000000000 +0000 +++ libv8-3.5.10.24/src/liveobjectlist.h 2011-08-17 14:33:23.000000000 +0000 @@ -237,10 +237,10 @@ // to live new space objects, and not actually keep them alive. void UpdatePointer(Object** p) { Object* object = *p; - if (!Heap::InNewSpace(object)) return; + if (!HEAP->InNewSpace(object)) return; HeapObject* heap_obj = HeapObject::cast(object); - ASSERT(Heap::InFromSpace(heap_obj)); + ASSERT(HEAP->InFromSpace(heap_obj)); // We use the first word (where the map pointer usually is) of a heap // object to record the forwarding pointer. 
A forwarding pointer can diff -Nru libv8-3.4.14.21/src/log.cc libv8-3.5.10.24/src/log.cc --- libv8-3.4.14.21/src/log.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/log.cc 2011-07-25 11:05:35.000000000 +0000 @@ -1400,6 +1400,7 @@ case Code::UNARY_OP_IC: // fall through case Code::BINARY_OP_IC: // fall through case Code::COMPARE_IC: // fall through + case Code::TO_BOOLEAN_IC: // fall through case Code::STUB: description = CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true); diff -Nru libv8-3.4.14.21/src/log-utils.cc libv8-3.5.10.24/src/log-utils.cc --- libv8-3.4.14.21/src/log-utils.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/log-utils.cc 2011-08-10 11:27:35.000000000 +0000 @@ -34,7 +34,7 @@ namespace internal { -const char* Log::kLogToTemporaryFile = "&"; +const char* const Log::kLogToTemporaryFile = "&"; Log::Log(Logger* logger) @@ -86,8 +86,6 @@ if (open_log_file) { if (strcmp(FLAG_logfile, "-") == 0) { OpenStdout(); - } else if (strcmp(FLAG_logfile, "*") == 0) { - // Does nothing for now. Will be removed. } else if (strcmp(FLAG_logfile, kLogToTemporaryFile) == 0) { OpenTemporaryFile(); } else { diff -Nru libv8-3.4.14.21/src/log-utils.h libv8-3.5.10.24/src/log-utils.h --- libv8-3.4.14.21/src/log-utils.h 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/log-utils.h 2011-08-10 11:27:35.000000000 +0000 @@ -59,7 +59,7 @@ // This mode is only used in tests, as temporary files are automatically // deleted on close and thus can't be accessed afterwards. - static const char* kLogToTemporaryFile; + static const char* const kLogToTemporaryFile; private: explicit Log(Logger* logger); diff -Nru libv8-3.4.14.21/src/macros.py libv8-3.5.10.24/src/macros.py --- libv8-3.4.14.21/src/macros.py 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/src/macros.py 2011-07-28 09:15:17.000000000 +0000 @@ -44,7 +44,7 @@ const kApiConstructorOffset = 2; const kApiPrototypeTemplateOffset = 5; const kApiParentTemplateOffset = 6; -const kApiPrototypeAttributesOffset = 15; +const kApiFlagOffset = 14; const NO_HINT = 0; const NUMBER_HINT = 1; @@ -65,6 +65,7 @@ # For apinatives.js const kUninitialized = -1; +const kReadOnlyPrototypeBit = 3; # For FunctionTemplateInfo, matches objects.h # Note: kDayZeroInJulianDay = ToJulianDay(1970, 0, 1). const kInvalidDate = 'Invalid Date'; diff -Nru libv8-3.4.14.21/src/mark-compact.cc libv8-3.5.10.24/src/mark-compact.cc --- libv8-3.4.14.21/src/mark-compact.cc 2011-07-13 14:21:21.000000000 +0000 +++ libv8-3.5.10.24/src/mark-compact.cc 2011-08-29 10:41:00.000000000 +0000 @@ -64,13 +64,15 @@ live_bytes_(0), #endif heap_(NULL), - code_flusher_(NULL) { } + code_flusher_(NULL), + encountered_weak_maps_(NULL) { } void MarkCompactCollector::CollectGarbage() { // Make sure that Prepare() has been called. The individual steps below will // update the state as they proceed. ASSERT(state_ == PREPARE_GC); + ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); // Prepare has selected whether to compact the old generation or not. // Tell the tracer. 
@@ -80,6 +82,8 @@ if (FLAG_collect_maps) ClearNonLiveTransitions(); + ClearWeakMaps(); + SweepLargeObjectSpace(); if (IsCompacting()) { @@ -390,6 +394,10 @@ ConsString::BodyDescriptor, void>::Visit); + table_.Register(kVisitSlicedString, + &FixedBodyVisitor::Visit); table_.Register(kVisitFixedArray, &FlexibleBodyVisitor StructObjectVisitor; + static void VisitJSWeakMap(Map* map, HeapObject* object) { + MarkCompactCollector* collector = map->heap()->mark_compact_collector(); + JSWeakMap* weak_map = reinterpret_cast(object); + + // Enqueue weak map in linked list of encountered weak maps. + ASSERT(weak_map->next() == Smi::FromInt(0)); + weak_map->set_next(collector->encountered_weak_maps()); + collector->set_encountered_weak_maps(weak_map); + + // Skip visiting the backing hash table containing the mappings. + int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); + BodyVisitorBase::IteratePointers( + map->heap(), + object, + JSWeakMap::BodyDescriptor::kStartOffset, + JSWeakMap::kTableOffset); + BodyVisitorBase::IteratePointers( + map->heap(), + object, + JSWeakMap::kTableOffset + kPointerSize, + object_size); + + // Mark the backing hash table without pushing it on the marking stack. + ASSERT(!weak_map->unchecked_table()->IsMarked()); + ASSERT(weak_map->unchecked_table()->map()->IsMarked()); + collector->SetMark(weak_map->unchecked_table()); + } + static void VisitCode(Map* map, HeapObject* object) { reinterpret_cast(object)->CodeIterateBody( map->heap()); @@ -1369,20 +1407,26 @@ // marking stack have been marked, or are overflowed in the heap. void MarkCompactCollector::EmptyMarkingStack() { while (!marking_stack_.is_empty()) { - HeapObject* object = marking_stack_.Pop(); - ASSERT(object->IsHeapObject()); - ASSERT(heap()->Contains(object)); - ASSERT(object->IsMarked()); - ASSERT(!object->IsOverflowed()); - - // Because the object is marked, we have to recover the original map - // pointer and use it to mark the object's body. - MapWord map_word = object->map_word(); - map_word.ClearMark(); - Map* map = map_word.ToMap(); - MarkObject(map); - - StaticMarkingVisitor::IterateBody(map, object); + while (!marking_stack_.is_empty()) { + HeapObject* object = marking_stack_.Pop(); + ASSERT(object->IsHeapObject()); + ASSERT(heap()->Contains(object)); + ASSERT(object->IsMarked()); + ASSERT(!object->IsOverflowed()); + + // Because the object is marked, we have to recover the original map + // pointer and use it to mark the object's body. + MapWord map_word = object->map_word(); + map_word.ClearMark(); + Map* map = map_word.ToMap(); + MarkObject(map); + + StaticMarkingVisitor::IterateBody(map, object); + } + + // Process encountered weak maps, mark objects only reachable by those + // weak maps and repeat until fix-point is reached. 
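The weak-map handling described in the comment above is ephemeron-style marking: a value is kept alive only if its key is alive, newly marked values may in turn make other keys reachable, so marking repeats until a fix-point, and entries whose keys stayed unmarked are cleared afterwards. A small model of that algorithm over ordinary containers, purely to illustrate the fix-point; it is not the collector's data representation:

#include <cassert>
#include <map>
#include <set>

using Object = int;  // object identities, for illustration only

// Mark values whose keys are marked; repeat because a newly marked value
// may itself be the key of another entry (hence the fix-point loop).
void ProcessWeakMaps(const std::map<Object, Object>& weak_map,
                     std::set<Object>* marked) {
  bool changed = true;
  while (changed) {
    changed = false;
    for (const auto& entry : weak_map) {
      if (marked->count(entry.first) != 0 &&
          marked->count(entry.second) == 0) {
        marked->insert(entry.second);
        changed = true;
      }
    }
  }
}

// Drop entries whose keys were never marked.
void ClearWeakMaps(const std::set<Object>& marked,
                   std::map<Object, Object>* weak_map) {
  for (auto it = weak_map->begin(); it != weak_map->end();) {
    if (marked.count(it->first) != 0) {
      ++it;
    } else {
      it = weak_map->erase(it);
    }
  }
}

int main() {
  std::map<Object, Object> weak_map = {{1, 2}, {2, 3}, {4, 5}};
  std::set<Object> marked = {1};       // only object 1 is otherwise reachable
  ProcessWeakMaps(weak_map, &marked);  // marks 2, then 3 via the fix-point
  ClearWeakMaps(marked, &weak_map);    // entry {4, 5} has a dead key
  assert(marked.count(3) == 1);
  assert(weak_map.count(4) == 0);
  return 0;
}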
+ ProcessWeakMaps(); } } @@ -1735,6 +1779,45 @@ } } + +void MarkCompactCollector::ProcessWeakMaps() { + Object* weak_map_obj = encountered_weak_maps(); + while (weak_map_obj != Smi::FromInt(0)) { + ASSERT(HeapObject::cast(weak_map_obj)->IsMarked()); + JSWeakMap* weak_map = reinterpret_cast(weak_map_obj); + ObjectHashTable* table = weak_map->unchecked_table(); + for (int i = 0; i < table->Capacity(); i++) { + if (HeapObject::cast(table->KeyAt(i))->IsMarked()) { + Object* value = table->get(table->EntryToValueIndex(i)); + StaticMarkingVisitor::MarkObjectByPointer(heap(), &value); + table->set_unchecked(heap(), + table->EntryToValueIndex(i), + value, + UPDATE_WRITE_BARRIER); + } + } + weak_map_obj = weak_map->next(); + } +} + + +void MarkCompactCollector::ClearWeakMaps() { + Object* weak_map_obj = encountered_weak_maps(); + while (weak_map_obj != Smi::FromInt(0)) { + ASSERT(HeapObject::cast(weak_map_obj)->IsMarked()); + JSWeakMap* weak_map = reinterpret_cast(weak_map_obj); + ObjectHashTable* table = weak_map->unchecked_table(); + for (int i = 0; i < table->Capacity(); i++) { + if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) { + table->RemoveEntry(i, heap()); + } + } + weak_map_obj = weak_map->next(); + weak_map->set_next(Smi::FromInt(0)); + } + set_encountered_weak_maps(Smi::FromInt(0)); +} + // ------------------------------------------------------------------------- // Phase 2: Encode forwarding addresses. // When compacting, forwarding addresses for objects in old space and map diff -Nru libv8-3.4.14.21/src/mark-compact.h libv8-3.5.10.24/src/mark-compact.h --- libv8-3.4.14.21/src/mark-compact.h 2011-07-04 14:01:31.000000000 +0000 +++ libv8-3.5.10.24/src/mark-compact.h 2011-08-10 11:27:35.000000000 +0000 @@ -193,6 +193,11 @@ inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } void EnableCodeFlushing(bool enable); + inline Object* encountered_weak_maps() { return encountered_weak_maps_; } + inline void set_encountered_weak_maps(Object* weak_map) { + encountered_weak_maps_ = weak_map; + } + private: MarkCompactCollector(); ~MarkCompactCollector(); @@ -329,6 +334,16 @@ // We replace them with a null descriptor, with the same key. void ClearNonLiveTransitions(); + // Mark all values associated with reachable keys in weak maps encountered + // so far. This might push new object or even new weak maps onto the + // marking stack. + void ProcessWeakMaps(); + + // After all reachable objects have been marked those weak map entries + // with an unreachable key are removed from all encountered weak maps. + // The linked list of all encountered weak maps is destroyed. 
+ void ClearWeakMaps(); + // ----------------------------------------------------------------------- // Phase 2: Sweeping to clear mark bits and free non-live objects for // a non-compacting collection, or else computing and encoding @@ -499,6 +514,7 @@ Heap* heap_; MarkingStack marking_stack_; CodeFlusher* code_flusher_; + Object* encountered_weak_maps_; friend class Heap; friend class OverflowedObjectsScanner; diff -Nru libv8-3.4.14.21/src/messages.js libv8-3.5.10.24/src/messages.js --- libv8-3.4.14.21/src/messages.js 2011-09-12 11:26:01.000000000 +0000 +++ libv8-3.5.10.24/src/messages.js 2011-09-12 11:24:28.000000000 +0000 @@ -198,17 +198,19 @@ non_extensible_proto: ["%0", " is not extensible"], handler_non_object: ["Proxy.", "%0", " called with non-object as handler"], handler_trap_missing: ["Proxy handler ", "%0", " has no '", "%1", "' trap"], + handler_trap_must_be_callable: ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"], handler_returned_false: ["Proxy handler ", "%0", " returned false for '", "%1", "' trap"], handler_returned_undefined: ["Proxy handler ", "%0", " returned undefined for '", "%1", "' trap"], proxy_prop_not_configurable: ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"], proxy_non_object_prop_names: ["Trap ", "%1", " returned non-object ", "%0"], proxy_repeated_prop_name: ["Trap ", "%1", " returned repeated property name ", "%2"], + invalid_weakmap_key: ["Invalid value used as weak map key"], // RangeError invalid_array_length: ["Invalid array length"], stack_overflow: ["Maximum call stack size exceeded"], // SyntaxError unable_to_parse: ["Parse error"], - duplicate_regexp_flag: ["Duplicate RegExp flag ", "%0"], + invalid_regexp_flags: ["Invalid flags supplied to RegExp constructor '", "%0", "'"], invalid_regexp: ["Invalid RegExp pattern /", "%0", "/"], illegal_break: ["Illegal break statement"], illegal_continue: ["Illegal continue statement"], @@ -248,8 +250,9 @@ strict_cannot_assign: ["Cannot assign to read only '", "%0", "' in strict mode"], strict_poison_pill: ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"], strict_caller: ["Illegal access to a strict mode caller function."], + unprotected_let: ["Illegal let declaration in unprotected statement context."], cant_prevent_ext_external_array_elements: ["Cannot prevent extension of an object with external array elements"], - redef_external_array_element: ["Cannot redefine a property of an object"] + redef_external_array_element: ["Cannot redefine a property of an object with external array elements"], }; } var message_type = %MessageGetType(message); diff -Nru libv8-3.4.14.21/src/mips/assembler-mips.cc libv8-3.5.10.24/src/mips/assembler-mips.cc --- libv8-3.4.14.21/src/mips/assembler-mips.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/mips/assembler-mips.cc 2011-08-31 09:03:56.000000000 +0000 @@ -780,10 +780,10 @@ void Assembler::next(Label* L) { ASSERT(L->is_linked()); int link = target_at(L->pos()); - ASSERT(link > 0 || link == kEndOfChain); if (link == kEndOfChain) { L->Unuse(); - } else if (link > 0) { + } else { + ASSERT(link >= 0); L->link_to(link); } } diff -Nru libv8-3.4.14.21/src/mips/assembler-mips.h libv8-3.5.10.24/src/mips/assembler-mips.h --- libv8-3.4.14.21/src/mips/assembler-mips.h 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/mips/assembler-mips.h 2011-08-29 10:41:00.000000000 +0000 @@ -127,38 +127,38 @@ const 
Register no_reg = { -1 }; -const Register zero_reg = { 0 }; -const Register at = { 1 }; -const Register v0 = { 2 }; -const Register v1 = { 3 }; -const Register a0 = { 4 }; +const Register zero_reg = { 0 }; // Always zero. +const Register at = { 1 }; // at: Reserved for synthetic instructions. +const Register v0 = { 2 }; // v0, v1: Used when returning multiple values +const Register v1 = { 3 }; // from subroutines. +const Register a0 = { 4 }; // a0 - a4: Used to pass non-FP parameters. const Register a1 = { 5 }; const Register a2 = { 6 }; const Register a3 = { 7 }; -const Register t0 = { 8 }; -const Register t1 = { 9 }; -const Register t2 = { 10 }; +const Register t0 = { 8 }; // t0 - t9: Can be used without reservation, act +const Register t1 = { 9 }; // as temporary registers and are allowed to +const Register t2 = { 10 }; // be destroyed by subroutines. const Register t3 = { 11 }; const Register t4 = { 12 }; const Register t5 = { 13 }; const Register t6 = { 14 }; const Register t7 = { 15 }; -const Register s0 = { 16 }; -const Register s1 = { 17 }; -const Register s2 = { 18 }; -const Register s3 = { 19 }; -const Register s4 = { 20 }; +const Register s0 = { 16 }; // s0 - s7: Subroutine register variables. +const Register s1 = { 17 }; // Subroutines that write to these registers +const Register s2 = { 18 }; // must restore their values before exiting so +const Register s3 = { 19 }; // that the caller can expect the values to be +const Register s4 = { 20 }; // preserved. const Register s5 = { 21 }; const Register s6 = { 22 }; const Register s7 = { 23 }; const Register t8 = { 24 }; const Register t9 = { 25 }; -const Register k0 = { 26 }; -const Register k1 = { 27 }; -const Register gp = { 28 }; -const Register sp = { 29 }; -const Register s8_fp = { 30 }; -const Register ra = { 31 }; +const Register k0 = { 26 }; // k0, k1: Reserved for system calls and +const Register k1 = { 27 }; // interrupt handlers. +const Register gp = { 28 }; // gp: Reserved. +const Register sp = { 29 }; // sp: Stack pointer. +const Register s8_fp = { 30 }; // fp: Frame pointer. +const Register ra = { 31 }; // ra: Return address pointer. int ToNumber(Register reg); diff -Nru libv8-3.4.14.21/src/mips/code-stubs-mips.cc libv8-3.5.10.24/src/mips/code-stubs-mips.cc --- libv8-3.4.14.21/src/mips/code-stubs-mips.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/mips/code-stubs-mips.cc 2011-08-31 09:03:56.000000000 +0000 @@ -2506,7 +2506,7 @@ CpuFeatures::Scope scope(FPU); __ mtc1(a2, f0); if (op_ == Token::SHR) { - __ Cvt_d_uw(f0, f0); + __ Cvt_d_uw(f0, f0, f22); } else { __ cvt_d_w(f0, f0); } @@ -2920,7 +2920,7 @@ } else { // The result must be interpreted as an unsigned 32-bit integer. __ mtc1(a2, double_scratch); - __ Cvt_d_uw(double_scratch, double_scratch); + __ Cvt_d_uw(double_scratch, double_scratch, single_scratch); } // Store the result. @@ -3693,10 +3693,10 @@ // args // Save callee saved registers on the stack. - __ MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit()); + __ MultiPush(kCalleeSaved | ra.bit()); // Load argv in s0 register. - __ lw(s0, MemOperand(sp, kNumCalleeSaved * kPointerSize + + __ lw(s0, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize + StandardFrameConstants::kCArgsSlotsSize)); // We build an EntryFrame. @@ -3830,7 +3830,7 @@ __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); // Restore callee saved registers from the stack. - __ MultiPop((kCalleeSaved | ra.bit()) & ~sp.bit()); + __ MultiPop(kCalleeSaved | ra.bit()); // Return. 
__ Jump(ra); } @@ -4517,6 +4517,9 @@ __ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead)); __ sra(at, a0, kSmiTagSize); // Untag length for comparison. __ Branch(&runtime, gt, a2, Operand(at)); + + // Reset offset for possibly sliced string. + __ mov(t0, zero_reg); // subject: Subject string // regexp_data: RegExp data (FixedArray) // Check the representation and encoding of the subject string. @@ -4524,29 +4527,41 @@ __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); // First check for flat string. - __ And(at, a0, Operand(kIsNotStringMask | kStringRepresentationMask)); + __ And(a1, a0, Operand(kIsNotStringMask | kStringRepresentationMask)); STATIC_ASSERT((kStringTag | kSeqStringTag) == 0); - __ Branch(&seq_string, eq, at, Operand(zero_reg)); + __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // subject: Subject string // a0: instance type if Subject string // regexp_data: RegExp data (FixedArray) - // Check for flat cons string. + // Check for flat cons string or sliced string. // A flat cons string is a cons string where the second part is the empty // string. In that case the subject string is just the first part of the cons // string. Also in this case the first part of the cons string is known to be // a sequential string or an external string. - STATIC_ASSERT(kExternalStringTag != 0); - STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); - __ And(at, a0, Operand(kIsNotStringMask | kExternalStringTag)); - __ Branch(&runtime, ne, at, Operand(zero_reg)); + // In the case of a sliced string its offset has to be taken into account. + Label cons_string, check_encoding; + STATIC_ASSERT(kConsStringTag < kExternalStringTag); + STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); + __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag)); + __ Branch(&runtime, eq, a1, Operand(kExternalStringTag)); + + // String is sliced. + __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); + __ sra(t0, t0, kSmiTagSize); + __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); + // t5: offset of sliced string, smi-tagged. + __ jmp(&check_encoding); + // String is a cons string, check whether it is flat. + __ bind(&cons_string); __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); __ LoadRoot(a1, Heap::kEmptyStringRootIndex); __ Branch(&runtime, ne, a0, Operand(a1)); __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); + // Is first part of cons or parent of slice a flat string? + __ bind(&check_encoding); __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); - // Is first part a flat string? STATIC_ASSERT(kSeqStringTag == 0); __ And(at, a0, Operand(kStringRepresentationMask)); __ Branch(&runtime, ne, at, Operand(zero_reg)); @@ -4562,8 +4577,8 @@ __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ascii. __ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset)); __ sra(a3, a0, 2); // a3 is 1 for ascii, 0 for UC16 (usyed below). - __ lw(t0, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); - __ movz(t9, t0, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset. + __ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); + __ movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset. // Check that the irregexp code has been generated for the actual string // encoding. 
If it has, the field contains a code object otherwise it contains @@ -4630,23 +4645,32 @@ // For arguments 4 and 3 get string length, calculate start of string data // and calculate the shift of the index (0 for ASCII and 1 for two byte). - __ lw(a0, FieldMemOperand(subject, String::kLengthOffset)); - __ sra(a0, a0, kSmiTagSize); STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); - __ Addu(t0, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ Addu(t2, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. - // Argument 4 (a3): End of string data - // Argument 3 (a2): Start of string data + // Load the length from the original subject string from the previous stack + // frame. Therefore we have to use fp, which points exactly to two pointer + // sizes below the previous sp. (Because creating a new stack frame pushes + // the previous fp onto the stack and moves up sp by 2 * kPointerSize.) + __ lw(a0, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); + // If slice offset is not 0, load the length from the original sliced string. + // Argument 4, a3: End of string data + // Argument 3, a2: Start of string data + // Prepare start and end index of the input. + __ sllv(t1, t0, a3); + __ addu(t0, t2, t1); __ sllv(t1, a1, a3); __ addu(a2, t0, t1); - __ sllv(t1, a0, a3); - __ addu(a3, t0, t1); + __ lw(t2, FieldMemOperand(a0, String::kLengthOffset)); + __ sra(t2, t2, kSmiTagSize); + __ sllv(t1, t2, a3); + __ addu(a3, t0, t1); // Argument 2 (a1): Previous index. // Already there // Argument 1 (a0): Subject string. - __ mov(a0, subject); + // Already there // Locate the code entry and call it. __ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); @@ -4663,11 +4687,14 @@ // Check the result. Label success; - __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS)); + __ Branch(&success, eq, + subject, Operand(NativeRegExpMacroAssembler::SUCCESS)); Label failure; - __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE)); + __ Branch(&failure, eq, + subject, Operand(NativeRegExpMacroAssembler::FAILURE)); // If not exception it can only be retry. Handle that in the runtime system. - __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); + __ Branch(&runtime, ne, + subject, Operand(NativeRegExpMacroAssembler::EXCEPTION)); // Result must now be exception. If there is no pending exception already a // stack overflow (on the backtrack stack) was detected in RegExp code but // haven't created the exception yet. Handle that in the runtime system. @@ -4678,16 +4705,16 @@ __ li(a2, Operand(ExternalReference(Isolate::k_pending_exception_address, masm->isolate()))); __ lw(v0, MemOperand(a2, 0)); - __ Branch(&runtime, eq, v0, Operand(a1)); + __ Branch(&runtime, eq, subject, Operand(a1)); __ sw(a1, MemOperand(a2, 0)); // Clear pending exception. // Check if the exception is a termination. If so, throw as uncatchable. __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex); Label termination_exception; - __ Branch(&termination_exception, eq, v0, Operand(a0)); + __ Branch(&termination_exception, eq, subject, Operand(a0)); - __ Throw(a0); // Expects thrown value in v0. + __ Throw(subject); // Expects thrown value in v0. __ bind(&termination_exception); __ ThrowUncatchable(TERMINATION, v0); // Expects thrown value in v0. 
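With string slices the stub can no longer assume the subject's characters start at the subject object itself: both RegExp input pointers are computed from the parent string's character data plus the slice offset, scaled by 0 for one-byte and 1 for two-byte strings (the role of register a3 above), and the end pointer uses the original subject's length reloaded from the caller's frame. Below is a small self-contained sketch of that address arithmetic, with plain pointers standing in for the MIPS registers; all names are illustrative.

// Sketch of the start/end computation for a possibly sliced RegExp subject.
// 'two_byte' mirrors the a3 shift amount.
#include <cstdint>
#include <cstdio>

struct SubjectRange {
  const uint8_t* start;   // argument 3 (a2) passed to the RegExp code
  const uint8_t* end;     // argument 4 (a3)
};

static SubjectRange ComputeRange(const uint8_t* parent_data,  // first character
                                 int slice_offset,            // 0 if not sliced
                                 int previous_index,          // match resumes here
                                 int subject_length,          // original subject
                                 bool two_byte) {
  int shift = two_byte ? 1 : 0;                  // one character = 1 or 2 bytes
  const uint8_t* base = parent_data + (slice_offset << shift);
  SubjectRange r;
  r.start = base + (previous_index << shift);
  r.end = base + (subject_length << shift);
  return r;
}

int main() {
  const char* parent = "irrelevant-prefix-needle";
  // A slice of length 6 ("needle") starting at offset 18, matched from index 0.
  SubjectRange r = ComputeRange(reinterpret_cast<const uint8_t*>(parent),
                                18, 0, 6, /*two_byte=*/false);
  std::printf("input covers %td bytes\n", r.end - r.start);   // prints 6
  return 0;
}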
@@ -4963,6 +4990,7 @@ Label flat_string; Label ascii_string; Label got_char_code; + Label sliced_string; ASSERT(!t0.is(scratch_)); ASSERT(!t0.is(index_)); @@ -4996,23 +5024,37 @@ __ Branch(&flat_string, eq, t0, Operand(zero_reg)); // Handle non-flat strings. - __ And(t0, result_, Operand(kIsConsStringMask)); - __ Branch(&call_runtime_, eq, t0, Operand(zero_reg)); + __ And(result_, result_, Operand(kStringRepresentationMask)); + STATIC_ASSERT(kConsStringTag < kExternalStringTag); + STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); + __ Branch(&sliced_string, gt, result_, Operand(kExternalStringTag)); + __ Branch(&call_runtime_, eq, result_, Operand(kExternalStringTag)); // ConsString. // Check whether the right hand side is the empty string (i.e. if // this is really a flat string in a cons string). If that is not // the case we would rather go to the runtime system now to flatten // the string. + Label assure_seq_string; __ lw(result_, FieldMemOperand(object_, ConsString::kSecondOffset)); __ LoadRoot(t0, Heap::kEmptyStringRootIndex); __ Branch(&call_runtime_, ne, result_, Operand(t0)); // Get the first of the two strings and load its instance type. __ lw(object_, FieldMemOperand(object_, ConsString::kFirstOffset)); + __ jmp(&assure_seq_string); + + // SlicedString, unpack and add offset. + __ bind(&sliced_string); + __ lw(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset)); + __ addu(scratch_, scratch_, result_); + __ lw(object_, FieldMemOperand(object_, SlicedString::kParentOffset)); + + // Assure that we are dealing with a sequential string. Go to runtime if not. + __ bind(&assure_seq_string); __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); - // If the first cons component is also non-flat, then go to runtime. + // Check that parent is not an external string. Go to runtime otherwise. STATIC_ASSERT(kSeqStringTag == 0); __ And(t0, result_, Operand(kStringRepresentationMask)); @@ -5583,6 +5625,11 @@ Register to = t2; Register from = t3; + if (FLAG_string_slices) { + __ nop(); // Jumping as first instruction would crash the code generation. + __ jmp(&sub_string_runtime); + } + // Check bounds and smi-ness. __ lw(to, MemOperand(sp, kToOffset)); __ lw(from, MemOperand(sp, kFromOffset)); diff -Nru libv8-3.4.14.21/src/mips/deoptimizer-mips.cc libv8-3.5.10.24/src/mips/deoptimizer-mips.cc --- libv8-3.4.14.21/src/mips/deoptimizer-mips.cc 2011-07-04 14:01:31.000000000 +0000 +++ libv8-3.5.10.24/src/mips/deoptimizer-mips.cc 2011-08-10 11:27:35.000000000 +0000 @@ -39,7 +39,7 @@ namespace internal { -int Deoptimizer::table_entry_size_ = 10; +const int Deoptimizer::table_entry_size_ = 10; int Deoptimizer::patch_size() { diff -Nru libv8-3.4.14.21/src/mips/frames-mips.h libv8-3.5.10.24/src/mips/frames-mips.h --- libv8-3.4.14.21/src/mips/frames-mips.h 2011-05-25 07:58:50.000000000 +0000 +++ libv8-3.5.10.24/src/mips/frames-mips.h 2011-08-29 10:41:00.000000000 +0000 @@ -59,10 +59,10 @@ // Saved temporaries. 1 << 16 | 1 << 17 | 1 << 18 | 1 << 19 | 1 << 20 | 1 << 21 | 1 << 22 | 1 << 23 | - // gp, sp, fp. - 1 << 28 | 1 << 29 | 1 << 30; + // fp. + 1 << 30; -static const int kNumCalleeSaved = 11; +static const int kNumCalleeSaved = 9; // Number of registers for which space is reserved in safepoints. 
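The frames-mips.h hunk above shrinks kCalleeSaved to s0-s7 plus fp (gp and sp are no longer saved), so kNumCalleeSaved drops from 11 to 9; ra is pushed through the same mask in the entry stub, which is why the argv load uses (kNumCalleeSaved + 1) words above the reserved argument slots. A quick sanity check of the mask and of the resulting offset; the 4-byte pointer size and the four reserved O32 argument slots are assumptions about the MIPS32 target, not values taken from this diff.

// Sanity-check kNumCalleeSaved and the argv offset used by the JS entry stub.
// Assumes MIPS32: 4-byte pointers and 4 reserved C argument slots (16 bytes).
#include <bitset>
#include <cstdio>

int main() {
  const unsigned kCalleeSaved =
      1u << 16 | 1u << 17 | 1u << 18 | 1u << 19 |   // s0 - s3
      1u << 20 | 1u << 21 | 1u << 22 | 1u << 23 |   // s4 - s7
      1u << 30;                                     // fp
  const int kNumCalleeSaved =
      static_cast<int>(std::bitset<32>(kCalleeSaved).count());

  const int kPointerSize = 4;
  const int kCArgsSlotsSize = 4 * kPointerSize;
  // MultiPush(kCalleeSaved | ra.bit()) pushes ra in addition to the
  // callee-saved registers, hence the (kNumCalleeSaved + 1) factor below.
  const int argv_offset =
      (kNumCalleeSaved + 1) * kPointerSize + kCArgsSlotsSize;

  std::printf("kNumCalleeSaved = %d, argv at sp + %d\n",
              kNumCalleeSaved, argv_offset);        // prints 9 and 56
  return 0;
}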
Must be a @@ -121,10 +121,11 @@ class StackHandlerConstants : public AllStatic { public: - static const int kNextOffset = 0 * kPointerSize; - static const int kStateOffset = 1 * kPointerSize; - static const int kFPOffset = 2 * kPointerSize; - static const int kPCOffset = 3 * kPointerSize; + static const int kNextOffset = 0 * kPointerSize; + static const int kStateOffset = 1 * kPointerSize; + static const int kContextOffset = 2 * kPointerSize; + static const int kFPOffset = 3 * kPointerSize; + static const int kPCOffset = 4 * kPointerSize; static const int kSize = kPCOffset + kPointerSize; }; diff -Nru libv8-3.4.14.21/src/mips/full-codegen-mips.cc libv8-3.5.10.24/src/mips/full-codegen-mips.cc --- libv8-3.4.14.21/src/mips/full-codegen-mips.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/mips/full-codegen-mips.cc 2011-10-18 08:10:33.000000000 +0000 @@ -55,7 +55,6 @@ static unsigned GetPropertyId(Property* property) { - if (property->is_synthetic()) return AstNode::kNoNumber; return property->id(); } @@ -697,109 +696,77 @@ Comment cmnt(masm_, "[ Declaration"); ASSERT(variable != NULL); // Must have been resolved. Slot* slot = variable->AsSlot(); - Property* prop = variable->AsProperty(); - - if (slot != NULL) { - switch (slot->type()) { - case Slot::PARAMETER: - case Slot::LOCAL: - if (mode == Variable::CONST) { - __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); - __ sw(t0, MemOperand(fp, SlotOffset(slot))); - } else if (function != NULL) { - VisitForAccumulatorValue(function); - __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); - } - break; - - case Slot::CONTEXT: - // We bypass the general EmitSlotSearch because we know more about - // this specific context. - - // The variable in the decl always resides in the current function - // context. - ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); - if (FLAG_debug_code) { - // Check that we're not inside a with or catch context. - __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); - __ LoadRoot(t0, Heap::kWithContextMapRootIndex); - __ Check(ne, "Declaration in with context.", - a1, Operand(t0)); - __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); - __ Check(ne, "Declaration in catch context.", - a1, Operand(t0)); - } - if (mode == Variable::CONST) { - __ LoadRoot(at, Heap::kTheHoleValueRootIndex); - __ sw(at, ContextOperand(cp, slot->index())); - // No write barrier since the_hole_value is in old space. - } else if (function != NULL) { - VisitForAccumulatorValue(function); - __ sw(result_register(), ContextOperand(cp, slot->index())); - int offset = Context::SlotOffset(slot->index()); - // We know that we have written a function, which is not a smi. - __ mov(a1, cp); - __ RecordWrite(a1, Operand(offset), a2, result_register()); - } - break; - - case Slot::LOOKUP: { - __ li(a2, Operand(variable->name())); - // Declaration nodes are always introduced in one of two modes. - ASSERT(mode == Variable::VAR || - mode == Variable::CONST); - PropertyAttributes attr = - (mode == Variable::VAR) ? NONE : READ_ONLY; - __ li(a1, Operand(Smi::FromInt(attr))); - // Push initial value, if any. - // Note: For variables we must not push an initial value (such as - // 'undefined') because we may have a (legal) redeclaration and we - // must not destroy the current value. - if (mode == Variable::CONST) { - __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); - __ Push(cp, a2, a1, a0); - } else if (function != NULL) { - __ Push(cp, a2, a1); - // Push initial value for function declaration. 
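The StackHandlerConstants change above grows each try handler from four words to five by inserting a context slot between the state and the saved fp, so from the stack top the record reads next, state, context, fp, pc. A compile-time mirror of that layout, assuming 32-bit pointers as on MIPS32; the struct name is only illustrative.

// Mirror of the five-word stack handler record (MIPS32, 4-byte pointers).
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct StackHandlerLayout {
  uint32_t next;      // kNextOffset    = 0 * kPointerSize
  uint32_t state;     // kStateOffset   = 1 * kPointerSize
  uint32_t context;   // kContextOffset = 2 * kPointerSize (new slot)
  uint32_t fp;        // kFPOffset      = 3 * kPointerSize
  uint32_t pc;        // kPCOffset      = 4 * kPointerSize
};

static_assert(offsetof(StackHandlerLayout, context) == 8, "context at 2 words");
static_assert(sizeof(StackHandlerLayout) == 20, "kSize = kPCOffset + 1 word");

int main() {
  std::printf("handler record = %zu bytes\n", sizeof(StackHandlerLayout));  // 20
  return 0;
}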
- VisitForStackValue(function); - } else { - ASSERT(Smi::FromInt(0) == 0); - // No initial value! - __ mov(a0, zero_reg); // Operand(Smi::FromInt(0))); - __ Push(cp, a2, a1, a0); - } - __ CallRuntime(Runtime::kDeclareContextSlot, 4); - break; + ASSERT(slot != NULL); + switch (slot->type()) { + case Slot::PARAMETER: + case Slot::LOCAL: + if (mode == Variable::CONST) { + __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); + __ sw(t0, MemOperand(fp, SlotOffset(slot))); + } else if (function != NULL) { + VisitForAccumulatorValue(function); + __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); } - } + break; - } else if (prop != NULL) { - // A const declaration aliasing a parameter is an illegal redeclaration. - ASSERT(mode != Variable::CONST); - if (function != NULL) { - // We are declaring a function that rewrites to a property. - // Use (keyed) IC to set the initial value. We cannot visit the - // rewrite because it's shared and we risk recording duplicate AST - // IDs for bailouts from optimized code. - ASSERT(prop->obj()->AsVariableProxy() != NULL); - { AccumulatorValueContext for_object(this); - EmitVariableLoad(prop->obj()->AsVariableProxy()); + case Slot::CONTEXT: + // We bypass the general EmitSlotSearch because we know more about + // this specific context. + + // The variable in the decl always resides in the current function + // context. + ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); + if (FLAG_debug_code) { + // Check that we're not inside a with or catch context. + __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); + __ LoadRoot(t0, Heap::kWithContextMapRootIndex); + __ Check(ne, "Declaration in with context.", + a1, Operand(t0)); + __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); + __ Check(ne, "Declaration in catch context.", + a1, Operand(t0)); } + if (mode == Variable::CONST) { + __ LoadRoot(at, Heap::kTheHoleValueRootIndex); + __ sw(at, ContextOperand(cp, slot->index())); + // No write barrier since the_hole_value is in old space. + } else if (function != NULL) { + VisitForAccumulatorValue(function); + __ sw(result_register(), ContextOperand(cp, slot->index())); + int offset = Context::SlotOffset(slot->index()); + // We know that we have written a function, which is not a smi. + __ mov(a1, cp); + __ RecordWrite(a1, Operand(offset), a2, result_register()); + } + break; - __ push(result_register()); - VisitForAccumulatorValue(function); - __ mov(a0, result_register()); - __ pop(a2); - - ASSERT(prop->key()->AsLiteral() != NULL && - prop->key()->AsLiteral()->handle()->IsSmi()); - __ li(a1, Operand(prop->key()->AsLiteral()->handle())); - - Handle ic = is_strict_mode() - ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() - : isolate()->builtins()->KeyedStoreIC_Initialize(); - __ Call(ic); - // Value in v0 is ignored (declarations are statements). + case Slot::LOOKUP: { + __ li(a2, Operand(variable->name())); + // Declaration nodes are always introduced in one of two modes. + ASSERT(mode == Variable::VAR || + mode == Variable::CONST || + mode == Variable::LET); + PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE; + __ li(a1, Operand(Smi::FromInt(attr))); + // Push initial value, if any. + // Note: For variables we must not push an initial value (such as + // 'undefined') because we may have a (legal) redeclaration and we + // must not destroy the current value. 
+ if (mode == Variable::CONST) { + __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); + __ Push(cp, a2, a1, a0); + } else if (function != NULL) { + __ Push(cp, a2, a1); + // Push initial value for function declaration. + VisitForStackValue(function); + } else { + ASSERT(Smi::FromInt(0) == 0); + // No initial value! + __ mov(a0, zero_reg); // Operand(Smi::FromInt(0))); + __ Push(cp, a2, a1, a0); + } + __ CallRuntime(Runtime::kDeclareContextSlot, 4); + break; } } } @@ -886,7 +853,7 @@ __ bind(&next_test); __ Drop(1); // Switch value is no longer needed. if (default_clause == NULL) { - __ Branch(nested_statement.break_target()); + __ Branch(nested_statement.break_label()); } else { __ Branch(default_clause->body_target()); } @@ -900,7 +867,7 @@ VisitStatements(clause->statements()); } - __ bind(nested_statement.break_target()); + __ bind(nested_statement.break_label()); PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); } @@ -1026,7 +993,7 @@ // Load the current count to a0, load the length to a1. __ lw(a0, MemOperand(sp, 0 * kPointerSize)); __ lw(a1, MemOperand(sp, 1 * kPointerSize)); - __ Branch(loop_statement.break_target(), hs, a0, Operand(a1)); + __ Branch(loop_statement.break_label(), hs, a0, Operand(a1)); // Get the current entry of the array into register a3. __ lw(a2, MemOperand(sp, 2 * kPointerSize)); @@ -1053,7 +1020,7 @@ __ push(a3); // Current entry. __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); __ mov(a3, result_register()); - __ Branch(loop_statement.continue_target(), eq, a3, Operand(zero_reg)); + __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg)); // Update the 'each' property or variable from the possibly filtered // entry in register a3. @@ -1069,7 +1036,7 @@ // Generate code for the going to the next element by incrementing // the index (smi) stored on top of the stack. - __ bind(loop_statement.continue_target()); + __ bind(loop_statement.continue_label()); __ pop(a0); __ Addu(a0, a0, Operand(Smi::FromInt(1))); __ push(a0); @@ -1078,7 +1045,7 @@ __ Branch(&loop); // Remove the pointers stored on the stack. - __ bind(loop_statement.break_target()); + __ bind(loop_statement.break_label()); __ Drop(5); // Exit and decrement the loop depth. @@ -1533,9 +1500,7 @@ // Update the write barrier for the array store with v0 as the scratch // register. - __ li(a2, Operand(offset)); - // TODO(PJ): double check this RecordWrite call. - __ RecordWrite(a1, a2, result_register()); + __ RecordWrite(a1, Operand(offset), a2, result_register()); PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); } @@ -2286,36 +2251,10 @@ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); } else { // Call to a keyed property. - // For a synthetic property use keyed load IC followed by function call, - // for a regular property use EmitKeyedCallWithIC. - if (prop->is_synthetic()) { - // Do not visit the object and key subexpressions (they are shared - // by all occurrences of the same rewritten parameter). - ASSERT(prop->obj()->AsVariableProxy() != NULL); - ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL); - Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot(); - MemOperand operand = EmitSlotSearch(slot, a1); - __ lw(a1, operand); - - ASSERT(prop->key()->AsLiteral() != NULL); - ASSERT(prop->key()->AsLiteral()->handle()->IsSmi()); - __ li(a0, Operand(prop->key()->AsLiteral()->handle())); - - // Record source code position for IC call. 
- SetSourcePosition(prop->position()); - - Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop)); - __ lw(a1, GlobalObjectOperand()); - __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); - __ Push(v0, a1); // Function, receiver. - EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); - } else { - { PreservePositionScope scope(masm()->positions_recorder()); - VisitForStackValue(prop->obj()); - } - EmitKeyedCallWithIC(expr, prop->key()); + { PreservePositionScope scope(masm()->positions_recorder()); + VisitForStackValue(prop->obj()); } + EmitKeyedCallWithIC(expr, prop->key()); } } else { { PreservePositionScope scope(masm()->positions_recorder()); @@ -2761,7 +2700,7 @@ // Objects with a non-function constructor have class 'Object'. __ bind(&non_function_constructor); - __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex); + __ LoadRoot(v0, Heap::kObject_symbolRootIndex); __ jmp(&done); // Non-JS objects have class null. @@ -3602,39 +3541,6 @@ } -void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList* args) { - ASSERT(args->length() == 1); - - // Load the function into v0. - VisitForAccumulatorValue(args->at(0)); - - // Prepare for the test. - Label materialize_true, materialize_false; - Label* if_true = NULL; - Label* if_false = NULL; - Label* fall_through = NULL; - context()->PrepareTest(&materialize_true, &materialize_false, - &if_true, &if_false, &fall_through); - - // Test for strict mode function. - __ lw(a1, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); - __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset)); - __ And(at, a1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + - kSmiTagSize))); - __ Branch(if_true, ne, at, Operand(zero_reg)); - - // Test for native function. - __ And(at, a1, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); - __ Branch(if_true, ne, at, Operand(zero_reg)); - - // Not native or strict-mode function. - __ Branch(if_false); - - PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); - context()->Plug(if_true, if_false); -} - - void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { Handle name = expr->name(); if (name->length() > 0 && name->Get(0) == '_') { @@ -3686,18 +3592,12 @@ Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); if (prop != NULL) { - if (prop->is_synthetic()) { - // Result of deleting parameters is false, even when they rewrite - // to accesses on the arguments object. - context()->Plug(false); - } else { - VisitForStackValue(prop->obj()); - VisitForStackValue(prop->key()); - __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); - __ push(a1); - __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); - context()->Plug(v0); - } + VisitForStackValue(prop->obj()); + VisitForStackValue(prop->key()); + __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); + __ push(a1); + __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); + context()->Plug(v0); } else if (var != NULL) { // Delete of an unqualified identifier is disallowed in strict mode // but "delete this" is. 
@@ -4052,6 +3952,10 @@ __ Branch(if_true, eq, v0, Operand(at)); __ LoadRoot(at, Heap::kFalseValueRootIndex); Split(eq, v0, Operand(at), if_true, if_false, fall_through); + } else if (FLAG_harmony_typeof && + check->Equals(isolate()->heap()->null_symbol())) { + __ LoadRoot(at, Heap::kNullValueRootIndex); + Split(eq, v0, Operand(at), if_true, if_false, fall_through); } else if (check->Equals(isolate()->heap()->undefined_symbol())) { __ LoadRoot(at, Heap::kUndefinedValueRootIndex); __ Branch(if_true, eq, v0, Operand(at)); @@ -4069,8 +3973,10 @@ } else if (check->Equals(isolate()->heap()->object_symbol())) { __ JumpIfSmi(v0, if_false); - __ LoadRoot(at, Heap::kNullValueRootIndex); - __ Branch(if_true, eq, v0, Operand(at)); + if (!FLAG_harmony_typeof) { + __ LoadRoot(at, Heap::kNullValueRootIndex); + __ Branch(if_true, eq, v0, Operand(at)); + } // Check for JS objects => true. __ GetObjectType(v0, v0, a1); __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); @@ -4143,11 +4049,8 @@ default: { VisitForAccumulatorValue(expr->right()); Condition cc = eq; - bool strict = false; switch (op) { case Token::EQ_STRICT: - strict = true; - // Fall through. case Token::EQ: cc = eq; __ mov(a0, result_register()); @@ -4316,6 +4219,34 @@ #undef __ +#define __ ACCESS_MASM(masm()) + +FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( + int* stack_depth, + int* context_length) { + // The macros used here must preserve the result register. + + // Because the handler block contains the context of the finally + // code, we can restore it directly from there for the finally code + // rather than iteratively unwinding contexts via their previous + // links. + __ Drop(*stack_depth); // Down to the handler block. + if (*context_length > 0) { + // Restore the context to its dedicated register and the stack. + __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); + __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); + } + __ PopTryHandler(); + __ Call(finally_entry_); + + *stack_depth = 0; + *context_length = 0; + return previous_; +} + + +#undef __ + } } // namespace v8::internal #endif // V8_TARGET_ARCH_MIPS diff -Nru libv8-3.4.14.21/src/mips/ic-mips.cc libv8-3.5.10.24/src/mips/ic-mips.cc --- libv8-3.4.14.21/src/mips/ic-mips.cc 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/mips/ic-mips.cc 2011-08-29 10:41:00.000000000 +0000 @@ -885,8 +885,8 @@ MemOperand unmapped_location = GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow); __ lw(a2, unmapped_location); - __ Branch(&slow, eq, a2, Operand(a3)); __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); + __ Branch(&slow, eq, a2, Operand(a3)); __ mov(v0, a2); __ Ret(); __ bind(&slow); diff -Nru libv8-3.4.14.21/src/mips/macro-assembler-mips.cc libv8-3.5.10.24/src/mips/macro-assembler-mips.cc --- libv8-3.4.14.21/src/mips/macro-assembler-mips.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/mips/macro-assembler-mips.cc 2011-08-31 09:03:56.000000000 +0000 @@ -757,15 +757,20 @@ uint16_t pos, uint16_t size) { ASSERT(pos < 32); - ASSERT(pos + size < 32); + ASSERT(pos + size < 33); if (mips32r2) { ext_(rt, rs, pos, size); } else { // Move rs to rt and shift it left then right to get the // desired bitfield on the right side and zeroes on the left. - sll(rt, rs, 32 - (pos + size)); - srl(rt, rt, 32 - size); + int shift_left = 32 - (pos + size); + sll(rt, rs, shift_left); // Acts as a move if shift_left == 0. 
+ + int shift_right = 32 - size; + if (shift_right > 0) { + srl(rt, rt, shift_right); + } } } @@ -807,28 +812,32 @@ } -void MacroAssembler::Cvt_d_uw(FPURegister fd, FPURegister fs) { - // Move the data from fs to t4. - mfc1(t4, fs); - return Cvt_d_uw(fd, t4); +void MacroAssembler::Cvt_d_uw(FPURegister fd, + FPURegister fs, + FPURegister scratch) { + // Move the data from fs to t8. + mfc1(t8, fs); + Cvt_d_uw(fd, t8, scratch); } -void MacroAssembler::Cvt_d_uw(FPURegister fd, Register rs) { +void MacroAssembler::Cvt_d_uw(FPURegister fd, + Register rs, + FPURegister scratch) { // Convert rs to a FP value in fd (and fd + 1). // We do this by converting rs minus the MSB to avoid sign conversion, - // then adding 2^31-1 and 1 to the result. + // then adding 2^31 to the result (if needed). - ASSERT(!fd.is(f20)); + ASSERT(!fd.is(scratch)); ASSERT(!rs.is(t9)); - ASSERT(!rs.is(t8)); + ASSERT(!rs.is(at)); - // Save rs's MSB to t8. - And(t8, rs, 0x80000000); + // Save rs's MSB to t9. + Ext(t9, rs, 31, 1); // Remove rs's MSB. - And(t9, rs, 0x7FFFFFFF); - // Move t9 to fd. - mtc1(t9, fd); + Ext(at, rs, 0, 31); + // Move the result to fd. + mtc1(at, fd); // Convert fd to a real FP value. cvt_d_w(fd, fd); @@ -837,41 +846,39 @@ // If rs's MSB was 0, it's done. // Otherwise we need to add that to the FP register. - Branch(&conversion_done, eq, t8, Operand(zero_reg)); + Branch(&conversion_done, eq, t9, Operand(zero_reg)); - // First load 2^31 - 1 into f20. - Or(t9, zero_reg, 0x7FFFFFFF); - mtc1(t9, f20); - - // Convert it to FP and add it to fd. - cvt_d_w(f20, f20); - add_d(fd, fd, f20); - // Now add 1. - Or(t9, zero_reg, 1); - mtc1(t9, f20); + // Load 2^31 into f20 as its float representation. + li(at, 0x41E00000); + mtc1(at, FPURegister::from_code(scratch.code() + 1)); + mtc1(zero_reg, scratch); + // Add it to fd. + add_d(fd, fd, scratch); - cvt_d_w(f20, f20); - add_d(fd, fd, f20); bind(&conversion_done); } -void MacroAssembler::Trunc_uw_d(FPURegister fd, FPURegister fs) { - Trunc_uw_d(fs, t4); - mtc1(t4, fd); +void MacroAssembler::Trunc_uw_d(FPURegister fd, + FPURegister fs, + FPURegister scratch) { + Trunc_uw_d(fs, t8, scratch); + mtc1(t8, fd); } -void MacroAssembler::Trunc_uw_d(FPURegister fd, Register rs) { - ASSERT(!fd.is(f22)); - ASSERT(!rs.is(t8)); - - // Load 2^31 into f22. - Or(t8, zero_reg, 0x80000000); - Cvt_d_uw(f22, t8); - - // Test if f22 > fd. - c(OLT, D, fd, f22); +void MacroAssembler::Trunc_uw_d(FPURegister fd, + Register rs, + FPURegister scratch) { + ASSERT(!fd.is(scratch)); + ASSERT(!rs.is(at)); + + // Load 2^31 into scratch as its float representation. + li(at, 0x41E00000); + mtc1(at, FPURegister::from_code(scratch.code() + 1)); + mtc1(zero_reg, scratch); + // Test if scratch > fd. + c(OLT, D, fd, scratch); Label simple_convert; // If fd < 2^31 we can convert it normally. @@ -879,18 +886,17 @@ // First we subtract 2^31 from fd, then trunc it to rs // and add 2^31 to rs. - - sub_d(f22, fd, f22); - trunc_w_d(f22, f22); - mfc1(rs, f22); - or_(rs, rs, t8); + sub_d(scratch, fd, scratch); + trunc_w_d(scratch, scratch); + mfc1(rs, scratch); + Or(rs, rs, 1 << 31); Label done; Branch(&done); // Simple conversion. 
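Both helpers avoid a direct unsigned FPU conversion, which MIPS32 lacks: Cvt_d_uw converts only the low 31 bits with the signed instruction and adds 2^31 afterwards when the original MSB was set, and Trunc_uw_d does the reverse, subtracting 2^31 before truncating and OR-ing the bit back in. The word pair 0x41E00000/0x00000000 loaded into the scratch FPU register is simply the IEEE-754 double encoding of 2^31. A plain C++ check of the constant and of both round trips follows; the function names are illustrative, not V8's.

// Verify the 2^31 constant and the signed-conversion tricks used by
// Cvt_d_uw and Trunc_uw_d.
#include <cstdint>
#include <cstdio>
#include <cstring>

static double CvtDUw(uint32_t x) {
  // Convert the low 31 bits as a signed value, then add 2^31 if the MSB was set.
  double result = static_cast<double>(static_cast<int32_t>(x & 0x7FFFFFFFu));
  if (x >> 31) result += 2147483648.0;
  return result;
}

static uint32_t TruncUwD(double d) {
  if (d < 2147483648.0) {
    return static_cast<uint32_t>(static_cast<int32_t>(d));       // simple path
  }
  // Subtract 2^31, truncate, then OR bit 31 back in (Or(rs, rs, 1 << 31)).
  return static_cast<uint32_t>(static_cast<int32_t>(d - 2147483648.0))
         | 0x80000000u;
}

int main() {
  uint64_t bits = 0x41E0000000000000ull;          // high word 0x41E00000
  double two_pow_31;
  std::memcpy(&two_pow_31, &bits, sizeof(two_pow_31));
  std::printf("2^31 as a double: %.1f\n", two_pow_31);            // 2147483648.0

  const uint32_t samples[] = {0u, 1u, 0x7FFFFFFFu, 0x80000000u, 0xFFFFFFFFu};
  for (uint32_t s : samples) {
    double d = CvtDUw(s);
    std::printf("%10u -> %.1f -> %u\n", s, d, TruncUwD(d));       // round-trips
  }
  return 0;
}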
bind(&simple_convert); - trunc_w_d(f22, fd); - mfc1(rs, f22); + trunc_w_d(scratch, fd); + mfc1(rs, scratch); bind(&done); } @@ -1551,12 +1557,14 @@ b(offset); break; case eq: + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); offset = shifted_branch_offset(L, false); beq(rs, r2, offset); break; case ne: + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); offset = shifted_branch_offset(L, false); @@ -1568,6 +1576,7 @@ offset = shifted_branch_offset(L, false); bgtz(rs, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); slt(scratch, r2, rs); @@ -1584,6 +1593,7 @@ offset = shifted_branch_offset(L, false); beq(scratch, zero_reg, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); slt(scratch, rs, r2); @@ -1600,6 +1610,7 @@ offset = shifted_branch_offset(L, false); bne(scratch, zero_reg, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); slt(scratch, rs, r2); @@ -1612,6 +1623,7 @@ offset = shifted_branch_offset(L, false); blez(rs, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); slt(scratch, r2, rs); @@ -1625,6 +1637,7 @@ offset = shifted_branch_offset(L, false); bgtz(rs, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); sltu(scratch, r2, rs); @@ -1641,6 +1654,7 @@ offset = shifted_branch_offset(L, false); beq(scratch, zero_reg, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); sltu(scratch, rs, r2); @@ -1657,6 +1671,7 @@ offset = shifted_branch_offset(L, false); bne(scratch, zero_reg, offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); sltu(scratch, rs, r2); @@ -1669,6 +1684,7 @@ offset = shifted_branch_offset(L, false); b(offset); } else { + ASSERT(!scratch.is(rs)); r2 = scratch; li(r2, rt); sltu(scratch, r2, rs); @@ -2244,7 +2260,13 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, HandlerType type) { // Adjust this code if not the case. - ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); + // The return address is passed in register ra. if (try_location == IN_JAVASCRIPT) { if (type == TRY_CATCH_HANDLER) { @@ -2252,19 +2274,16 @@ } else { li(t0, Operand(StackHandler::TRY_FINALLY)); } - ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize - && StackHandlerConstants::kFPOffset == 2 * kPointerSize - && StackHandlerConstants::kPCOffset == 3 * kPointerSize - && StackHandlerConstants::kNextOffset == 0 * kPointerSize); // Save the current handler as the next handler. li(t2, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); lw(t1, MemOperand(t2)); addiu(sp, sp, -StackHandlerConstants::kSize); - sw(ra, MemOperand(sp, 12)); - sw(fp, MemOperand(sp, 8)); - sw(t0, MemOperand(sp, 4)); - sw(t1, MemOperand(sp, 0)); + sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset)); + sw(fp, MemOperand(sp, StackHandlerConstants::kFPOffset)); + sw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); + sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset)); + sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset)); // Link this handler as the new current one. 
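PushTryHandler now reserves all five handler words, stores ra, fp, cp, the handler state and the previous chain head, and the store that follows makes the new record the head of the per-isolate handler list; PopTryHandler and Throw later pop records off that list. Below is a stack-allocated sketch of the same linking discipline, with a hypothetical HandlerChain type standing in for the records kept on the machine stack and for Isolate::k_handler_address.

// Sketch of the handler chain discipline: each try block pushes a record that
// remembers the previous chain head; unlinking restores it.
#include <cstdio>

struct HandlerChain {
  static HandlerChain* top;       // plays the role of k_handler_address

  HandlerChain* next;             // kNextOffset slot
  int state;                      // kStateOffset slot (TRY_CATCH / TRY_FINALLY)
  void* context;                  // kContextOffset slot (new in this release)
  void* fp;                       // kFPOffset slot
  void* pc;                       // kPCOffset slot

  explicit HandlerChain(int s)
      : next(top), state(s), context(nullptr), fp(nullptr), pc(nullptr) {
    top = this;                   // link this handler as the new current one
  }
  ~HandlerChain() { top = next; } // PopTryHandler: restore the previous head
};

HandlerChain* HandlerChain::top = nullptr;

int main() {
  {
    HandlerChain outer(0);
    {
      HandlerChain inner(1);
      std::printf("depth-2 state = %d\n", HandlerChain::top->state);  // 1
    }
    std::printf("depth-1 state = %d\n", HandlerChain::top->state);    // 0
  }
  std::printf("chain empty: %d\n", HandlerChain::top == nullptr);     // 1
  return 0;
}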
sw(sp, MemOperand(t2)); @@ -2272,11 +2291,6 @@ } else { // Must preserve a0-a3, and s0 (argv). ASSERT(try_location == IN_JS_ENTRY); - ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize - && StackHandlerConstants::kFPOffset == 2 * kPointerSize - && StackHandlerConstants::kPCOffset == 3 * kPointerSize - && StackHandlerConstants::kNextOffset == 0 * kPointerSize); - // The frame pointer does not point to a JS frame so we save NULL // for fp. We expect the code throwing an exception to check fp // before dereferencing it to restore the context. @@ -2286,11 +2300,14 @@ li(t2, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); lw(t1, MemOperand(t2)); + ASSERT(Smi::FromInt(0) == 0); // Used for no context. + addiu(sp, sp, -StackHandlerConstants::kSize); - sw(ra, MemOperand(sp, 12)); - sw(zero_reg, MemOperand(sp, 8)); - sw(t0, MemOperand(sp, 4)); - sw(t1, MemOperand(sp, 0)); + sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset)); + sw(zero_reg, MemOperand(sp, StackHandlerConstants::kFPOffset)); + sw(zero_reg, MemOperand(sp, StackHandlerConstants::kContextOffset)); + sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset)); + sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset)); // Link this handler as the new current one. sw(sp, MemOperand(t2)); @@ -2299,7 +2316,7 @@ void MacroAssembler::PopTryHandler() { - ASSERT_EQ(0, StackHandlerConstants::kNextOffset); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); pop(a1); Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); li(at, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); @@ -2312,28 +2329,31 @@ Move(v0, value); // Adjust this code if not the case. - STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // Drop the sp to the top of the handler. li(a3, Operand(ExternalReference(Isolate::k_handler_address, - isolate()))); + isolate()))); lw(sp, MemOperand(a3)); - // Restore the next handler and frame pointer, discard handler state. - STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); + // Restore the next handler. pop(a2); sw(a2, MemOperand(a3)); - STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); - MultiPop(a3.bit() | fp.bit()); - // Before returning we restore the context from the frame pointer if - // not NULL. The frame pointer is NULL in the exception handler of a - // JS entry frame. - // Set cp to NULL if fp is NULL. + // Restore context and frame pointer, discard state (a3). + MultiPop(a3.bit() | cp.bit() | fp.bit()); + + // If the handler is a JS frame, restore the context to the frame. + // (a3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any + // of them. Label done; - Branch(USE_DELAY_SLOT, &done, eq, fp, Operand(zero_reg)); - mov(cp, zero_reg); // In branch delay slot. 
- lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); + Branch(&done, eq, fp, Operand(zero_reg)); + sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); bind(&done); #ifdef DEBUG @@ -2355,7 +2375,6 @@ } #endif - STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); pop(t9); // 2 instructions: lw, add sp. Jump(t9); // 2 instructions: jr, nop (in delay slot). @@ -2370,7 +2389,12 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, Register value) { // Adjust this code if not the case. - STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize); + STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize); // v0 is expected to hold the exception. Move(v0, value); @@ -2393,7 +2417,6 @@ bind(&done); // Set the top handler address to next handler past the current ENTRY handler. - STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); pop(a2); sw(a2, MemOperand(a3)); @@ -2415,20 +2438,12 @@ // Stack layout at this point. See also StackHandlerConstants. // sp -> state (ENTRY) + // cp // fp // ra - // Discard handler state (a2 is not used) and restore frame pointer. - STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); - MultiPop(a2.bit() | fp.bit()); // a2: discarded state. - // Before returning we restore the context from the frame pointer if - // not NULL. The frame pointer is NULL in the exception handler of a - // JS entry frame. - Label cp_null; - Branch(USE_DELAY_SLOT, &cp_null, eq, fp, Operand(zero_reg)); - mov(cp, zero_reg); // In the branch delay slot. - lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - bind(&cp_null); + // Restore context and frame pointer, discard state (r2). + MultiPop(a2.bit() | cp.bit() | fp.bit()); #ifdef DEBUG // When emitting debug_code, set ra as return address for the jump. @@ -2448,7 +2463,6 @@ addiu(ra, ra, kOffsetRaBytes); } #endif - STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); pop(t9); // 2 instructions: lw, add sp. Jump(t9); // 2 instructions: jr, nop (in delay slot). diff -Nru libv8-3.4.14.21/src/mips/macro-assembler-mips.h libv8-3.5.10.24/src/mips/macro-assembler-mips.h --- libv8-3.4.14.21/src/mips/macro-assembler-mips.h 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/mips/macro-assembler-mips.h 2011-08-29 10:41:00.000000000 +0000 @@ -524,12 +524,12 @@ void Ext(Register rt, Register rs, uint16_t pos, uint16_t size); // Convert unsigned word to double. - void Cvt_d_uw(FPURegister fd, FPURegister fs); - void Cvt_d_uw(FPURegister fd, Register rs); + void Cvt_d_uw(FPURegister fd, FPURegister fs, FPURegister scratch); + void Cvt_d_uw(FPURegister fd, Register rs, FPURegister scratch); // Convert double to unsigned word. - void Trunc_uw_d(FPURegister fd, FPURegister fs); - void Trunc_uw_d(FPURegister fd, Register rs); + void Trunc_uw_d(FPURegister fd, FPURegister fs, FPURegister scratch); + void Trunc_uw_d(FPURegister fd, Register rs, FPURegister scratch); // Convert the HeapNumber pointed to by source to a 32bits signed integer // dest. 
If the HeapNumber does not fit into a 32bits signed integer branch diff -Nru libv8-3.4.14.21/src/mips/regexp-macro-assembler-mips.cc libv8-3.5.10.24/src/mips/regexp-macro-assembler-mips.cc --- libv8-3.4.14.21/src/mips/regexp-macro-assembler-mips.cc 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/mips/regexp-macro-assembler-mips.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1036,12 +1036,12 @@ } // Prepare for possible GC. - HandleScope handles; + HandleScope handles(isolate); Handle code_handle(re_code); Handle subject(frame_entry(re_frame, kInputString)); // Current string. - bool is_ascii = subject->IsAsciiRepresentation(); + bool is_ascii = subject->IsAsciiRepresentationUnderneath(); ASSERT(re_code->instruction_start() <= *return_address); ASSERT(*return_address <= @@ -1050,7 +1050,7 @@ MaybeObject* result = Execution::HandleStackGuardInterrupt(); if (*code_handle != re_code) { // Return address no longer valid. - int delta = *code_handle - re_code; + int delta = code_handle->address() - re_code->address(); // Overwrite the return address on the stack. *return_address += delta; } @@ -1059,8 +1059,20 @@ return EXCEPTION; } + Handle subject_tmp = subject; + int slice_offset = 0; + + // Extract the underlying string and the slice offset. + if (StringShape(*subject_tmp).IsCons()) { + subject_tmp = Handle(ConsString::cast(*subject_tmp)->first()); + } else if (StringShape(*subject_tmp).IsSliced()) { + SlicedString* slice = SlicedString::cast(*subject_tmp); + subject_tmp = Handle(slice->parent()); + slice_offset = slice->offset(); + } + // String might have changed. - if (subject->IsAsciiRepresentation() != is_ascii) { + if (subject_tmp->IsAsciiRepresentation() != is_ascii) { // If we changed between an ASCII and an UC16 string, the specialized // code cannot be used, and we need to restart regexp matching from // scratch (including, potentially, compiling a new version of the code). @@ -1071,8 +1083,8 @@ // be a sequential or external string with the same content. // Update the start and end pointers in the stack frame to the current // location (whether it has actually moved or not). - ASSERT(StringShape(*subject).IsSequential() || - StringShape(*subject).IsExternal()); + ASSERT(StringShape(*subject_tmp).IsSequential() || + StringShape(*subject_tmp).IsExternal()); // The original start address of the characters to match. const byte* start_address = frame_entry(re_frame, kInputStart); @@ -1080,13 +1092,14 @@ // Find the current start address of the same character at the current string // position. int start_index = frame_entry(re_frame, kStartIndex); - const byte* new_address = StringCharacterPosition(*subject, start_index); + const byte* new_address = StringCharacterPosition(*subject_tmp, + start_index + slice_offset); if (start_address != new_address) { // If there is a difference, update the object pointer and start and end // addresses in the RegExp stack frame to match the new value. 
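Before recomputing character addresses after a GC, the helper now looks through one level of string indirection: a flat cons string contributes its first part with no extra offset, while a sliced string contributes its parent plus the slice offset, which is then added to the start index. A simplified model of that unwrapping step follows, with a small tagged struct instead of V8's StringShape/SlicedString machinery; all names are illustrative.

// Simplified model of "extract the underlying string and the slice offset".
#include <cstdio>
#include <string>

enum class Shape { kSeq, kCons, kSliced };

struct Str {
  Shape shape;
  std::string chars;      // only meaningful for kSeq
  const Str* first;       // kCons: first part (second part is the empty string)
  const Str* parent;      // kSliced: backing string
  int offset;             // kSliced: start inside the parent
};

// Returns the string whose character data is actually addressed, plus the
// extra offset to add to any start index.
static const Str* Unwrap(const Str* subject, int* slice_offset) {
  *slice_offset = 0;
  if (subject->shape == Shape::kCons) {
    return subject->first;
  } else if (subject->shape == Shape::kSliced) {
    *slice_offset = subject->offset;
    return subject->parent;
  }
  return subject;
}

int main() {
  Str parent{Shape::kSeq, "hello world", nullptr, nullptr, 0};
  Str slice{Shape::kSliced, "", nullptr, &parent, 6};   // "world"

  int off = 0;
  const Str* underlying = Unwrap(&slice, &off);
  int start_index = 1;                                  // match resumes at 'o'
  std::printf("char = %c\n", underlying->chars[start_index + off]);  // 'o'
  return 0;
}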
const byte* end_address = frame_entry(re_frame, kInputEnd); - int byte_length = end_address - start_address; + int byte_length = static_cast(end_address - start_address); frame_entry(re_frame, kInputString) = *subject; frame_entry(re_frame, kInputStart) = new_address; frame_entry(re_frame, kInputEnd) = new_address + byte_length; diff -Nru libv8-3.4.14.21/src/mips/simulator-mips.cc libv8-3.5.10.24/src/mips/simulator-mips.cc --- libv8-3.4.14.21/src/mips/simulator-mips.cc 2011-06-27 13:21:41.000000000 +0000 +++ libv8-3.5.10.24/src/mips/simulator-mips.cc 2011-08-31 09:03:56.000000000 +0000 @@ -1409,20 +1409,11 @@ int32_t arg1 = get_register(a1); int32_t arg2 = get_register(a2); int32_t arg3 = get_register(a3); - int32_t arg4 = 0; - int32_t arg5 = 0; - // Need to check if sp is valid before assigning arg4, arg5. - // This is a fix for cctest test-api/CatchStackOverflow which causes - // the stack to overflow. For some reason arm doesn't need this - // stack check here. int32_t* stack_pointer = reinterpret_cast(get_register(sp)); - int32_t* stack = reinterpret_cast(stack_); - if (stack_pointer >= stack && stack_pointer < stack + stack_size_ - 5) { - // Args 4 and 5 are on the stack after the reserved space for args 0..3. - arg4 = stack_pointer[4]; - arg5 = stack_pointer[5]; - } + // Args 4 and 5 are on the stack after the reserved space for args 0..3. + int32_t arg4 = stack_pointer[4]; + int32_t arg5 = stack_pointer[5]; bool fp_call = (redirection->type() == ExternalReference::BUILTIN_FP_FP_CALL) || diff -Nru libv8-3.4.14.21/src/mips/stub-cache-mips.cc libv8-3.5.10.24/src/mips/stub-cache-mips.cc --- libv8-3.4.14.21/src/mips/stub-cache-mips.cc 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/mips/stub-cache-mips.cc 2011-08-29 10:41:00.000000000 +0000 @@ -3494,7 +3494,7 @@ __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset)); __ sra(t2, key, kSmiTagSize); // Unsigned comparison catches both negative and too-large values. - __ Branch(&miss_force_generic, Uless, t1, Operand(t2)); + __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1)); __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset)); // a3: base pointer of external storage @@ -3638,7 +3638,7 @@ // __ mtc1(zero_reg, f1); // MS 32-bits are all zero. // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit. - __ Cvt_d_uw(f0, value); + __ Cvt_d_uw(f0, value, f22); __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag)); @@ -3822,16 +3822,16 @@ // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset)); - - // Check that the key is a smi. + // Check that the key is a smi. __ JumpIfNotSmi(key, &miss_force_generic); + __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset)); + // Check that the index is in range. __ SmiUntag(t0, key); __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset)); // Unsigned comparison catches both negative and too-large values. - __ Branch(&miss_force_generic, Ugreater_equal, t0, Operand(t1)); + __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1)); // Handle both smis and HeapNumbers in the fast path. Go to the // runtime for all other kinds of values. 
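Both keyed load and store stubs rely on the trick the comment spells out: comparing index and length as unsigned values makes any negative index wrap to a huge number, so a single greater-or-equal test rejects negative and too-large keys at once. A two-line illustration:

// The single unsigned comparison behind "catches both negative and too-large".
#include <cstdint>
#include <cstdio>

static bool InBounds(int32_t index, int32_t length) {
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
}

int main() {
  std::printf("%d %d %d %d\n",
              InBounds(0, 10),    // 1: valid
              InBounds(9, 10),    // 1: last element
              InBounds(10, 10),   // 0: too large
              InBounds(-1, 10));  // 0: negative wraps to 0xFFFFFFFF
  return 0;
}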
@@ -4428,7 +4428,8 @@ __ sw(mantissa_reg, FieldMemOperand(scratch, FixedDoubleArray::kHeaderSize)); uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); __ sw(exponent_reg, FieldMemOperand(scratch, offset)); - __ Ret(); + __ Ret(USE_DELAY_SLOT); + __ mov(v0, value_reg); // In delay slot. __ bind(&maybe_nan); // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise @@ -4459,11 +4460,18 @@ } else { destination = FloatingPointHelper::kCoreRegisters; } - __ SmiUntag(value_reg, value_reg); + + Register untagged_value = receiver_reg; + __ SmiUntag(untagged_value, value_reg); FloatingPointHelper::ConvertIntToDouble( - masm, value_reg, destination, - f0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2. - scratch4, f2); // These are: scratch2, single_scratch. + masm, + untagged_value, + destination, + f0, + mantissa_reg, + exponent_reg, + scratch4, + f2); if (destination == FloatingPointHelper::kFPURegisters) { CpuFeatures::Scope scope(FPU); __ sdc1(f0, MemOperand(scratch, 0)); @@ -4471,7 +4479,8 @@ __ sw(mantissa_reg, MemOperand(scratch, 0)); __ sw(exponent_reg, MemOperand(scratch, Register::kSizeInBytes)); } - __ Ret(); + __ Ret(USE_DELAY_SLOT); + __ mov(v0, value_reg); // In delay slot. // Handle store cache miss, replacing the ic with the generic stub. __ bind(&miss_force_generic); diff -Nru libv8-3.4.14.21/src/mirror-debugger.js libv8-3.5.10.24/src/mirror-debugger.js --- libv8-3.4.14.21/src/mirror-debugger.js 2011-07-20 13:44:42.000000000 +0000 +++ libv8-3.5.10.24/src/mirror-debugger.js 2011-08-29 10:41:00.000000000 +0000 @@ -195,7 +195,8 @@ Local: 1, With: 2, Closure: 3, - Catch: 4 }; + Catch: 4, + Block: 5 }; // Mirror hierarchy: diff -Nru libv8-3.4.14.21/src/mksnapshot.cc libv8-3.5.10.24/src/mksnapshot.cc --- libv8-3.4.14.21/src/mksnapshot.cc 2011-07-13 13:23:34.000000000 +0000 +++ libv8-3.5.10.24/src/mksnapshot.cc 2011-08-15 13:01:23.000000000 +0000 @@ -40,8 +40,6 @@ #include "serialize.h" #include "list.h" -// use explicit namespace to avoid clashing with types in namespace v8 -namespace i = v8::internal; using namespace v8; static const unsigned int kMaxCounters = 256; diff -Nru libv8-3.4.14.21/src/objects.cc libv8-3.5.10.24/src/objects.cc --- libv8-3.4.14.21/src/objects.cc 2011-09-12 11:26:01.000000000 +0000 +++ libv8-3.5.10.24/src/objects.cc 2011-10-05 09:40:01.000000000 +0000 @@ -33,6 +33,7 @@ #include "codegen.h" #include "debug.h" #include "deoptimizer.h" +#include "elements.h" #include "execution.h" #include "full-codegen.h" #include "hydrogen.h" @@ -602,36 +603,69 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) { - Object* holder = NULL; - if (IsSmi()) { - Context* global_context = Isolate::Current()->context()->global_context(); - holder = global_context->number_function()->instance_prototype(); - } else { - HeapObject* heap_object = HeapObject::cast(this); + Heap* heap = IsSmi() + ? Isolate::Current()->heap() + : HeapObject::cast(this)->GetHeap(); + Object* holder = this; + + // Iterate up the prototype chain until an element is found or the null + // prototype is encountered. 
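The loop that follows replaces the old per-object recursion: the holder starts at the receiver and advances along the prototype chain, each JSObject holder is asked for the element through its elements accessor, and only a hole answer moves the search to the next prototype, with undefined returned once the null prototype is reached. Here is a small sketch of that loop shape over a simplified object model; the Obj type and the nullptr-as-hole convention are illustrative, not V8's.

// Sketch of the element lookup loop: walk the prototype chain until a
// non-hole element is found or the chain ends at null.
#include <cstdio>
#include <vector>

struct Value { int payload; };
static Value kUndefined{0};

struct Obj {
  std::vector<Value*> elements;   // indexed properties; nullptr slots are holes
  Obj* prototype;                 // nullptr plays the role of null
};

static Value* GetElementWithReceiver(Obj* receiver, unsigned index) {
  for (Obj* holder = receiver; holder != nullptr; holder = holder->prototype) {
    if (index < holder->elements.size() && holder->elements[index] != nullptr) {
      return holder->elements[index];    // found on this holder
    }
    // Hole (or short backing store): keep looking up the chain.
  }
  return &kUndefined;                    // fell off the end of the chain
}

int main() {
  Value inherited{42};
  Obj proto{{nullptr, &inherited}, nullptr};   // element 1 lives on the prototype
  Obj obj{{nullptr, nullptr}, &proto};         // only holes on the receiver

  std::printf("obj[1] = %d\n", GetElementWithReceiver(&obj, 1)->payload);  // 42
  std::printf("obj[5] = %d\n", GetElementWithReceiver(&obj, 5)->payload);  // 0
  return 0;
}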
+ for (holder = this; + holder != heap->null_value(); + holder = holder->GetPrototype()) { + if (holder->IsSmi()) { + Context* global_context = Isolate::Current()->context()->global_context(); + holder = global_context->number_function()->instance_prototype(); + } else { + HeapObject* heap_object = HeapObject::cast(holder); + if (!heap_object->IsJSObject()) { + Isolate* isolate = heap->isolate(); + Context* global_context = isolate->context()->global_context(); + if (heap_object->IsString()) { + holder = global_context->string_function()->instance_prototype(); + } else if (heap_object->IsHeapNumber()) { + holder = global_context->number_function()->instance_prototype(); + } else if (heap_object->IsBoolean()) { + holder = global_context->boolean_function()->instance_prototype(); + } else if (heap_object->IsJSProxy()) { + return heap->undefined_value(); // For now... + } else { + // Undefined and null have no indexed properties. + ASSERT(heap_object->IsUndefined() || heap_object->IsNull()); + return heap->undefined_value(); + } + } + } - if (heap_object->IsJSObject()) { - return JSObject::cast(this)->GetElementWithReceiver(receiver, index); + // Inline the case for JSObjects. Doing so significantly improves the + // performance of fetching elements where checking the prototype chain is + // necessary. + JSObject* js_object = JSObject::cast(holder); + + // Check access rights if needed. + if (js_object->IsAccessCheckNeeded()) { + Isolate* isolate = heap->isolate(); + if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) { + isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET); + return heap->undefined_value(); + } } - Heap* heap = heap_object->GetHeap(); - Isolate* isolate = heap->isolate(); - Context* global_context = isolate->context()->global_context(); - if (heap_object->IsString()) { - holder = global_context->string_function()->instance_prototype(); - } else if (heap_object->IsHeapNumber()) { - holder = global_context->number_function()->instance_prototype(); - } else if (heap_object->IsBoolean()) { - holder = global_context->boolean_function()->instance_prototype(); - } else if (heap_object->IsJSProxy()) { - return heap->undefined_value(); // For now... - } else { - // Undefined and null have no indexed properties. - ASSERT(heap_object->IsUndefined() || heap_object->IsNull()); - return heap->undefined_value(); + if (js_object->HasIndexedInterceptor()) { + return js_object->GetElementWithInterceptor(receiver, index); + } + + if (js_object->elements() != heap->empty_fixed_array()) { + MaybeObject* result = js_object->GetElementsAccessor()->Get( + js_object->elements(), + index, + js_object, + receiver); + if (result != heap->the_hole_value()) return result; } } - return JSObject::cast(holder)->GetElementWithReceiver(receiver, index); + return heap->undefined_value(); } @@ -962,6 +996,11 @@ accumulator->Add("", static_cast(length)); break; } + case JS_WEAK_MAP_TYPE: { + int elements = JSWeakMap::cast(this)->table()->NumberOfElements(); + accumulator->Add("", elements); + break; + } case JS_REGEXP_TYPE: { accumulator->Add(""); break; @@ -1009,7 +1048,6 @@ global_object ? "Global Object: " : "", vowel ? 
"n" : ""); accumulator->Put(str); - accumulator->Put('>'); printed = true; } } @@ -1169,6 +1207,9 @@ case kConsStringTag: ConsString::BodyDescriptor::IterateBody(this, v); break; + case kSlicedStringTag: + SlicedString::BodyDescriptor::IterateBody(this, v); + break; case kExternalStringTag: if ((type & kStringEncodingMask) == kAsciiStringTag) { reinterpret_cast(this)-> @@ -1192,6 +1233,7 @@ case JS_CONTEXT_EXTENSION_OBJECT_TYPE: case JS_VALUE_TYPE: case JS_ARRAY_TYPE: + case JS_WEAK_MAP_TYPE: case JS_REGEXP_TYPE: case JS_GLOBAL_PROXY_TYPE: case JS_GLOBAL_OBJECT_TYPE: @@ -2279,7 +2321,8 @@ if (has_exception) return Failure::Exception(); Object* bool_result = result->ToBoolean(); - if (mode == STRICT_DELETION && bool_result == GetHeap()->false_value()) { + if (mode == STRICT_DELETION && + bool_result == isolate->heap()->false_value()) { Handle args[] = { handler, trap_name }; Handle error = isolate->factory()->NewTypeError( "handler_failed", HandleVector(args, ARRAY_SIZE(args))); @@ -2331,7 +2374,7 @@ Handle self(this); isolate->factory()->BecomeJSObject(self); - ASSERT(IsJSObject()); + ASSERT(self->IsJSObject()); // TODO(rossberg): recognize function proxies. } @@ -2471,6 +2514,9 @@ // callback setter removed. The two lines looking up the LookupResult // result are also added. If one of the functions is changed, the other // should be. +// Note that this method cannot be used to set the prototype of a function +// because ConvertDescriptorToField() which is called in "case CALLBACKS:" +// doesn't handle function prototypes correctly. MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( String* name, Object* value, @@ -2896,9 +2942,12 @@ int length = IsJSArray() ? Smi::cast(JSArray::cast(this)->length())->value() : array->length(); + int old_capacity = 0; + int used_elements = 0; + GetElementsCapacityAndUsage(&old_capacity, &used_elements); NumberDictionary* dictionary = NULL; { Object* object; - MaybeObject* maybe = NumberDictionary::Allocate(length); + MaybeObject* maybe = NumberDictionary::Allocate(used_elements); if (!maybe->ToObject(&object)) return maybe; dictionary = NumberDictionary::cast(object); } @@ -2917,7 +2966,7 @@ // exceed the capacity of new space, and we would fail repeatedly // trying to convert the FixedDoubleArray. MaybeObject* maybe_value_object = - GetHeap()->AllocateHeapNumber(double_array->get(i), TENURED); + GetHeap()->AllocateHeapNumber(double_array->get_scalar(i), TENURED); if (!maybe_value_object->ToObject(&value)) return maybe_value_object; } } else { @@ -2961,6 +3010,98 @@ } +MaybeObject* JSObject::GetHiddenProperties(HiddenPropertiesFlag flag) { + Isolate* isolate = GetIsolate(); + Heap* heap = isolate->heap(); + Object* holder = BypassGlobalProxy(); + if (holder->IsUndefined()) return heap->undefined_value(); + JSObject* obj = JSObject::cast(holder); + if (obj->HasFastProperties()) { + // If the object has fast properties, check whether the first slot + // in the descriptor array matches the hidden symbol. Since the + // hidden symbols hash code is zero (and no other string has hash + // code zero) it will always occupy the first entry if present. 
+ DescriptorArray* descriptors = obj->map()->instance_descriptors(); + if ((descriptors->number_of_descriptors() > 0) && + (descriptors->GetKey(0) == heap->hidden_symbol()) && + descriptors->IsProperty(0)) { + ASSERT(descriptors->GetType(0) == FIELD); + return obj->FastPropertyAt(descriptors->GetFieldIndex(0)); + } + } + + // Only attempt to find the hidden properties in the local object and not + // in the prototype chain. + if (!obj->HasHiddenPropertiesObject()) { + // Hidden properties object not found. Allocate a new hidden properties + // object if requested. Otherwise return the undefined value. + if (flag == ALLOW_CREATION) { + Object* hidden_obj; + { MaybeObject* maybe_obj = heap->AllocateJSObject( + isolate->context()->global_context()->object_function()); + if (!maybe_obj->ToObject(&hidden_obj)) return maybe_obj; + } + // Don't allow leakage of the hidden object through accessors + // on Object.prototype. + { + MaybeObject* maybe_obj = + JSObject::cast(hidden_obj)->SetPrototype(heap->null_value(), false); + if (maybe_obj->IsFailure()) return maybe_obj; + } + return obj->SetHiddenPropertiesObject(hidden_obj); + } else { + return heap->undefined_value(); + } + } + return obj->GetHiddenPropertiesObject(); +} + + +MaybeObject* JSObject::GetIdentityHash(HiddenPropertiesFlag flag) { + Isolate* isolate = GetIsolate(); + Object* hidden_props_obj; + { MaybeObject* maybe_obj = GetHiddenProperties(flag); + if (!maybe_obj->ToObject(&hidden_props_obj)) return maybe_obj; + } + if (!hidden_props_obj->IsJSObject()) { + // We failed to create hidden properties. That's a detached + // global proxy. + ASSERT(hidden_props_obj->IsUndefined()); + return Smi::FromInt(0); + } + JSObject* hidden_props = JSObject::cast(hidden_props_obj); + String* hash_symbol = isolate->heap()->identity_hash_symbol(); + { + // Note that HasLocalProperty() can cause a GC in the general case in the + // presence of interceptors. + AssertNoAllocation no_alloc; + if (hidden_props->HasLocalProperty(hash_symbol)) { + MaybeObject* hash = hidden_props->GetProperty(hash_symbol); + return Smi::cast(hash->ToObjectChecked()); + } + } + + int hash_value; + int attempts = 0; + do { + // Generate a random 32-bit hash value but limit range to fit + // within a smi. + hash_value = V8::Random(isolate) & Smi::kMaxValue; + attempts++; + } while (hash_value == 0 && attempts < 30); + hash_value = hash_value != 0 ? hash_value : 1; // never return 0 + + Smi* hash = Smi::FromInt(hash_value); + { MaybeObject* result = hidden_props->SetLocalPropertyIgnoreAttributes( + hash_symbol, + hash, + static_cast(None)); + if (result->IsFailure()) return result; + } + return hash; +} + + MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name, DeleteMode mode) { // Check local property, ignore interceptor. @@ -3010,48 +3151,6 @@ } -MaybeObject* JSObject::DeleteElementPostInterceptor(uint32_t index, - DeleteMode mode) { - ASSERT(!HasExternalArrayElements()); - switch (GetElementsKind()) { - case FAST_ELEMENTS: { - Object* obj; - { MaybeObject* maybe_obj = EnsureWritableFastElements(); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - uint32_t length = IsJSArray() ? 
- static_cast(Smi::cast(JSArray::cast(this)->length())->value()) : - static_cast(FixedArray::cast(elements())->length()); - if (index < length) { - FixedArray::cast(elements())->set_the_hole(index); - } - break; - } - case DICTIONARY_ELEMENTS: { - NumberDictionary* dictionary = element_dictionary(); - int entry = dictionary->FindEntry(index); - if (entry != NumberDictionary::kNotFound) { - Object* deleted = dictionary->DeleteProperty(entry, mode); - if (deleted == GetHeap()->true_value()) { - MaybeObject* maybe_elements = dictionary->Shrink(index); - FixedArray* new_elements = NULL; - if (!maybe_elements->To(&new_elements)) { - return maybe_elements; - } - set_elements(new_elements); - } - return deleted; - } - break; - } - default: - UNREACHABLE(); - break; - } - return GetHeap()->true_value(); -} - - MaybeObject* JSObject::DeleteElementWithInterceptor(uint32_t index) { Isolate* isolate = GetIsolate(); Heap* heap = isolate->heap(); @@ -3079,100 +3178,15 @@ ASSERT(result->IsBoolean()); return *v8::Utils::OpenHandle(*result); } - MaybeObject* raw_result = - this_handle->DeleteElementPostInterceptor(index, NORMAL_DELETION); + MaybeObject* raw_result = this_handle->GetElementsAccessor()->Delete( + *this_handle, + index, + NORMAL_DELETION); RETURN_IF_SCHEDULED_EXCEPTION(isolate); return raw_result; } -MaybeObject* JSObject::DeleteFastElement(uint32_t index) { - ASSERT(HasFastElements() || HasFastArgumentsElements()); - Heap* heap = GetHeap(); - FixedArray* backing_store = FixedArray::cast(elements()); - if (backing_store->map() == heap->non_strict_arguments_elements_map()) { - backing_store = FixedArray::cast(backing_store->get(1)); - } else { - Object* writable; - MaybeObject* maybe = EnsureWritableFastElements(); - if (!maybe->ToObject(&writable)) return maybe; - backing_store = FixedArray::cast(writable); - } - uint32_t length = static_cast( - IsJSArray() - ? Smi::cast(JSArray::cast(this)->length())->value() - : backing_store->length()); - if (index < length) { - backing_store->set_the_hole(index); - // If an old space backing store is larger than a certain size and - // has too few used values, normalize it. - // To avoid doing the check on every delete we require at least - // one adjacent hole to the value being deleted. - Object* hole = heap->the_hole_value(); - const int kMinLengthForSparsenessCheck = 64; - if (backing_store->length() >= kMinLengthForSparsenessCheck && - !heap->InNewSpace(backing_store) && - ((index > 0 && backing_store->get(index - 1) == hole) || - (index + 1 < length && backing_store->get(index + 1) == hole))) { - int num_used = 0; - for (int i = 0; i < backing_store->length(); ++i) { - if (backing_store->get(i) != hole) ++num_used; - // Bail out early if more than 1/4 is used. 
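
JSObject::GetIdentityHash, shown a little earlier in this objects.cc diff, generates the hash lazily: draw a random value, mask it into the smi range, retry a bounded number of times if it comes out as zero, and never return zero so that "no hash assigned yet" stays distinguishable from a real hash. A standalone sketch of that loop, with std::rand and an assumed 31-bit smi range standing in for V8::Random and Smi::kMaxValue:

#include <cstdlib>

// Assumed smi payload range for this sketch; the real bound is Smi::kMaxValue.
static const int kSmiMaxValueSketch = (1 << 30) - 1;

static int GenerateIdentityHash() {
  int hash = 0;
  for (int attempts = 0; hash == 0 && attempts < 30; ++attempts) {
    hash = std::rand() & kSmiMaxValueSketch;  // Stand-in for V8::Random(isolate).
  }
  return hash != 0 ? hash : 1;  // Never return 0.
}
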
- if (4 * num_used > backing_store->length()) break; - } - if (4 * num_used <= backing_store->length()) { - MaybeObject* result = NormalizeElements(); - if (result->IsFailure()) return result; - } - } - } - return heap->true_value(); -} - - -MaybeObject* JSObject::DeleteDictionaryElement(uint32_t index, - DeleteMode mode) { - Isolate* isolate = GetIsolate(); - Heap* heap = isolate->heap(); - FixedArray* backing_store = FixedArray::cast(elements()); - bool is_arguments = - (GetElementsKind() == JSObject::NON_STRICT_ARGUMENTS_ELEMENTS); - if (is_arguments) { - backing_store = FixedArray::cast(backing_store->get(1)); - } - NumberDictionary* dictionary = NumberDictionary::cast(backing_store); - int entry = dictionary->FindEntry(index); - if (entry != NumberDictionary::kNotFound) { - Object* result = dictionary->DeleteProperty(entry, mode); - if (result == heap->true_value()) { - MaybeObject* maybe_elements = dictionary->Shrink(index); - FixedArray* new_elements = NULL; - if (!maybe_elements->To(&new_elements)) { - return maybe_elements; - } - if (is_arguments) { - FixedArray::cast(elements())->set(1, new_elements); - } else { - set_elements(new_elements); - } - } - if (mode == STRICT_DELETION && result == heap->false_value()) { - // In strict mode, attempting to delete a non-configurable property - // throws an exception. - HandleScope scope(isolate); - Handle holder(this); - Handle name = isolate->factory()->NewNumberFromUint(index); - Handle args[2] = { name, holder }; - Handle error = - isolate->factory()->NewTypeError("strict_delete_property", - HandleVector(args, 2)); - return isolate->Throw(*error); - } - } - return heap->true_value(); -} - - MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) { Isolate* isolate = GetIsolate(); // Check access rights if needed. @@ -3191,62 +3205,13 @@ if (HasIndexedInterceptor()) { // Skip interceptor if forcing deletion. - return (mode == FORCE_DELETION) - ? DeleteElementPostInterceptor(index, FORCE_DELETION) - : DeleteElementWithInterceptor(index); - } - - switch (GetElementsKind()) { - case FAST_ELEMENTS: - return DeleteFastElement(index); - - case DICTIONARY_ELEMENTS: - return DeleteDictionaryElement(index, mode); - - case FAST_DOUBLE_ELEMENTS: { - int length = IsJSArray() - ? Smi::cast(JSArray::cast(this)->length())->value() - : FixedDoubleArray::cast(elements())->length(); - if (index < static_cast(length)) { - FixedDoubleArray::cast(elements())->set_the_hole(index); - } - break; - } - case EXTERNAL_PIXEL_ELEMENTS: - case EXTERNAL_BYTE_ELEMENTS: - case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - case EXTERNAL_SHORT_ELEMENTS: - case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - case EXTERNAL_INT_ELEMENTS: - case EXTERNAL_UNSIGNED_INT_ELEMENTS: - case EXTERNAL_FLOAT_ELEMENTS: - case EXTERNAL_DOUBLE_ELEMENTS: - // Pixel and external array elements cannot be deleted. Just - // silently ignore here. - break; - - case NON_STRICT_ARGUMENTS_ELEMENTS: { - FixedArray* parameter_map = FixedArray::cast(elements()); - uint32_t length = parameter_map->length(); - Object* probe = - index < (length - 2) ? parameter_map->get(index + 2) : NULL; - if (probe != NULL && !probe->IsTheHole()) { - // TODO(kmillikin): We could check if this was the last aliased - // parameter, and revert to normal elements in that case. That - // would enable GC of the context. 
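
The DeleteFastElement code removed just above carries a heuristic worth spelling out: after punching a hole it only considers normalizing to dictionary elements when the backing store is at least 64 entries long, a neighbouring slot is already a hole, and at most a quarter of the slots remain in use. A standalone sketch with a plain vector in place of the FixedArray backing store (the "not in new space" condition of the original is omitted):

#include <cstddef>
#include <optional>
#include <vector>

// std::nullopt models the hole; deleted_index is the slot that was just holed.
static bool ShouldNormalizeAfterDelete(
    const std::vector<std::optional<int>>& store, std::size_t deleted_index) {
  const std::size_t kMinLengthForSparsenessCheck = 64;
  if (store.size() < kMinLengthForSparsenessCheck) return false;
  // Only bother when a neighbouring slot is already a hole.
  bool neighbour_hole =
      (deleted_index > 0 && !store[deleted_index - 1]) ||
      (deleted_index + 1 < store.size() && !store[deleted_index + 1]);
  if (!neighbour_hole) return false;
  // Count used slots, bailing out as soon as more than a quarter are in use.
  std::size_t num_used = 0;
  for (const auto& slot : store) {
    if (slot) ++num_used;
    if (4 * num_used > store.size()) return false;
  }
  return 4 * num_used <= store.size();
}
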
- parameter_map->set_the_hole(index + 2); - } else { - FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); - if (arguments->IsDictionary()) { - return DeleteDictionaryElement(index, mode); - } else { - return DeleteFastElement(index); - } - } - break; + if (mode != FORCE_DELETION) { + return DeleteElementWithInterceptor(index); } + mode = JSReceiver::FORCE_DELETION; } - return isolate->heap()->true_value(); + + return GetElementsAccessor()->Delete(this, index, mode); } @@ -3667,6 +3632,7 @@ if (is_element) { switch (GetElementsKind()) { case FAST_ELEMENTS: + case FAST_DOUBLE_ELEMENTS: break; case EXTERNAL_PIXEL_ELEMENTS: case EXTERNAL_BYTE_ELEMENTS: @@ -3677,7 +3643,6 @@ case EXTERNAL_UNSIGNED_INT_ELEMENTS: case EXTERNAL_FLOAT_ELEMENTS: case EXTERNAL_DOUBLE_ELEMENTS: - case FAST_DOUBLE_ELEMENTS: // Ignore getters and setters on pixel and external array // elements. return heap->undefined_value(); @@ -3916,6 +3881,7 @@ // Accessors overwrite previous callbacks (cf. with getters/setters). switch (GetElementsKind()) { case FAST_ELEMENTS: + case FAST_DOUBLE_ELEMENTS: break; case EXTERNAL_PIXEL_ELEMENTS: case EXTERNAL_BYTE_ELEMENTS: @@ -3926,7 +3892,6 @@ case EXTERNAL_UNSIGNED_INT_ELEMENTS: case EXTERNAL_FLOAT_ELEMENTS: case EXTERNAL_DOUBLE_ELEMENTS: - case FAST_DOUBLE_ELEMENTS: // Ignore getters and setters on pixel and external array // elements. return isolate->heap()->undefined_value(); @@ -4522,20 +4487,6 @@ } -static bool HasKey(FixedArray* array, Object* key) { - int len0 = array->length(); - for (int i = 0; i < len0; i++) { - Object* element = array->get(i); - if (element->IsSmi() && key->IsSmi() && (element == key)) return true; - if (element->IsString() && - key->IsString() && String::cast(element)->Equals(String::cast(key))) { - return true; - } - } - return false; -} - - MaybeObject* PolymorphicCodeCache::Update(MapList* maps, Code::Flags flags, Code* code) { @@ -4695,102 +4646,37 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) { - ASSERT(!array->HasExternalArrayElements()); - switch (array->GetElementsKind()) { - case JSObject::FAST_ELEMENTS: - return UnionOfKeys(FixedArray::cast(array->elements())); - case JSObject::DICTIONARY_ELEMENTS: { - NumberDictionary* dict = array->element_dictionary(); - int size = dict->NumberOfElements(); - - // Allocate a temporary fixed array. - Object* object; - { MaybeObject* maybe_object = GetHeap()->AllocateFixedArray(size); - if (!maybe_object->ToObject(&object)) return maybe_object; - } - FixedArray* key_array = FixedArray::cast(object); - - int capacity = dict->Capacity(); - int pos = 0; - // Copy the elements from the JSArray to the temporary fixed array. - for (int i = 0; i < capacity; i++) { - if (dict->IsKey(dict->KeyAt(i))) { - key_array->set(pos++, dict->ValueAt(i)); - } - } - // Compute the union of this and the temporary fixed array. 
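
The replacement code above ends DeleteElement by handing the work to GetElementsAccessor()->Delete(...), so the per-elements-kind switch disappears from the call site. Purely as an illustration of that shape, and not the real ElementsAccessor interface, a minimal strategy-style dispatch could look like this:

#include <cstdint>

// Invented interface for illustration only.
class DemoElementsAccessor {
 public:
  virtual ~DemoElementsAccessor() = default;
  virtual bool Delete(uint32_t index) = 0;
};

class DemoFastAccessor : public DemoElementsAccessor {
 public:
  bool Delete(uint32_t index) override {
    (void)index;  // A fast store would write "the hole" into the slot here.
    return true;
  }
};

class DemoDictionaryAccessor : public DemoElementsAccessor {
 public:
  bool Delete(uint32_t index) override {
    (void)index;  // A dictionary store would remove the entry and maybe shrink.
    return true;
  }
};

// The call site picks the accessor for the object's elements kind once and no
// longer switches on the kind itself.
static bool DeleteElement(DemoElementsAccessor* accessor, uint32_t index) {
  return accessor->Delete(index);
}
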
- return UnionOfKeys(key_array); + ElementsAccessor* accessor = array->GetElementsAccessor(); + MaybeObject* maybe_result = + accessor->AddElementsToFixedArray(array->elements(), this, array, array); + FixedArray* result; + if (!maybe_result->To(&result)) return maybe_result; +#ifdef DEBUG + if (FLAG_enable_slow_asserts) { + for (int i = 0; i < result->length(); i++) { + Object* current = result->get(i); + ASSERT(current->IsNumber() || current->IsString()); } - case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS: - UNIMPLEMENTED(); - break; - case JSObject::EXTERNAL_BYTE_ELEMENTS: - case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - case JSObject::EXTERNAL_SHORT_ELEMENTS: - case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - case JSObject::EXTERNAL_INT_ELEMENTS: - case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS: - case JSObject::EXTERNAL_FLOAT_ELEMENTS: - case JSObject::EXTERNAL_DOUBLE_ELEMENTS: - case JSObject::EXTERNAL_PIXEL_ELEMENTS: - case JSObject::FAST_DOUBLE_ELEMENTS: - break; } - UNREACHABLE(); - return GetHeap()->null_value(); // Failure case needs to "return" a value. +#endif + return result; } MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) { - int len0 = length(); + ElementsAccessor* accessor = ElementsAccessor::ForArray(other); + MaybeObject* maybe_result = + accessor->AddElementsToFixedArray(other, this, NULL, NULL); + FixedArray* result; + if (!maybe_result->To(&result)) return maybe_result; #ifdef DEBUG if (FLAG_enable_slow_asserts) { - for (int i = 0; i < len0; i++) { - ASSERT(get(i)->IsString() || get(i)->IsNumber()); + for (int i = 0; i < result->length(); i++) { + Object* current = result->get(i); + ASSERT(current->IsNumber() || current->IsString()); } } #endif - int len1 = other->length(); - // Optimize if 'other' is empty. - // We cannot optimize if 'this' is empty, as other may have holes - // or non keys. - if (len1 == 0) return this; - - // Compute how many elements are not in this. - int extra = 0; - for (int y = 0; y < len1; y++) { - Object* value = other->get(y); - if (!value->IsTheHole() && !HasKey(this, value)) extra++; - } - - if (extra == 0) return this; - - // Allocate the result - Object* obj; - { MaybeObject* maybe_obj = GetHeap()->AllocateFixedArray(len0 + extra); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - // Fill in the content - AssertNoAllocation no_gc; - FixedArray* result = FixedArray::cast(obj); - WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); - for (int i = 0; i < len0; i++) { - Object* e = get(i); - ASSERT(e->IsString() || e->IsNumber()); - result->set(i, e, mode); - } - // Fill in the extra keys. 
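
AddKeysFromJSArray and UnionOfKeys above now delegate to the accessor's AddElementsToFixedArray, but the contract is still the one visible in the removed code: keep the existing keys in order, then append keys from the other array that are neither holes nor already present. A standalone sketch of that contract, with strings standing in for V8 keys:

#include <optional>
#include <string>
#include <vector>

static std::vector<std::string> UnionOfKeys(
    const std::vector<std::string>& first,
    const std::vector<std::optional<std::string>>& second) {
  std::vector<std::string> result = first;  // Existing keys keep their order.
  for (const auto& key : second) {
    if (!key) continue;  // Skip holes.
    bool present = false;
    for (const auto& existing : first) {
      if (existing == *key) {
        present = true;
        break;
      }
    }
    if (!present) result.push_back(*key);
  }
  return result;
}
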
- int index = 0; - for (int y = 0; y < len1; y++) { - Object* value = other->get(y); - if (!value->IsTheHole() && !HasKey(this, value)) { - Object* e = other->get(y); - ASSERT(e->IsString() || e->IsNumber()); - result->set(len0 + index, e, mode); - index++; - } - } - ASSERT(extra == index); return result; } @@ -5172,55 +5058,45 @@ } -Vector String::ToAsciiVector() { - ASSERT(IsAsciiRepresentation()); - ASSERT(IsFlat()); - - int offset = 0; +String::FlatContent String::GetFlatContent() { int length = this->length(); - StringRepresentationTag string_tag = StringShape(this).representation_tag(); + StringShape shape(this); String* string = this; - if (string_tag == kConsStringTag) { - ConsString* cons = ConsString::cast(string); - ASSERT(cons->second()->length() == 0); - string = cons->first(); - string_tag = StringShape(string).representation_tag(); - } - if (string_tag == kSeqStringTag) { - SeqAsciiString* seq = SeqAsciiString::cast(string); - char* start = seq->GetChars(); - return Vector(start + offset, length); - } - ASSERT(string_tag == kExternalStringTag); - ExternalAsciiString* ext = ExternalAsciiString::cast(string); - const char* start = ext->resource()->data(); - return Vector(start + offset, length); -} - - -Vector String::ToUC16Vector() { - ASSERT(IsTwoByteRepresentation()); - ASSERT(IsFlat()); - int offset = 0; - int length = this->length(); - StringRepresentationTag string_tag = StringShape(this).representation_tag(); - String* string = this; - if (string_tag == kConsStringTag) { + if (shape.representation_tag() == kConsStringTag) { ConsString* cons = ConsString::cast(string); - ASSERT(cons->second()->length() == 0); + if (cons->second()->length() != 0) { + return FlatContent(); + } string = cons->first(); - string_tag = StringShape(string).representation_tag(); + shape = StringShape(string); + } + if (shape.representation_tag() == kSlicedStringTag) { + SlicedString* slice = SlicedString::cast(string); + offset = slice->offset(); + string = slice->parent(); + shape = StringShape(string); + ASSERT(shape.representation_tag() != kConsStringTag && + shape.representation_tag() != kSlicedStringTag); + } + if (shape.encoding_tag() == kAsciiStringTag) { + const char* start; + if (shape.representation_tag() == kSeqStringTag) { + start = SeqAsciiString::cast(string)->GetChars(); + } else { + start = ExternalAsciiString::cast(string)->resource()->data(); + } + return FlatContent(Vector(start + offset, length)); + } else { + ASSERT(shape.encoding_tag() == kTwoByteStringTag); + const uc16* start; + if (shape.representation_tag() == kSeqStringTag) { + start = SeqTwoByteString::cast(string)->GetChars(); + } else { + start = ExternalTwoByteString::cast(string)->resource()->data(); + } + return FlatContent(Vector(start + offset, length)); } - if (string_tag == kSeqStringTag) { - SeqTwoByteString* seq = SeqTwoByteString::cast(string); - return Vector(seq->GetChars() + offset, length); - } - ASSERT(string_tag == kExternalStringTag); - ExternalTwoByteString* ext = ExternalTwoByteString::cast(string); - const uc16* start = - reinterpret_cast(ext->resource()->data()); - return Vector(start + offset, length); } @@ -5291,13 +5167,17 @@ const uc16* String::GetTwoByteData(unsigned start) { - ASSERT(!IsAsciiRepresentation()); + ASSERT(!IsAsciiRepresentationUnderneath()); switch (StringShape(this).representation_tag()) { case kSeqStringTag: return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start); case kExternalStringTag: return ExternalTwoByteString::cast(this)-> 
ExternalTwoByteStringGetData(start); + case kSlicedStringTag: { + SlicedString* slice = SlicedString::cast(this); + return slice->parent()->GetTwoByteData(start + slice->offset()); + } case kConsStringTag: UNREACHABLE(); return NULL; @@ -5588,6 +5468,10 @@ max_chars); return rbb->util_buffer; } + case kSlicedStringTag: + return SlicedString::cast(input)->SlicedStringReadBlock(rbb, + offset_ptr, + max_chars); default: break; } @@ -5670,11 +5554,13 @@ if (str_ == NULL) return; Handle str(str_); ASSERT(str->IsFlat()); - is_ascii_ = str->IsAsciiRepresentation(); + String::FlatContent content = str->GetFlatContent(); + ASSERT(content.IsFlat()); + is_ascii_ = content.IsAscii(); if (is_ascii_) { - start_ = str->ToAsciiVector().start(); + start_ = content.ToAsciiVector().start(); } else { - start_ = str->ToUC16Vector().start(); + start_ = content.ToUC16Vector().start(); } } @@ -5729,6 +5615,11 @@ max_chars); } return; + case kSlicedStringTag: + SlicedString::cast(input)->SlicedStringReadBlockIntoBuffer(rbb, + offset_ptr, + max_chars); + return; default: break; } @@ -5863,6 +5754,31 @@ } +uint16_t SlicedString::SlicedStringGet(int index) { + return parent()->Get(offset() + index); +} + + +const unibrow::byte* SlicedString::SlicedStringReadBlock( + ReadBlockBuffer* buffer, unsigned* offset_ptr, unsigned chars) { + unsigned offset = this->offset(); + *offset_ptr += offset; + const unibrow::byte* answer = String::ReadBlock(String::cast(parent()), + buffer, offset_ptr, chars); + *offset_ptr -= offset; + return answer; +} + + +void SlicedString::SlicedStringReadBlockIntoBuffer( + ReadBlockBuffer* buffer, unsigned* offset_ptr, unsigned chars) { + unsigned offset = this->offset(); + *offset_ptr += offset; + String::ReadBlockIntoBuffer(String::cast(parent()), + buffer, offset_ptr, chars); + *offset_ptr -= offset; +} + template void String::WriteToFlat(String* src, sinkchar* sink, @@ -5930,6 +5846,13 @@ } break; } + case kAsciiStringTag | kSlicedStringTag: + case kTwoByteStringTag | kSlicedStringTag: { + SlicedString* slice = SlicedString::cast(source); + unsigned offset = slice->offset(); + WriteToFlat(slice->parent(), sink, from + offset, to + offset); + return; + } } } } @@ -5994,12 +5917,13 @@ static inline bool CompareStringContentsPartial(Isolate* isolate, IteratorA* ia, String* b) { - if (b->IsFlat()) { - if (b->IsAsciiRepresentation()) { - VectorIterator ib(b->ToAsciiVector()); + String::FlatContent content = b->GetFlatContent(); + if (content.IsFlat()) { + if (content.IsAscii()) { + VectorIterator ib(content.ToAsciiVector()); return CompareStringContents(ia, &ib); } else { - VectorIterator ib(b->ToUC16Vector()); + VectorIterator ib(content.ToUC16Vector()); return CompareStringContents(ia, &ib); } } else { @@ -6038,16 +5962,18 @@ } Isolate* isolate = GetIsolate(); - if (lhs->IsFlat()) { - if (lhs->IsAsciiRepresentation()) { - Vector vec1 = lhs->ToAsciiVector(); - if (rhs->IsFlat()) { - if (rhs->IsAsciiRepresentation()) { - Vector vec2 = rhs->ToAsciiVector(); + String::FlatContent lhs_content = lhs->GetFlatContent(); + String::FlatContent rhs_content = rhs->GetFlatContent(); + if (lhs_content.IsFlat()) { + if (lhs_content.IsAscii()) { + Vector vec1 = lhs_content.ToAsciiVector(); + if (rhs_content.IsFlat()) { + if (rhs_content.IsAscii()) { + Vector vec2 = rhs_content.ToAsciiVector(); return CompareRawStringContents(vec1, vec2); } else { VectorIterator buf1(vec1); - VectorIterator ib(rhs->ToUC16Vector()); + VectorIterator ib(rhs_content.ToUC16Vector()); return CompareStringContents(&buf1, &ib); } } 
else { @@ -6057,14 +5983,14 @@ isolate->objects_string_compare_buffer_b()); } } else { - Vector vec1 = lhs->ToUC16Vector(); - if (rhs->IsFlat()) { - if (rhs->IsAsciiRepresentation()) { + Vector vec1 = lhs_content.ToUC16Vector(); + if (rhs_content.IsFlat()) { + if (rhs_content.IsAscii()) { VectorIterator buf1(vec1); - VectorIterator ib(rhs->ToAsciiVector()); + VectorIterator ib(rhs_content.ToAsciiVector()); return CompareStringContents(&buf1, &ib); } else { - Vector vec2(rhs->ToUC16Vector()); + Vector vec2(rhs_content.ToUC16Vector()); return CompareRawStringContents(vec1, vec2); } } else { @@ -6117,8 +6043,10 @@ bool String::IsAsciiEqualTo(Vector str) { int slen = length(); if (str.length() != slen) return false; - if (IsFlat() && IsAsciiRepresentation()) { - return CompareChars(ToAsciiVector().start(), str.start(), slen) == 0; + FlatContent content = GetFlatContent(); + if (content.IsAscii()) { + return CompareChars(content.ToAsciiVector().start(), + str.start(), slen) == 0; } for (int i = 0; i < slen; i++) { if (Get(i) != static_cast(str[i])) return false; @@ -6130,8 +6058,9 @@ bool String::IsTwoByteEqualTo(Vector str) { int slen = length(); if (str.length() != slen) return false; - if (IsFlat() && IsTwoByteRepresentation()) { - return CompareChars(ToUC16Vector().start(), str.start(), slen) == 0; + FlatContent content = GetFlatContent(); + if (content.IsTwoByte()) { + return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0; } for (int i = 0; i < slen; i++) { if (Get(i) != str[i]) return false; @@ -7061,126 +6990,99 @@ PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count); if (0 == deopt_count) return; - PrintF(out, "%6s %6s %6s %12s\n", "index", "ast id", "argc", "commands"); + PrintF(out, "%6s %6s %6s %12s\n", "index", "ast id", "argc", + FLAG_print_code_verbose ? "commands" : ""); for (int i = 0; i < deopt_count; i++) { - int command_count = 0; PrintF(out, "%6d %6d %6d", i, AstId(i)->value(), ArgumentsStackHeight(i)->value()); + + if (!FLAG_print_code_verbose) { + PrintF(out, "\n"); + continue; + } + // Print details of the frame translation. int translation_index = TranslationIndex(i)->value(); TranslationIterator iterator(TranslationByteArray(), translation_index); Translation::Opcode opcode = static_cast(iterator.Next()); ASSERT(Translation::BEGIN == opcode); int frame_count = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, " %s {count=%d}\n", Translation::StringFor(opcode), - frame_count); - } + PrintF(out, " %s {count=%d}\n", Translation::StringFor(opcode), + frame_count); - for (int i = 0; i < frame_count; ++i) { - opcode = static_cast(iterator.Next()); - ASSERT(Translation::FRAME == opcode); - int ast_id = iterator.Next(); - int function_id = iterator.Next(); - JSFunction* function = - JSFunction::cast(LiteralArray()->get(function_id)); - unsigned height = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "%24s %s {ast_id=%d, function=", - "", Translation::StringFor(opcode), ast_id); - function->PrintName(out); - PrintF(out, ", height=%u}\n", height); - } - - // Size of translation is height plus all incoming arguments including - // receiver. 
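
The rewritten deoptimization-data printer above walks the translation as a flat command stream: read an opcode, then read the operands that opcode owns, until the next BEGIN. A trimmed-down sketch of that decoding loop (the opcode set and operand counts are reduced for illustration, and a well-formed stream is assumed):

#include <cstddef>
#include <cstdio>
#include <vector>

// Reduced opcode set; the real stream also carries BEGIN, DUPLICATE, the
// INT32/DOUBLE register and stack-slot variants, and ARGUMENTS_OBJECT.
enum class DemoOpcode { kFrame, kRegister, kStackSlot, kLiteral };

static void PrintTranslation(const std::vector<int>& stream) {
  std::size_t pos = 0;
  auto next = [&]() { return stream[pos++]; };
  while (pos < stream.size()) {
    switch (static_cast<DemoOpcode>(next())) {
      case DemoOpcode::kFrame: {
        int ast_id = next();
        int height = next();
        std::printf("FRAME {ast_id=%d, height=%d}\n", ast_id, height);
        break;
      }
      case DemoOpcode::kRegister:
        std::printf("REGISTER {input=r%d}\n", next());
        break;
      case DemoOpcode::kStackSlot:
        std::printf("STACK_SLOT {input=%d}\n", next());
        break;
      case DemoOpcode::kLiteral:
        std::printf("LITERAL {literal_id=%d}\n", next());
        break;
    }
  }
}
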
- int size = height + function->shared()->formal_parameter_count() + 1; - command_count += size; - for (int j = 0; j < size; ++j) { - opcode = static_cast(iterator.Next()); - if (FLAG_print_code_verbose) { - PrintF(out, "%24s %s ", "", Translation::StringFor(opcode)); - } - - if (opcode == Translation::DUPLICATE) { - opcode = static_cast(iterator.Next()); - if (FLAG_print_code_verbose) { - PrintF(out, "%s ", Translation::StringFor(opcode)); - } - --j; // Two commands share the same frame index. + while (iterator.HasNext() && + Translation::BEGIN != + (opcode = static_cast(iterator.Next()))) { + PrintF(out, "%24s %s ", "", Translation::StringFor(opcode)); + + switch (opcode) { + case Translation::BEGIN: + UNREACHABLE(); + break; + + case Translation::FRAME: { + int ast_id = iterator.Next(); + int function_id = iterator.Next(); + JSFunction* function = + JSFunction::cast(LiteralArray()->get(function_id)); + unsigned height = iterator.Next(); + PrintF(out, "{ast_id=%d, \nfunction=", ast_id); + function->PrintName(out); + PrintF(out, ", height=%u}", height); + break; } - switch (opcode) { - case Translation::BEGIN: - case Translation::FRAME: - case Translation::DUPLICATE: - UNREACHABLE(); - break; - - case Translation::REGISTER: { - int reg_code = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code)); - } - break; - } + case Translation::DUPLICATE: + break; - case Translation::INT32_REGISTER: { - int reg_code = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code)); - } - break; - } + case Translation::REGISTER: { + int reg_code = iterator.Next(); + PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code)); + break; + } - case Translation::DOUBLE_REGISTER: { - int reg_code = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{input=%s}", - DoubleRegister::AllocationIndexToString(reg_code)); - } - break; - } + case Translation::INT32_REGISTER: { + int reg_code = iterator.Next(); + PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code)); + break; + } - case Translation::STACK_SLOT: { - int input_slot_index = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{input=%d}", input_slot_index); - } - break; - } + case Translation::DOUBLE_REGISTER: { + int reg_code = iterator.Next(); + PrintF(out, "{input=%s}", + DoubleRegister::AllocationIndexToString(reg_code)); + break; + } - case Translation::INT32_STACK_SLOT: { - int input_slot_index = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{input=%d}", input_slot_index); - } - break; - } + case Translation::STACK_SLOT: { + int input_slot_index = iterator.Next(); + PrintF(out, "{input=%d}", input_slot_index); + break; + } - case Translation::DOUBLE_STACK_SLOT: { - int input_slot_index = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{input=%d}", input_slot_index); - } - break; - } + case Translation::INT32_STACK_SLOT: { + int input_slot_index = iterator.Next(); + PrintF(out, "{input=%d}", input_slot_index); + break; + } - case Translation::LITERAL: { - unsigned literal_index = iterator.Next(); - if (FLAG_print_code_verbose) { - PrintF(out, "{literal_id=%u}", literal_index); - } - break; - } + case Translation::DOUBLE_STACK_SLOT: { + int input_slot_index = iterator.Next(); + PrintF(out, "{input=%d}", input_slot_index); + break; + } - case Translation::ARGUMENTS_OBJECT: - break; + case Translation::LITERAL: { + unsigned literal_index = iterator.Next(); + 
PrintF(out, "{literal_id=%u}", literal_index); + break; } - if (FLAG_print_code_verbose) PrintF(out, "\n"); + + case Translation::ARGUMENTS_OBJECT: + break; } + PrintF(out, "\n"); } - if (!FLAG_print_code_verbose) PrintF(out, " %12d\n", command_count); } } @@ -7218,6 +7120,7 @@ case UNARY_OP_IC: return "UNARY_OP_IC"; case BINARY_OP_IC: return "BINARY_OP_IC"; case COMPARE_IC: return "COMPARE_IC"; + case TO_BOOLEAN_IC: return "TO_BOOLEAN_IC"; } UNREACHABLE(); return NULL; @@ -7456,7 +7359,8 @@ // exceed the capacity of new space, and we would fail repeatedly // trying to convert the FixedDoubleArray. MaybeObject* maybe_value_object = - GetHeap()->AllocateHeapNumber(old_elements->get(i), TENURED); + GetHeap()->AllocateHeapNumber(old_elements->get_scalar(i), + TENURED); if (!maybe_value_object->ToObject(&obj)) return maybe_value_object; // Force write barrier. It's not worth trying to exploit // elems->GetWriteBarrierMode(), since it requires an @@ -7550,9 +7454,10 @@ switch (GetElementsKind()) { case FAST_ELEMENTS: { + case FAST_DOUBLE_ELEMENTS: // Make sure we never try to shrink dense arrays into sparse arrays. - ASSERT(static_cast(FixedArray::cast(elements())->length()) <= - new_length); + ASSERT(static_cast( + FixedArrayBase::cast(elements())->length()) <= new_length); MaybeObject* result = NormalizeElements(); if (result->IsFailure()) return result; @@ -7581,7 +7486,6 @@ case EXTERNAL_FLOAT_ELEMENTS: case EXTERNAL_DOUBLE_ELEMENTS: case EXTERNAL_PIXEL_ELEMENTS: - case FAST_DOUBLE_ELEMENTS: UNREACHABLE(); break; } @@ -7696,8 +7600,7 @@ } int min = NewElementsCapacity(old_capacity); int new_capacity = value > min ? value : min; - if (new_capacity <= kMaxFastElementsLength || - !ShouldConvertToSlowElements(new_capacity)) { + if (!ShouldConvertToSlowElements(new_capacity)) { MaybeObject* result; if (GetElementsKind() == FAST_ELEMENTS) { result = SetFastElementsCapacityAndLength(new_capacity, value); @@ -7923,6 +7826,17 @@ } break; } + case FAST_DOUBLE_ELEMENTS: { + uint32_t length = IsJSArray() ? + static_cast + (Smi::cast(JSArray::cast(this)->length())->value()) : + static_cast(FixedDoubleArray::cast(elements())->length()); + if ((index < length) && + !FixedDoubleArray::cast(elements())->is_the_hole(index)) { + return true; + } + break; + } case EXTERNAL_PIXEL_ELEMENTS: { ExternalPixelArray* pixels = ExternalPixelArray::cast(elements()); if (index < static_cast(pixels->length())) { @@ -7937,8 +7851,7 @@ case EXTERNAL_INT_ELEMENTS: case EXTERNAL_UNSIGNED_INT_ELEMENTS: case EXTERNAL_FLOAT_ELEMENTS: - case EXTERNAL_DOUBLE_ELEMENTS: - case FAST_DOUBLE_ELEMENTS: { + case EXTERNAL_DOUBLE_ELEMENTS: { ExternalArray* array = ExternalArray::cast(elements()); if (index < static_cast(array->length())) { return true; @@ -8049,6 +7962,17 @@ } break; } + case FAST_DOUBLE_ELEMENTS: { + uint32_t length = IsJSArray() ? 
+ static_cast + (Smi::cast(JSArray::cast(this)->length())->value()) : + static_cast(FixedDoubleArray::cast(elements())->length()); + if ((index < length) && + !FixedDoubleArray::cast(elements())->is_the_hole(index)) { + return FAST_ELEMENT; + } + break; + } case EXTERNAL_PIXEL_ELEMENTS: { ExternalPixelArray* pixels = ExternalPixelArray::cast(elements()); if (index < static_cast(pixels->length())) return FAST_ELEMENT; @@ -8066,9 +7990,6 @@ if (index < static_cast(array->length())) return FAST_ELEMENT; break; } - case FAST_DOUBLE_ELEMENTS: - UNREACHABLE(); - break; case DICTIONARY_ELEMENTS: { if (element_dictionary()->FindEntry(index) != NumberDictionary::kNotFound) { @@ -8435,8 +8356,7 @@ if ((index - length) < kMaxGap) { // Try allocating extra space. int new_capacity = NewElementsCapacity(index + 1); - if (new_capacity <= kMaxFastElementsLength || - !ShouldConvertToSlowElements(new_capacity)) { + if (!ShouldConvertToSlowElements(new_capacity)) { ASSERT(static_cast(new_capacity) > index); Object* new_elements; MaybeObject* maybe = @@ -8516,14 +8436,14 @@ return isolate->Throw(*error); } } - Object* new_dictionary; + FixedArrayBase* new_dictionary; MaybeObject* maybe = dictionary->AtNumberPut(index, value); - if (!maybe->ToObject(&new_dictionary)) return maybe; + if (!maybe->To(&new_dictionary)) return maybe; if (dictionary != NumberDictionary::cast(new_dictionary)) { if (is_arguments) { elements->set(1, new_dictionary); } else { - set_elements(HeapObject::cast(new_dictionary)); + set_elements(new_dictionary); } dictionary = NumberDictionary::cast(new_dictionary); } @@ -8544,7 +8464,7 @@ } else { new_length = dictionary->max_number_key() + 1; } - MaybeObject* result = ShouldConvertToFastDoubleElements() + MaybeObject* result = CanConvertToFastDoubleElements() ? SetFastDoubleElementsCapacityAndLength(new_length, new_length) : SetFastElementsCapacityAndLength(new_length, new_length); if (result->IsFailure()) return result; @@ -8618,8 +8538,7 @@ if ((index - elms_length) < kMaxGap) { // Try allocating extra space. int new_capacity = NewElementsCapacity(index+1); - if (new_capacity <= kMaxFastElementsLength || - !ShouldConvertToSlowElements(new_capacity)) { + if (!ShouldConvertToSlowElements(new_capacity)) { ASSERT(static_cast(new_capacity) > index); Object* obj; { MaybeObject* maybe_obj = @@ -8784,71 +8703,6 @@ } -MaybeObject* JSObject::GetElementPostInterceptor(Object* receiver, - uint32_t index) { - // Get element works for both JSObject and JSArray since - // JSArray::length cannot change. 
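
The FAST_DOUBLE_ELEMENTS cases added above all follow the same pattern: an element exists when the index is below the relevant length and the slot does not hold the hole marker of the unboxed double array. A small sketch of that test, using a plain NaN as a stand-in for V8's canonical hole bit pattern:

#include <cmath>
#include <cstdint>
#include <vector>

// |length| is the JSArray length, which may be smaller than the backing
// store's capacity.
static bool HasDoubleElement(const std::vector<double>& backing,
                             uint32_t length, uint32_t index) {
  return index < length && index < backing.size() &&
         !std::isnan(backing[index]);  // NaN models "the hole" in this sketch.
}
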
- switch (GetElementsKind()) { - case FAST_ELEMENTS: { - FixedArray* elms = FixedArray::cast(elements()); - if (index < static_cast(elms->length())) { - Object* value = elms->get(index); - if (!value->IsTheHole()) return value; - } - break; - } - case FAST_DOUBLE_ELEMENTS: { - FixedDoubleArray* elms = FixedDoubleArray::cast(elements()); - if (index < static_cast(elms->length())) { - if (!elms->is_the_hole(index)) { - return GetHeap()->NumberFromDouble(elms->get(index)); - } - } - break; - } - case EXTERNAL_PIXEL_ELEMENTS: - case EXTERNAL_BYTE_ELEMENTS: - case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - case EXTERNAL_SHORT_ELEMENTS: - case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - case EXTERNAL_INT_ELEMENTS: - case EXTERNAL_UNSIGNED_INT_ELEMENTS: - case EXTERNAL_FLOAT_ELEMENTS: - case EXTERNAL_DOUBLE_ELEMENTS: { - MaybeObject* maybe_value = GetExternalElement(index); - Object* value; - if (!maybe_value->ToObject(&value)) return maybe_value; - if (!value->IsUndefined()) return value; - break; - } - case DICTIONARY_ELEMENTS: { - NumberDictionary* dictionary = element_dictionary(); - int entry = dictionary->FindEntry(index); - if (entry != NumberDictionary::kNotFound) { - Object* element = dictionary->ValueAt(entry); - PropertyDetails details = dictionary->DetailsAt(entry); - if (details.type() == CALLBACKS) { - return GetElementWithCallback(receiver, - element, - index, - this); - } - return element; - } - break; - } - case NON_STRICT_ARGUMENTS_ELEMENTS: - UNIMPLEMENTED(); - break; - } - - // Continue searching via the prototype chain. - Object* pt = GetPrototype(); - if (pt->IsNull()) return GetHeap()->undefined_value(); - return pt->GetElementWithReceiver(receiver, index); -} - - MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver, uint32_t index) { Isolate* isolate = GetIsolate(); @@ -8876,218 +8730,33 @@ if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result); } - MaybeObject* raw_result = - holder_handle->GetElementPostInterceptor(*this_handle, index); - RETURN_IF_SCHEDULED_EXCEPTION(isolate); - return raw_result; -} - + Heap* heap = holder_handle->GetHeap(); + ElementsAccessor* handler = holder_handle->GetElementsAccessor(); + MaybeObject* raw_result = handler->Get(holder_handle->elements(), + index, + *holder_handle, + *this_handle); + if (raw_result != heap->the_hole_value()) return raw_result; -MaybeObject* JSObject::GetElementWithReceiver(Object* receiver, - uint32_t index) { - // Check access rights if needed. - if (IsAccessCheckNeeded()) { - Heap* heap = GetHeap(); - if (!heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_GET)) { - heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_GET); - return heap->undefined_value(); - } - } - - if (HasIndexedInterceptor()) { - return GetElementWithInterceptor(receiver, index); - } - - // Get element works for both JSObject and JSArray since - // JSArray::length cannot change. 
- switch (GetElementsKind()) { - case FAST_ELEMENTS: { - FixedArray* elms = FixedArray::cast(elements()); - if (index < static_cast(elms->length())) { - Object* value = elms->get(index); - if (!value->IsTheHole()) return value; - } - break; - } - case FAST_DOUBLE_ELEMENTS: { - FixedDoubleArray* elms = FixedDoubleArray::cast(elements()); - if (index < static_cast(elms->length())) { - if (!elms->is_the_hole(index)) { - double double_value = elms->get(index); - return GetHeap()->NumberFromDouble(double_value); - } - } - break; - } - case EXTERNAL_PIXEL_ELEMENTS: - case EXTERNAL_BYTE_ELEMENTS: - case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - case EXTERNAL_SHORT_ELEMENTS: - case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - case EXTERNAL_INT_ELEMENTS: - case EXTERNAL_UNSIGNED_INT_ELEMENTS: - case EXTERNAL_FLOAT_ELEMENTS: - case EXTERNAL_DOUBLE_ELEMENTS: { - MaybeObject* maybe_value = GetExternalElement(index); - Object* value; - if (!maybe_value->ToObject(&value)) return maybe_value; - if (!value->IsUndefined()) return value; - break; - } - case DICTIONARY_ELEMENTS: { - NumberDictionary* dictionary = element_dictionary(); - int entry = dictionary->FindEntry(index); - if (entry != NumberDictionary::kNotFound) { - Object* element = dictionary->ValueAt(entry); - PropertyDetails details = dictionary->DetailsAt(entry); - if (details.type() == CALLBACKS) { - return GetElementWithCallback(receiver, - element, - index, - this); - } - return element; - } - break; - } - case NON_STRICT_ARGUMENTS_ELEMENTS: { - FixedArray* parameter_map = FixedArray::cast(elements()); - uint32_t length = parameter_map->length(); - Object* probe = - (index < length - 2) ? parameter_map->get(index + 2) : NULL; - if (probe != NULL && !probe->IsTheHole()) { - Context* context = Context::cast(parameter_map->get(0)); - int context_index = Smi::cast(probe)->value(); - ASSERT(!context->get(context_index)->IsTheHole()); - return context->get(context_index); - } else { - // Object is not mapped, defer to the arguments. - FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); - if (arguments->IsDictionary()) { - NumberDictionary* dictionary = NumberDictionary::cast(arguments); - int entry = dictionary->FindEntry(index); - if (entry != NumberDictionary::kNotFound) { - Object* element = dictionary->ValueAt(entry); - PropertyDetails details = dictionary->DetailsAt(entry); - if (details.type() == CALLBACKS) { - return GetElementWithCallback(receiver, - element, - index, - this); - } - return element; - } - } else if (index < static_cast(arguments->length())) { - Object* value = arguments->get(index); - if (!value->IsTheHole()) return value; - } - } - break; - } - } + RETURN_IF_SCHEDULED_EXCEPTION(isolate); - Object* pt = GetPrototype(); - Heap* heap = GetHeap(); + Object* pt = holder_handle->GetPrototype(); if (pt == heap->null_value()) return heap->undefined_value(); - return pt->GetElementWithReceiver(receiver, index); + return pt->GetElementWithReceiver(*this_handle, index); } -MaybeObject* JSObject::GetExternalElement(uint32_t index) { - // Get element works for both JSObject and JSArray since - // JSArray::length cannot change. 
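
The removed GetExternalElement body above documents how external (typed) array reads are boxed: the 8- and 16-bit kinds are returned as smis directly, the 32-bit kinds go through NumberFromInt32/NumberFromUint32 and may need a heap number, and float/double reads allocate a heap number. A sketch of the range check behind that split, assuming a 31-bit signed smi range:

#include <cstdint>

static bool FitsInSmi(int64_t value) {
  const int64_t kSmiMin = -(int64_t{1} << 30);
  const int64_t kSmiMax = (int64_t{1} << 30) - 1;
  return value >= kSmiMin && value <= kSmiMax;
}

// Every int8_t/uint8_t/int16_t/uint16_t value passes FitsInSmi, so the byte
// and short element kinds can always be returned as smis. int32_t and
// uint32_t values may fall outside the range and then need a heap number,
// and float/double reads are boxed as heap numbers in the code above.
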
- switch (GetElementsKind()) { - case EXTERNAL_PIXEL_ELEMENTS: { - ExternalPixelArray* pixels = ExternalPixelArray::cast(elements()); - if (index < static_cast(pixels->length())) { - uint8_t value = pixels->get(index); - return Smi::FromInt(value); - } - break; - } - case EXTERNAL_BYTE_ELEMENTS: { - ExternalByteArray* array = ExternalByteArray::cast(elements()); - if (index < static_cast(array->length())) { - int8_t value = array->get(index); - return Smi::FromInt(value); - } - break; - } - case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: { - ExternalUnsignedByteArray* array = - ExternalUnsignedByteArray::cast(elements()); - if (index < static_cast(array->length())) { - uint8_t value = array->get(index); - return Smi::FromInt(value); - } - break; - } - case EXTERNAL_SHORT_ELEMENTS: { - ExternalShortArray* array = ExternalShortArray::cast(elements()); - if (index < static_cast(array->length())) { - int16_t value = array->get(index); - return Smi::FromInt(value); - } - break; - } - case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: { - ExternalUnsignedShortArray* array = - ExternalUnsignedShortArray::cast(elements()); - if (index < static_cast(array->length())) { - uint16_t value = array->get(index); - return Smi::FromInt(value); - } - break; - } - case EXTERNAL_INT_ELEMENTS: { - ExternalIntArray* array = ExternalIntArray::cast(elements()); - if (index < static_cast(array->length())) { - int32_t value = array->get(index); - return GetHeap()->NumberFromInt32(value); - } - break; - } - case EXTERNAL_UNSIGNED_INT_ELEMENTS: { - ExternalUnsignedIntArray* array = - ExternalUnsignedIntArray::cast(elements()); - if (index < static_cast(array->length())) { - uint32_t value = array->get(index); - return GetHeap()->NumberFromUint32(value); - } - break; - } - case EXTERNAL_FLOAT_ELEMENTS: { - ExternalFloatArray* array = ExternalFloatArray::cast(elements()); - if (index < static_cast(array->length())) { - float value = array->get(index); - return GetHeap()->AllocateHeapNumber(value); - } - break; - } - case EXTERNAL_DOUBLE_ELEMENTS: { - ExternalDoubleArray* array = ExternalDoubleArray::cast(elements()); - if (index < static_cast(array->length())) { - double value = array->get(index); - return GetHeap()->AllocateHeapNumber(value); - } - break; - } - case FAST_DOUBLE_ELEMENTS: - case FAST_ELEMENTS: - case DICTIONARY_ELEMENTS: - UNREACHABLE(); - break; - case NON_STRICT_ARGUMENTS_ELEMENTS: - UNIMPLEMENTED(); - break; - } - return GetHeap()->undefined_value(); +bool JSObject::HasDenseElements() { + int capacity = 0; + int used = 0; + GetElementsCapacityAndUsage(&capacity, &used); + return (capacity == 0) || (used > (capacity / 2)); } -bool JSObject::HasDenseElements() { - int capacity = 0; - int number_of_elements = 0; +void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) { + *capacity = 0; + *used = 0; FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements()); FixedArray* backing_store = NULL; @@ -9098,34 +8767,33 @@ backing_store = FixedArray::cast(backing_store_base); if (backing_store->IsDictionary()) { NumberDictionary* dictionary = NumberDictionary::cast(backing_store); - capacity = dictionary->Capacity(); - number_of_elements = dictionary->NumberOfElements(); + *capacity = dictionary->Capacity(); + *used = dictionary->NumberOfElements(); break; } // Fall through. 
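
HasDenseElements is now expressed in terms of GetElementsCapacityAndUsage, shown above: compute capacity and used (non-hole) slots once, with dictionaries reporting their own counters and external arrays counting as fully used, and call the object dense when more than half of the capacity is in use. A compact sketch over a plain vector:

#include <optional>
#include <vector>

// Count capacity and used (non-hole) slots in one pass over the backing store.
static void GetCapacityAndUsage(const std::vector<std::optional<int>>& store,
                                int* capacity, int* used) {
  *capacity = static_cast<int>(store.size());
  *used = 0;
  for (const auto& slot : store) {
    if (slot) ++(*used);
  }
}

// Dense means "more than half of the capacity is in use", or nothing is
// allocated at all.
static bool HasDenseElements(const std::vector<std::optional<int>>& store) {
  int capacity = 0;
  int used = 0;
  GetCapacityAndUsage(store, &capacity, &used);
  return capacity == 0 || used > capacity / 2;
}
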
case FAST_ELEMENTS: backing_store = FixedArray::cast(backing_store_base); - capacity = backing_store->length(); - for (int i = 0; i < capacity; ++i) { - if (!backing_store->get(i)->IsTheHole()) ++number_of_elements; + *capacity = backing_store->length(); + for (int i = 0; i < *capacity; ++i) { + if (!backing_store->get(i)->IsTheHole()) ++(*used); } break; case DICTIONARY_ELEMENTS: { NumberDictionary* dictionary = NumberDictionary::cast(FixedArray::cast(elements())); - capacity = dictionary->Capacity(); - number_of_elements = dictionary->NumberOfElements(); + *capacity = dictionary->Capacity(); + *used = dictionary->NumberOfElements(); break; } case FAST_DOUBLE_ELEMENTS: { FixedDoubleArray* elms = FixedDoubleArray::cast(elements()); - capacity = elms->length(); - for (int i = 0; i < capacity; i++) { - if (!elms->is_the_hole(i)) number_of_elements++; + *capacity = elms->length(); + for (int i = 0; i < *capacity; i++) { + if (!elms->is_the_hole(i)) ++(*used); } break; } - case EXTERNAL_PIXEL_ELEMENTS: case EXTERNAL_BYTE_ELEMENTS: case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: case EXTERNAL_SHORT_ELEMENTS: @@ -9133,30 +8801,34 @@ case EXTERNAL_INT_ELEMENTS: case EXTERNAL_UNSIGNED_INT_ELEMENTS: case EXTERNAL_FLOAT_ELEMENTS: - case EXTERNAL_DOUBLE_ELEMENTS: { - return true; - } + case EXTERNAL_DOUBLE_ELEMENTS: + case EXTERNAL_PIXEL_ELEMENTS: + // External arrays are considered 100% used. + ExternalArray* external_array = ExternalArray::cast(elements()); + *capacity = external_array->length(); + *used = external_array->length(); + break; } - return (capacity == 0) || (number_of_elements > (capacity / 2)); } bool JSObject::ShouldConvertToSlowElements(int new_capacity) { - // Keep the array in fast case if the current backing storage is - // almost filled and if the new capacity is no more than twice the - // old capacity. - int elements_length = 0; - if (elements()->map() == GetHeap()->non_strict_arguments_elements_map()) { - FixedArray* backing_store = FixedArray::cast(elements()); - elements_length = FixedArray::cast(backing_store->get(1))->length(); - } else if (HasFastElements()) { - elements_length = FixedArray::cast(elements())->length(); - } else if (HasFastDoubleElements()) { - elements_length = FixedDoubleArray::cast(elements())->length(); - } else { - UNREACHABLE(); + STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <= + kMaxUncheckedFastElementsLength); + if (new_capacity <= kMaxUncheckedOldFastElementsLength || + (new_capacity <= kMaxUncheckedFastElementsLength && + GetHeap()->InNewSpace(this))) { + return false; } - return !HasDenseElements() || ((new_capacity / 2) > elements_length); + // If the fast-case backing storage takes up roughly three times as + // much space (in machine words) as a dictionary backing storage + // would, the object should have slow elements. + int old_capacity = 0; + int used_elements = 0; + GetElementsCapacityAndUsage(&old_capacity, &used_elements); + int dictionary_size = NumberDictionary::ComputeCapacity(used_elements) * + NumberDictionary::kEntrySize; + return 3 * dictionary_size <= new_capacity; } @@ -9179,20 +8851,21 @@ // dictionary, we cannot go back to fast case. if (dictionary->requires_slow_elements()) return false; // If the dictionary backing storage takes up roughly half as much - // space as a fast-case backing storage would the array should have - // fast elements. - uint32_t length = 0; + // space (in machine words) as a fast-case backing storage would, + // the object should have fast elements. 
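
The new heuristics above compare backing-store footprints directly: convert to slow (dictionary) elements when a dictionary would take no more than a third of the proposed fast capacity, and convert back to fast elements when the dictionary already occupies at least half of what a fast store would. A parameterised sketch of those two tests (the capacity estimate and entry size below are placeholders, not V8's real constants, and the new-space fast-path thresholds are omitted):

// Convert to a dictionary when its footprint would be at most a third of the
// proposed fast capacity. The 2x-of-used estimate stands in for
// NumberDictionary::ComputeCapacity; entry_size stands in for kEntrySize.
static bool ShouldConvertToSlowElements(int new_capacity, int used_elements,
                                        int entry_size) {
  int dictionary_size = 2 * used_elements * entry_size;
  return 3 * dictionary_size <= new_capacity;
}

// Convert back to fast elements when the dictionary already takes at least
// half the space a fast backing store of array_size slots would.
static bool ShouldConvertToFastElements(int dictionary_capacity,
                                        int entry_size, int array_size) {
  int dictionary_size = dictionary_capacity * entry_size;
  return 2 * dictionary_size >= array_size;
}
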
+ uint32_t array_size = 0; if (IsJSArray()) { - CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length)); + CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size)); } else { - length = dictionary->max_number_key(); + array_size = dictionary->max_number_key(); } - return static_cast(dictionary->Capacity()) >= - (length / (2 * NumberDictionary::kEntrySize)); + uint32_t dictionary_size = static_cast(dictionary->Capacity()) * + NumberDictionary::kEntrySize; + return 2 * dictionary_size >= array_size; } -bool JSObject::ShouldConvertToFastDoubleElements() { +bool JSObject::CanConvertToFastDoubleElements() { if (FLAG_unbox_double_arrays) { ASSERT(HasDictionaryElements()); NumberDictionary* dictionary = NumberDictionary::cast(elements()); @@ -9382,6 +9055,15 @@ return (index < length) && !FixedArray::cast(elements())->get(index)->IsTheHole(); } + case FAST_DOUBLE_ELEMENTS: { + uint32_t length = IsJSArray() ? + static_cast( + Smi::cast(JSArray::cast(this)->length())->value()) : + static_cast(FixedDoubleArray::cast(elements())->length()); + return (index < length) && + !FixedDoubleArray::cast(elements())->is_the_hole(index); + break; + } case EXTERNAL_PIXEL_ELEMENTS: { ExternalPixelArray* pixels = ExternalPixelArray::cast(elements()); return index < static_cast(pixels->length()); @@ -9397,9 +9079,6 @@ ExternalArray* array = ExternalArray::cast(elements()); return index < static_cast(array->length()); } - case FAST_DOUBLE_ELEMENTS: - UNREACHABLE(); - break; case DICTIONARY_ELEMENTS: { return element_dictionary()->FindEntry(index) != NumberDictionary::kNotFound; @@ -10119,11 +9798,8 @@ template MaybeObject* HashTable::Allocate(int at_least_space_for, PretenureFlag pretenure) { - const int kMinCapacity = 32; - int capacity = RoundUpToPowerOf2(at_least_space_for * 2); - if (capacity < kMinCapacity) { - capacity = kMinCapacity; // Guarantee min capacity. - } else if (capacity > HashTable::kMaxCapacity) { + int capacity = ComputeCapacity(at_least_space_for); + if (capacity > HashTable::kMaxCapacity) { return Failure::OutOfMemoryException(); } @@ -10291,6 +9967,8 @@ template class HashTable; +template class HashTable; + template class Dictionary; template class Dictionary; @@ -10473,8 +10151,6 @@ // If the object is in dictionary mode, it is converted to fast elements // mode. MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) { - ASSERT(!HasExternalArrayElements()); - Heap* heap = GetHeap(); if (HasDictionaryElements()) { @@ -10504,19 +10180,22 @@ set_map(new_map); set_elements(fast_elements); - } else { + } else if (HasExternalArrayElements()) { + // External arrays cannot have holes or undefined elements. + return Smi::FromInt(ExternalArray::cast(elements())->length()); + } else if (!HasFastDoubleElements()) { Object* obj; { MaybeObject* maybe_obj = EnsureWritableFastElements(); if (!maybe_obj->ToObject(&obj)) return maybe_obj; } } - ASSERT(HasFastElements()); + ASSERT(HasFastElements() || HasFastDoubleElements()); // Collect holes at the end, undefined before that and the rest at the // start, and return the number of non-hole, non-undefined values. 
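
PrepareElementsForSort, continued below, gains a branch for unboxed double arrays; the compaction it performs is the same two-index sweep as the object case minus the undefined handling: walk from the front, and whenever a hole is found, pull the last defined value within the limit down into it, then hole out the tail and report how many defined values remain. A standalone sketch with std::optional modelling the hole (limit is assumed to be at most the store's size):

#include <cstddef>
#include <optional>
#include <vector>

// Pack the defined values of elems[0..limit) to the front, fill the rest with
// holes, and return the count of defined values. The object-array branch in
// the hunk additionally places undefined values between the defined ones and
// the holes.
static std::size_t CompactHolesForSort(
    std::vector<std::optional<double>>& elems, std::size_t limit) {
  std::size_t holes = limit;
  for (std::size_t i = 0; i < holes; ++i) {
    if (elems[i]) continue;  // Position i already holds a value.
    --holes;                 // One fewer defined slot than assumed.
    while (holes > i) {
      if (!elems[holes]) {
        --holes;                  // Skip holes at the back.
      } else {
        elems[i] = elems[holes];  // Pull the last defined value down.
        break;
      }
    }
  }
  std::size_t defined = holes;
  for (std::size_t j = holes; j < limit; ++j) {
    elems[j] = std::nullopt;  // Hole out the tail.
  }
  return defined;
}
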
- FixedArray* elements = FixedArray::cast(this->elements()); - uint32_t elements_length = static_cast(elements->length()); + FixedArrayBase* elements_base = FixedArrayBase::cast(this->elements()); + uint32_t elements_length = static_cast(elements_base->length()); if (limit > elements_length) { limit = elements_length ; } @@ -10535,47 +10214,78 @@ result_double = HeapNumber::cast(new_double); } - AssertNoAllocation no_alloc; - - // Split elements into defined, undefined and the_hole, in that order. - // Only count locations for undefined and the hole, and fill them afterwards. - WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc); - unsigned int undefs = limit; - unsigned int holes = limit; - // Assume most arrays contain no holes and undefined values, so minimize the - // number of stores of non-undefined, non-the-hole values. - for (unsigned int i = 0; i < undefs; i++) { - Object* current = elements->get(i); - if (current->IsTheHole()) { - holes--; - undefs--; - } else if (current->IsUndefined()) { - undefs--; - } else { - continue; + uint32_t result = 0; + if (elements_base->map() == heap->fixed_double_array_map()) { + FixedDoubleArray* elements = FixedDoubleArray::cast(elements_base); + // Split elements into defined and the_hole, in that order. + unsigned int holes = limit; + // Assume most arrays contain no holes and undefined values, so minimize the + // number of stores of non-undefined, non-the-hole values. + for (unsigned int i = 0; i < holes; i++) { + if (elements->is_the_hole(i)) { + holes--; + } else { + continue; + } + // Position i needs to be filled. + while (holes > i) { + if (elements->is_the_hole(holes)) { + holes--; + } else { + elements->set(i, elements->get_scalar(holes)); + break; + } + } } - // Position i needs to be filled. - while (undefs > i) { - current = elements->get(undefs); + result = holes; + while (holes < limit) { + elements->set_the_hole(holes); + holes++; + } + } else { + FixedArray* elements = FixedArray::cast(elements_base); + AssertNoAllocation no_alloc; + + // Split elements into defined, undefined and the_hole, in that order. Only + // count locations for undefined and the hole, and fill them afterwards. + WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc); + unsigned int undefs = limit; + unsigned int holes = limit; + // Assume most arrays contain no holes and undefined values, so minimize the + // number of stores of non-undefined, non-the-hole values. + for (unsigned int i = 0; i < undefs; i++) { + Object* current = elements->get(i); if (current->IsTheHole()) { holes--; undefs--; } else if (current->IsUndefined()) { undefs--; } else { - elements->set(i, current, write_barrier); - break; + continue; + } + // Position i needs to be filled. 
+ while (undefs > i) { + current = elements->get(undefs); + if (current->IsTheHole()) { + holes--; + undefs--; + } else if (current->IsUndefined()) { + undefs--; + } else { + elements->set(i, current, write_barrier); + break; + } } } - } - uint32_t result = undefs; - while (undefs < holes) { - elements->set_undefined(undefs); - undefs++; - } - while (holes < limit) { - elements->set_the_hole(holes); - holes++; + result = undefs; + while (undefs < holes) { + elements->set_undefined(undefs); + undefs++; + } + while (holes < limit) { + elements->set_the_hole(holes); + holes++; + } } if (result <= static_cast(Smi::kMaxValue)) { @@ -11603,6 +11313,63 @@ } +Object* ObjectHashTable::Lookup(JSObject* key) { + // If the object does not have an identity hash, it was never used as a key. + MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION); + if (maybe_hash->IsFailure()) return GetHeap()->undefined_value(); + int entry = FindEntry(key); + if (entry == kNotFound) return GetHeap()->undefined_value(); + return get(EntryToIndex(entry) + 1); +} + + +MaybeObject* ObjectHashTable::Put(JSObject* key, Object* value) { + // Make sure the key object has an identity hash code. + int hash; + { MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::ALLOW_CREATION); + if (maybe_hash->IsFailure()) return maybe_hash; + hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value(); + } + int entry = FindEntry(key); + + // Check whether to perform removal operation. + if (value->IsUndefined()) { + if (entry == kNotFound) return this; + RemoveEntry(entry); + return Shrink(key); + } + + // Key is already in table, just overwrite value. + if (entry != kNotFound) { + set(EntryToIndex(entry) + 1, value); + return this; + } + + // Check whether the hash table should be extended. + Object* obj; + { MaybeObject* maybe_obj = EnsureCapacity(1, key); + if (!maybe_obj->ToObject(&obj)) return maybe_obj; + } + ObjectHashTable* table = ObjectHashTable::cast(obj); + table->AddEntry(table->FindInsertionEntry(hash), key, value); + return table; +} + + +void ObjectHashTable::AddEntry(int entry, JSObject* key, Object* value) { + set(EntryToIndex(entry), key); + set(EntryToIndex(entry) + 1, value); + ElementAdded(); +} + + +void ObjectHashTable::RemoveEntry(int entry, Heap* heap) { + set_null(heap, EntryToIndex(entry)); + set_null(heap, EntryToIndex(entry) + 1); + ElementRemoved(); +} + + #ifdef ENABLE_DEBUGGER_SUPPORT // Check if there is a break point at this code position. bool DebugInfo::HasBreakPoint(int code_position) { @@ -11829,7 +11596,7 @@ Handle break_point_object) { // No break point. if (break_point_info->break_point_objects()->IsUndefined()) return false; - // Single beak point. + // Single break point. if (!break_point_info->break_point_objects()->IsFixedArray()) { return break_point_info->break_point_objects() == *break_point_object; } @@ -11848,7 +11615,7 @@ int BreakPointInfo::GetBreakPointCount() { // No break point. if (break_point_objects()->IsUndefined()) return 0; - // Single beak point. + // Single break point. if (!break_point_objects()->IsFixedArray()) return 1; // Multiple break points. 
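The PrepareElementsForSort changes above pack the defined values of a double array to the front before the tail is refilled with holes. A minimal sketch of the same two-pointer compaction, assuming a plain std::vector<double> in which NaN stands in for the hole (V8 uses a dedicated hole NaN inside FixedDoubleArray); CompactHoles is an illustrative name:

#include <cmath>
#include <cstddef>
#include <vector>

// Packs all defined values to the front, re-fills the tail with holes and
// returns the number of defined values, mirroring the two-pointer walk above.
size_t CompactHoles(std::vector<double>* elements, size_t limit) {
  size_t holes = limit;  // First index at or beyond the defined region.
  for (size_t i = 0; i < holes; i++) {
    if (!std::isnan((*elements)[i])) continue;  // Position i already defined.
    // Position i is a hole: pull a defined value in from the back.
    while (holes > i) {
      holes--;
      if (!std::isnan((*elements)[holes])) {
        (*elements)[i] = (*elements)[holes];
        break;
      }
    }
  }
  size_t result = holes;
  while (holes < limit) {
    (*elements)[holes++] = std::nan("");  // Restore the hole marker at the tail.
  }
  return result;
}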
return FixedArray::cast(break_point_objects())->length(); diff -Nru libv8-3.4.14.21/src/objects-debug.cc libv8-3.5.10.24/src/objects-debug.cc --- libv8-3.4.14.21/src/objects-debug.cc 2011-07-18 09:50:57.000000000 +0000 +++ libv8-3.5.10.24/src/objects-debug.cc 2011-08-29 10:41:00.000000000 +0000 @@ -153,6 +153,9 @@ case JS_ARRAY_TYPE: JSArray::cast(this)->JSArrayVerify(); break; + case JS_WEAK_MAP_TYPE: + JSWeakMap::cast(this)->JSWeakMapVerify(); + break; case JS_REGEXP_TYPE: JSRegExp::cast(this)->JSRegExpVerify(); break; @@ -313,7 +316,7 @@ void FixedDoubleArray::FixedDoubleArrayVerify() { for (int i = 0; i < length(); i++) { if (!is_the_hole(i)) { - double value = get(i); + double value = get_scalar(i); ASSERT(!isnan(value) || (BitCast(value) == BitCast(canonical_not_the_hole_nan_as_double()))); @@ -349,6 +352,31 @@ if (IsSymbol()) { CHECK(!HEAP->InNewSpace(this)); } + if (IsConsString()) { + ConsString::cast(this)->ConsStringVerify(); + } else if (IsSlicedString()) { + SlicedString::cast(this)->SlicedStringVerify(); + } +} + + +void ConsString::ConsStringVerify() { + CHECK(this->first()->IsString()); + CHECK(this->second() == GetHeap()->empty_string() || + this->second()->IsString()); + CHECK(this->length() >= String::kMinNonFlatLength); + if (this->IsFlat()) { + // A flat cons can only be created by String::SlowTryFlatten. + // Afterwards, the first part may be externalized. + CHECK(this->first()->IsSeqString() || this->first()->IsExternalString()); + } +} + + +void SlicedString::SlicedStringVerify() { + CHECK(!this->parent()->IsConsString()); + CHECK(!this->parent()->IsSlicedString()); + CHECK(this->length() >= SlicedString::kMinLength); } @@ -453,6 +481,14 @@ } +void JSWeakMap::JSWeakMapVerify() { + CHECK(IsJSWeakMap()); + JSObjectVerify(); + VerifyHeapPointer(table()); + ASSERT(table()->IsHashTable()); +} + + void JSRegExp::JSRegExpVerify() { JSObjectVerify(); ASSERT(data()->IsUndefined() || data()->IsFixedArray()); diff -Nru libv8-3.4.14.21/src/objects.h libv8-3.5.10.24/src/objects.h --- libv8-3.4.14.21/src/objects.h 2011-08-11 16:03:29.000000000 +0000 +++ libv8-3.5.10.24/src/objects.h 2011-08-31 09:03:56.000000000 +0000 @@ -51,6 +51,7 @@ // - JSReceiver (suitable for property access) // - JSObject // - JSArray +// - JSWeakMap // - JSRegExp // - JSFunction // - GlobalObject @@ -61,31 +62,34 @@ // - JSMessageObject // - JSProxy // - JSFunctionProxy -// - ByteArray -// - ExternalArray -// - ExternalPixelArray -// - ExternalByteArray -// - ExternalUnsignedByteArray -// - ExternalShortArray -// - ExternalUnsignedShortArray -// - ExternalIntArray -// - ExternalUnsignedIntArray -// - ExternalFloatArray -// - FixedArray -// - DescriptorArray -// - HashTable -// - Dictionary -// - SymbolTable -// - CompilationCacheTable -// - CodeCacheHashTable -// - MapCache -// - Context -// - JSFunctionResultCache -// - SerializedScopeInfo +// - FixedArrayBase +// - ByteArray +// - FixedArray +// - DescriptorArray +// - HashTable +// - Dictionary +// - SymbolTable +// - CompilationCacheTable +// - CodeCacheHashTable +// - MapCache +// - Context +// - JSFunctionResultCache +// - SerializedScopeInfo +// - FixedDoubleArray +// - ExternalArray +// - ExternalPixelArray +// - ExternalByteArray +// - ExternalUnsignedByteArray +// - ExternalShortArray +// - ExternalUnsignedShortArray +// - ExternalIntArray +// - ExternalUnsignedIntArray +// - ExternalFloatArray // - String // - SeqString // - SeqAsciiString // - SeqTwoByteString +// - SlicedString // - ConsString // - ExternalString // - ExternalAsciiString @@ 
-280,6 +284,7 @@ V(ASCII_STRING_TYPE) \ V(CONS_STRING_TYPE) \ V(CONS_ASCII_STRING_TYPE) \ + V(SLICED_STRING_TYPE) \ V(EXTERNAL_STRING_TYPE) \ V(EXTERNAL_STRING_WITH_ASCII_DATA_TYPE) \ V(EXTERNAL_ASCII_STRING_TYPE) \ @@ -319,6 +324,7 @@ V(POLYMORPHIC_CODE_CACHE_TYPE) \ \ V(FIXED_ARRAY_TYPE) \ + V(FIXED_DOUBLE_ARRAY_TYPE) \ V(SHARED_FUNCTION_INFO_TYPE) \ \ V(JS_MESSAGE_OBJECT_TYPE) \ @@ -331,6 +337,7 @@ V(JS_GLOBAL_PROXY_TYPE) \ V(JS_ARRAY_TYPE) \ V(JS_PROXY_TYPE) \ + V(JS_WEAK_MAP_TYPE) \ V(JS_REGEXP_TYPE) \ \ V(JS_FUNCTION_TYPE) \ @@ -396,6 +403,14 @@ ConsString::kSize, \ cons_ascii_string, \ ConsAsciiString) \ + V(SLICED_STRING_TYPE, \ + SlicedString::kSize, \ + sliced_string, \ + SlicedString) \ + V(SLICED_ASCII_STRING_TYPE, \ + SlicedString::kSize, \ + sliced_ascii_string, \ + SlicedAsciiString) \ V(EXTERNAL_STRING_TYPE, \ ExternalTwoByteString::kSize, \ external_string, \ @@ -469,9 +484,22 @@ enum StringRepresentationTag { kSeqStringTag = 0x0, kConsStringTag = 0x1, - kExternalStringTag = 0x2 + kExternalStringTag = 0x2, + kSlicedStringTag = 0x3 }; -const uint32_t kIsConsStringMask = 0x1; +const uint32_t kIsIndirectStringMask = 0x1; +const uint32_t kIsIndirectStringTag = 0x1; +STATIC_ASSERT((kSeqStringTag & kIsIndirectStringMask) == 0); +STATIC_ASSERT((kExternalStringTag & kIsIndirectStringMask) == 0); +STATIC_ASSERT( + (kConsStringTag & kIsIndirectStringMask) == kIsIndirectStringTag); +STATIC_ASSERT( + (kSlicedStringTag & kIsIndirectStringMask) == kIsIndirectStringTag); + +// Use this mask to distinguish between cons and slice only after making +// sure that the string is one of the two (an indirect string). +const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag; +STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask) && kSlicedNotConsMask != 0); // If bit 7 is clear, then bit 3 indicates whether this two-byte // string actually contains ascii data. @@ -506,6 +534,8 @@ ASCII_STRING_TYPE = kAsciiStringTag | kSeqStringTag, CONS_STRING_TYPE = kTwoByteStringTag | kConsStringTag, CONS_ASCII_STRING_TYPE = kAsciiStringTag | kConsStringTag, + SLICED_STRING_TYPE = kTwoByteStringTag | kSlicedStringTag, + SLICED_ASCII_STRING_TYPE = kAsciiStringTag | kSlicedStringTag, EXTERNAL_STRING_TYPE = kTwoByteStringTag | kExternalStringTag, EXTERNAL_STRING_WITH_ASCII_DATA_TYPE = kTwoByteStringTag | kExternalStringTag | kAsciiDataHintTag, @@ -568,6 +598,7 @@ JS_GLOBAL_PROXY_TYPE, JS_ARRAY_TYPE, JS_PROXY_TYPE, + JS_WEAK_MAP_TYPE, JS_REGEXP_TYPE, // LAST_NONCALLABLE_SPEC_OBJECT_TYPE @@ -630,8 +661,11 @@ WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \ -class StringStream; +class DictionaryElementsAccessor; +class ElementsAccessor; +class FixedArrayBase; class ObjectVisitor; +class StringStream; struct ValueInfo : public Malloced { ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { } @@ -709,6 +743,7 @@ V(SeqString) \ V(ExternalString) \ V(ConsString) \ + V(SlicedString) \ V(ExternalTwoByteString) \ V(ExternalAsciiString) \ V(SeqTwoByteString) \ @@ -736,6 +771,7 @@ V(FixedDoubleArray) \ V(Context) \ V(GlobalContext) \ + V(SerializedScopeInfo) \ V(JSFunction) \ V(Code) \ V(Oddball) \ @@ -748,6 +784,7 @@ V(JSArray) \ V(JSProxy) \ V(JSFunctionProxy) \ + V(JSWeakMap) \ V(JSRegExp) \ V(HashTable) \ V(Dictionary) \ @@ -790,6 +827,8 @@ STRUCT_LIST(DECLARE_STRUCT_PREDICATE) #undef DECLARE_STRUCT_PREDICATE + INLINE(bool IsSpecObject()); + // Oddball testing. 
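The new representation tags above give cons and sliced strings a shared "indirect" bit, so both can be unwrapped through the same parent slot, while one further bit separates the two. A small self-checking sketch using the same constants (the helper names and main() are illustrative only):

#include <cassert>
#include <cstdint>

const uint32_t kSeqStringTag = 0x0;
const uint32_t kConsStringTag = 0x1;
const uint32_t kExternalStringTag = 0x2;
const uint32_t kSlicedStringTag = 0x3;

const uint32_t kIsIndirectStringMask = 0x1;
const uint32_t kIsIndirectStringTag = 0x1;
// Only meaningful once the string is known to be indirect (cons or sliced).
const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;  // 0x2

bool IsIndirect(uint32_t representation_tag) {
  return (representation_tag & kIsIndirectStringMask) == kIsIndirectStringTag;
}

bool IsSlicedNotCons(uint32_t representation_tag) {
  assert(IsIndirect(representation_tag));
  return (representation_tag & kSlicedNotConsMask) != 0;
}

int main() {
  assert(!IsIndirect(kSeqStringTag) && !IsIndirect(kExternalStringTag));
  assert(IsIndirect(kConsStringTag) && IsIndirect(kSlicedStringTag));
  assert(IsSlicedNotCons(kSlicedStringTag) && !IsSlicedNotCons(kConsStringTag));
  return 0;
}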
INLINE(bool IsUndefined()); INLINE(bool IsNull()); @@ -1482,10 +1521,11 @@ // In the slow mode the elements is either a NumberDictionary, an // ExternalArray, or a FixedArray parameter map for a (non-strict) // arguments object. - DECL_ACCESSORS(elements, HeapObject) + DECL_ACCESSORS(elements, FixedArrayBase) inline void initialize_elements(); MUST_USE_RESULT inline MaybeObject* ResetElements(); inline ElementsKind GetElementsKind(); + inline ElementsAccessor* GetElementsAccessor(); inline bool HasFastElements(); inline bool HasFastDoubleElements(); inline bool HasDictionaryElements(); @@ -1636,6 +1676,23 @@ MUST_USE_RESULT inline MaybeObject* SetHiddenPropertiesObject( Object* hidden_obj); + // Indicates whether the hidden properties object should be created. + enum HiddenPropertiesFlag { ALLOW_CREATION, OMIT_CREATION }; + + // Retrieves the hidden properties object. + // + // The undefined value might be returned in case no hidden properties object + // is present and creation was omitted. + inline bool HasHiddenProperties(); + MUST_USE_RESULT MaybeObject* GetHiddenProperties(HiddenPropertiesFlag flag); + + // Retrieves a permanent object identity hash code. + // + // The identity hash is stored as a hidden property. The undefined value might + // be returned in case no hidden properties object is present and creation was + // omitted. + MUST_USE_RESULT MaybeObject* GetIdentityHash(HiddenPropertiesFlag flag); + MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode); MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode); @@ -1652,7 +1709,7 @@ bool ShouldConvertToFastElements(); // Returns true if the elements of JSObject contains only values that can be // represented in a FixedDoubleArray. - bool ShouldConvertToFastDoubleElements(); + bool CanConvertToFastDoubleElements(); // Tells whether the index'th element is present. inline bool HasElement(uint32_t index); @@ -1711,14 +1768,8 @@ // Returns the index'th element. // The undefined object if index is out of bounds. - MaybeObject* GetElementWithReceiver(Object* receiver, uint32_t index); MaybeObject* GetElementWithInterceptor(Object* receiver, uint32_t index); - // Get external element value at index if there is one and undefined - // otherwise. Can return a failure if allocation of a heap number - // failed. - MaybeObject* GetExternalElement(uint32_t index); - // Replace the elements' backing store with fast elements of the given // capacity. Update the length for JSArrays. Returns the new backing // store. @@ -1946,8 +1997,21 @@ // Also maximal value of JSArray's length property. static const uint32_t kMaxElementCount = 0xffffffffu; + // Constants for heuristics controlling conversion of fast elements + // to slow elements. + + // Maximal gap that can be introduced by adding an element beyond + // the current elements length. static const uint32_t kMaxGap = 1024; - static const int kMaxFastElementsLength = 5000; + + // Maximal length of fast elements array that won't be checked for + // being dense enough on expansion. + static const int kMaxUncheckedFastElementsLength = 5000; + + // Same as above but for old arrays. This limit is more strict. We + // don't want to be wasteful with long lived objects. 
+ static const int kMaxUncheckedOldFastElementsLength = 500; + static const int kInitialMaxFastElementArray = 100000; static const int kMaxFastProperties = 12; static const int kMaxInstanceSize = 255 * kPointerSize; @@ -1969,6 +2033,8 @@ }; private: + friend class DictionaryElementsAccessor; + MUST_USE_RESULT MaybeObject* GetElementWithCallback(Object* receiver, Object* structure, uint32_t index, @@ -1989,14 +2055,10 @@ StrictModeFlag strict_mode, bool check_prototype); - MaybeObject* GetElementPostInterceptor(Object* receiver, uint32_t index); - MUST_USE_RESULT MaybeObject* DeletePropertyPostInterceptor(String* name, DeleteMode mode); MUST_USE_RESULT MaybeObject* DeletePropertyWithInterceptor(String* name); - MUST_USE_RESULT MaybeObject* DeleteElementPostInterceptor(uint32_t index, - DeleteMode mode); MUST_USE_RESULT MaybeObject* DeleteElementWithInterceptor(uint32_t index); MUST_USE_RESULT MaybeObject* DeleteFastElement(uint32_t index); @@ -2013,6 +2075,9 @@ // Returns true if most of the elements backing storage is used. bool HasDenseElements(); + // Gets the current elements capacity and the number of used elements. + void GetElementsCapacityAndUsage(int* capacity, int* used); + bool CanSetCallback(String* name); MUST_USE_RESULT MaybeObject* SetElementCallback( uint32_t index, @@ -2049,6 +2114,8 @@ }; +class FixedDoubleArray; + // FixedArray describes fixed-sized arrays with element type Object*. class FixedArray: public FixedArrayBase { public: @@ -2056,6 +2123,7 @@ inline Object* get(int index); // Setter that uses write barrier. inline void set(int index, Object* value); + inline bool is_the_hole(int index); // Setter that doesn't need write barrier). inline void set(int index, Smi* value); @@ -2157,7 +2225,8 @@ inline void Initialize(NumberDictionary* from); // Setter and getter for elements. - inline double get(int index); + inline double get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, double value); inline void set_the_hole(int index); @@ -2484,6 +2553,10 @@ int at_least_space_for, PretenureFlag pretenure = NOT_TENURED); + // Computes the required capacity for a table holding the given + // number of elements. May be more than HashTable::kMaxCapacity. + static int ComputeCapacity(int at_least_space_for); + // Returns the key at entry. Object* KeyAt(int entry) { return get(EntryToIndex(entry)); } @@ -2906,6 +2979,48 @@ }; +class ObjectHashTableShape { + public: + static inline bool IsMatch(JSObject* key, Object* other); + static inline uint32_t Hash(JSObject* key); + static inline uint32_t HashForObject(JSObject* key, Object* object); + MUST_USE_RESULT static inline MaybeObject* AsObject(JSObject* key); + static const int kPrefixSize = 0; + static const int kEntrySize = 2; +}; + + +// ObjectHashTable maps keys that are JavaScript objects to object values by +// using the identity hash of the key for hashing purposes. +class ObjectHashTable: public HashTable { + public: + static inline ObjectHashTable* cast(Object* obj) { + ASSERT(obj->IsHashTable()); + return reinterpret_cast(obj); + } + + // Looks up the value associated with the given key. The undefined value is + // returned in case the key is not present. + Object* Lookup(JSObject* key); + + // Adds (or overwrites) the value associated with the given key. Mapping a + // key to the undefined value causes removal of the whole entry. 
+ MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value); + + private: + friend class MarkCompactCollector; + + void AddEntry(int entry, JSObject* key, Object* value); + void RemoveEntry(int entry, Heap* heap); + inline void RemoveEntry(int entry); + + // Returns the index to the value of an entry. + static inline int EntryToValueIndex(int entry) { + return EntryToIndex(entry) + 1; + } +}; + + // JSFunctionResultCache caches results of some JSFunction invocation. // It is a fixed array with fixed structure: // [0]: factory function @@ -2968,12 +3083,8 @@ // ByteArray represents fixed sized byte arrays. Used by the outside world, // such as PCRE, and also by the memory allocator and garbage collector to // fill in free blocks in the heap. -class ByteArray: public HeapObject { +class ByteArray: public FixedArrayBase { public: - // [length]: length of the array. - inline int length(); - inline void set_length(int value); - // Setter and getter. inline byte get(int index); inline void set(int index, byte value); @@ -3018,10 +3129,6 @@ #endif // Layout description. - // Length is smi tagged when it is stored. - static const int kLengthOffset = HeapObject::kHeaderSize; - static const int kHeaderSize = kLengthOffset + kPointerSize; - static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize); // Maximal memory consumption for a single ByteArray. @@ -3045,11 +3152,10 @@ // Out-of-range values passed to the setter are converted via a C // cast, not clamping. Out-of-range indices cause exceptions to be // raised rather than being silently ignored. -class ExternalArray: public HeapObject { +class ExternalArray: public FixedArrayBase { public: - // [length]: length of the array. - inline int length(); - inline void set_length(int value); + + inline bool is_the_hole(int index) { return false; } // [external_pointer]: The pointer to the external memory area backing this // external array. @@ -3062,9 +3168,8 @@ static const int kMaxLength = 0x3fffffff; // ExternalArray headers are not quadword aligned. - static const int kLengthOffset = HeapObject::kHeaderSize; static const int kExternalPointerOffset = - POINTER_SIZE_ALIGN(kLengthOffset + kIntSize); + POINTER_SIZE_ALIGN(FixedArrayBase::kLengthOffset + kPointerSize); static const int kHeaderSize = kExternalPointerOffset + kPointerSize; static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize); @@ -3086,7 +3191,8 @@ inline uint8_t* external_pixel_pointer(); // Setter and getter. - inline uint8_t get(int index); + inline uint8_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, uint8_t value); // This accessor applies the correct conversion from Smi, HeapNumber and @@ -3114,7 +3220,8 @@ class ExternalByteArray: public ExternalArray { public: // Setter and getter. - inline int8_t get(int index); + inline int8_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, int8_t value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3142,7 +3249,8 @@ class ExternalUnsignedByteArray: public ExternalArray { public: // Setter and getter. - inline uint8_t get(int index); + inline uint8_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, uint8_t value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3170,7 +3278,8 @@ class ExternalShortArray: public ExternalArray { public: // Setter and getter. 
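ObjectHashTable, declared above, keys entries on object identity and treats a mapping to the undefined value as removal. A minimal sketch of that contract, assuming ordinary C++ objects addressed by pointer instead of V8 heap objects; IdentityMap and Value are illustrative stand-ins:

#include <unordered_map>

struct Value {
  int payload = 0;
  bool is_undefined = false;
};

// Keys hash and compare by identity (their address), never by contents.
class IdentityMap {
 public:
  Value Lookup(const void* key) const {
    auto it = table_.find(key);
    if (it == table_.end()) return Value{0, true};  // "undefined": never a key.
    return it->second;
  }
  void Put(const void* key, Value value) {
    if (value.is_undefined) {  // Mapping to undefined removes the entry.
      table_.erase(key);
      return;
    }
    table_[key] = value;  // Insert or overwrite.
  }

 private:
  // std::hash for pointers hashes the address, i.e. object identity.
  std::unordered_map<const void*, Value> table_;
};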
- inline int16_t get(int index); + inline int16_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, int16_t value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3198,7 +3307,8 @@ class ExternalUnsignedShortArray: public ExternalArray { public: // Setter and getter. - inline uint16_t get(int index); + inline uint16_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, uint16_t value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3226,7 +3336,8 @@ class ExternalIntArray: public ExternalArray { public: // Setter and getter. - inline int32_t get(int index); + inline int32_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, int32_t value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3254,7 +3365,8 @@ class ExternalUnsignedIntArray: public ExternalArray { public: // Setter and getter. - inline uint32_t get(int index); + inline uint32_t get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, uint32_t value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3282,7 +3394,8 @@ class ExternalFloatArray: public ExternalArray { public: // Setter and getter. - inline float get(int index); + inline float get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, float value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3310,7 +3423,8 @@ class ExternalDoubleArray: public ExternalArray { public: // Setter and getter. - inline double get(int index); + inline double get_scalar(int index); + inline MaybeObject* get(int index); inline void set(int index, double value); // This accessor applies the correct conversion from Smi, HeapNumber @@ -3480,13 +3594,14 @@ UNARY_OP_IC, BINARY_OP_IC, COMPARE_IC, + TO_BOOLEAN_IC, // No more than 16 kinds. The value currently encoded in four bits in // Flags. // Pseudo-kinds. REGEXP = BUILTIN, FIRST_IC_KIND = LOAD_IC, - LAST_IC_KIND = COMPARE_IC + LAST_IC_KIND = TO_BOOLEAN_IC }; enum { @@ -3552,13 +3667,10 @@ inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; } inline bool is_call_stub() { return kind() == CALL_IC; } inline bool is_keyed_call_stub() { return kind() == KEYED_CALL_IC; } - inline bool is_unary_op_stub() { - return kind() == UNARY_OP_IC; - } - inline bool is_binary_op_stub() { - return kind() == BINARY_OP_IC; - } + inline bool is_unary_op_stub() { return kind() == UNARY_OP_IC; } + inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; } inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; } + inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; } // [major_key]: For kind STUB or BINARY_OP_IC, the major key. inline int major_key(); @@ -3600,21 +3712,24 @@ inline CheckType check_type(); inline void set_check_type(CheckType value); - // [type-recording unary op type]: For all UNARY_OP_IC. + // [type-recording unary op type]: For kind UNARY_OP_IC. inline byte unary_op_type(); inline void set_unary_op_type(byte value); - // [type-recording binary op type]: For all TYPE_RECORDING_BINARY_OP_IC. + // [type-recording binary op type]: For kind BINARY_OP_IC. inline byte binary_op_type(); inline void set_binary_op_type(byte value); inline byte binary_op_result_type(); inline void set_binary_op_result_type(byte value); - // [compare state]: For kind compare IC stubs, tells what state the - // stub is in. 
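The recurring change above splits every external-array getter into a raw get_scalar() and a get() that returns a tagged value and may therefore need to allocate (hence MaybeObject*). A minimal sketch of that split over a plain int32 buffer; Smi, HeapNumber, TaggedNumber and Int32Elements are stand-ins for the sketch, not V8 types:

#include <cassert>
#include <cstdint>
#include <variant>

// Stand-ins for V8's tagged values (assumption of the sketch only).
struct Smi { int32_t value; };
struct HeapNumber { double value; };
using TaggedNumber = std::variant<Smi, HeapNumber>;

class Int32Elements {
 public:
  Int32Elements(const int32_t* data, int length) : data_(data), length_(length) {}

  // Raw, allocation-free read, analogous to get_scalar().
  int32_t get_scalar(int index) const {
    assert(index >= 0 && index < length_);
    return data_[index];
  }

  // Boxed read, analogous to get(): values in Smi range stay immediate, the
  // rest are wrapped in a (possibly heap-allocated) number object.
  TaggedNumber get(int index) const {
    int32_t value = get_scalar(index);
    const int32_t kMaxSmi = (1 << 30) - 1;  // 31-bit signed Smi range.
    const int32_t kMinSmi = -(1 << 30);
    if (value >= kMinSmi && value <= kMaxSmi) return Smi{value};
    return HeapNumber{static_cast<double>(value)};
  }

 private:
  const int32_t* data_;
  int length_;
};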
+ // [compare state]: For kind COMPARE_IC, tells what state the stub is in. inline byte compare_state(); inline void set_compare_state(byte value); + // [to_boolean_foo]: For kind TO_BOOLEAN_IC tells what state the stub is in. + inline byte to_boolean_state(); + inline void set_to_boolean_state(byte value); + // Get the safepoint entry for the given pc. SafepointEntry GetSafepointEntry(Address pc); @@ -3756,9 +3871,10 @@ static const int kStackSlotsOffset = kKindSpecificFlagsOffset; static const int kCheckTypeOffset = kKindSpecificFlagsOffset; - static const int kCompareStateOffset = kStubMajorKeyOffset + 1; static const int kUnaryOpTypeOffset = kStubMajorKeyOffset + 1; static const int kBinaryOpTypeOffset = kStubMajorKeyOffset + 1; + static const int kCompareStateOffset = kStubMajorKeyOffset + 1; + static const int kToBooleanTypeOffset = kStubMajorKeyOffset + 1; static const int kHasDeoptimizationSupportOffset = kOptimizableOffset + 1; static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1; @@ -5693,12 +5809,15 @@ inline bool IsSequential(); inline bool IsExternal(); inline bool IsCons(); + inline bool IsSliced(); + inline bool IsIndirect(); inline bool IsExternalAscii(); inline bool IsExternalTwoByte(); inline bool IsSequentialAscii(); inline bool IsSequentialTwoByte(); inline bool IsSymbol(); inline StringRepresentationTag representation_tag(); + inline uint32_t encoding_tag(); inline uint32_t full_representation_tag(); inline uint32_t size_tag(); #ifdef DEBUG @@ -5730,6 +5849,51 @@ // All string values have a length field. class String: public HeapObject { public: + // Representation of the flat content of a String. + // A non-flat string doesn't have flat content. + // A flat string has content that's encoded as a sequence of either + // ASCII chars or two-byte UC16. + // Returned by String::GetFlatContent(). + class FlatContent { + public: + // Returns true if the string is flat and this structure contains content. + bool IsFlat() { return state_ != NON_FLAT; } + // Returns true if the structure contains ASCII content. + bool IsAscii() { return state_ == ASCII; } + // Returns true if the structure contains two-byte content. + bool IsTwoByte() { return state_ == TWO_BYTE; } + + // Return the ASCII content of the string. Only use if IsAscii() returns + // true. + Vector ToAsciiVector() { + ASSERT_EQ(ASCII, state_); + return Vector::cast(buffer_); + } + // Return the two-byte content of the string. Only use if IsTwoByte() + // returns true. + Vector ToUC16Vector() { + ASSERT_EQ(TWO_BYTE, state_); + return Vector::cast(buffer_); + } + + private: + enum State { NON_FLAT, ASCII, TWO_BYTE }; + + // Constructors only used by String::GetFlatContent(). + explicit FlatContent(Vector chars) + : buffer_(Vector::cast(chars)), + state_(ASCII) { } + explicit FlatContent(Vector chars) + : buffer_(Vector::cast(chars)), + state_(TWO_BYTE) { } + FlatContent() : buffer_(), state_(NON_FLAT) { } + + Vector buffer_; + State state_; + + friend class String; + }; + // Get and set the length of the string. inline int length(); inline void set_length(int value); @@ -5738,14 +5902,19 @@ inline uint32_t hash_field(); inline void set_hash_field(uint32_t value); + // Returns whether this string has only ASCII chars, i.e. all of them can + // be ASCII encoded. This might be the case even if the string is + // two-byte. Such strings may appear when the embedder prefers + // two-byte external representations even for ASCII data. 
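FlatContent, introduced above, is essentially a tagged union over "no content", an ASCII vector and a two-byte vector. A minimal sketch of the same shape, assuming C++17 std::variant and std::basic_string_view in place of V8's Vector and hand-rolled state enum:

#include <string_view>
#include <variant>

using AsciiView = std::basic_string_view<char>;
using TwoByteView = std::basic_string_view<char16_t>;

class FlatContentSketch {
 public:
  FlatContentSketch() : content_(std::monostate{}) {}                 // Non-flat string.
  explicit FlatContentSketch(AsciiView chars) : content_(chars) {}    // ASCII content.
  explicit FlatContentSketch(TwoByteView chars) : content_(chars) {}  // Two-byte content.

  bool IsFlat() const { return !std::holds_alternative<std::monostate>(content_); }
  bool IsAscii() const { return std::holds_alternative<AsciiView>(content_); }
  bool IsTwoByte() const { return std::holds_alternative<TwoByteView>(content_); }

  // Only valid when the matching predicate above returns true.
  AsciiView ToAsciiVector() const { return std::get<AsciiView>(content_); }
  TwoByteView ToUC16Vector() const { return std::get<TwoByteView>(content_); }

 private:
  std::variant<std::monostate, AsciiView, TwoByteView> content_;
};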
inline bool IsAsciiRepresentation(); inline bool IsTwoByteRepresentation(); - // Returns whether this string has ascii chars, i.e. all of them can - // be ascii encoded. This might be the case even if the string is - // two-byte. Such strings may appear when the embedder prefers - // two-byte external representations even for ascii data. - // + // Cons and slices have an encoding flag that may not represent the actual + // encoding of the underlying string. This is taken into account here. + // Requires: this->IsFlat() + inline bool IsAsciiRepresentationUnderneath(); + inline bool IsTwoByteRepresentationUnderneath(); + // NOTE: this should be considered only a hint. False negatives are // possible. inline bool HasOnlyAsciiChars(); @@ -5778,8 +5947,16 @@ // string. inline String* TryFlattenGetString(PretenureFlag pretenure = NOT_TENURED); - Vector ToAsciiVector(); - Vector ToUC16Vector(); + // Tries to return the content of a flat string as a structure holding either + // a flat vector of char or of uc16. + // If the string isn't flat, and therefore doesn't have flat content, the + // returned structure will report so, and can't provide a vector of either + // kind. + FlatContent GetFlatContent(); + + // Returns the parent of a sliced string or first part of a flat cons string. + // Requires: StringShape(this).IsIndirect() && this->IsFlat() + inline String* GetUnderlying(); // Mark the string as an undetectable object. It only applies to // ascii and two byte string types. @@ -5860,6 +6037,8 @@ StringPrint(stdout); } void StringPrint(FILE* out); + + char* ToAsciiArray(); #endif #ifdef DEBUG void StringVerify(); @@ -6207,11 +6386,69 @@ typedef FixedBodyDescriptor BodyDescriptor; +#ifdef DEBUG + void ConsStringVerify(); +#endif + private: DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString); }; +// The Sliced String class describes strings that are substrings of another +// sequential string. The motivation is to save time and memory when creating +// a substring. A Sliced String is described as a pointer to the parent, +// the offset from the start of the parent string and the length. Using +// a Sliced String therefore requires unpacking of the parent string and +// adding the offset to the start address. A substring of a Sliced String +// are not nested since the double indirection is simplified when creating +// such a substring. +// Currently missing features are: +// - handling externalized parent strings +// - external strings as parent +// - truncating sliced string to enable otherwise unneeded parent to be GC'ed. +class SlicedString: public String { + public: + + inline String* parent(); + inline void set_parent(String* parent); + inline int offset(); + inline void set_offset(int offset); + + // Dispatched behavior. + uint16_t SlicedStringGet(int index); + + // Casting. + static inline SlicedString* cast(Object* obj); + + // Layout description. + static const int kParentOffset = POINTER_SIZE_ALIGN(String::kSize); + static const int kOffsetOffset = kParentOffset + kPointerSize; + static const int kSize = kOffsetOffset + kPointerSize; + + // Support for StringInputBuffer + inline const unibrow::byte* SlicedStringReadBlock(ReadBlockBuffer* buffer, + unsigned* offset_ptr, + unsigned chars); + inline void SlicedStringReadBlockIntoBuffer(ReadBlockBuffer* buffer, + unsigned* offset_ptr, + unsigned chars); + // Minimum length for a sliced string. 
+ static const int kMinLength = 13; + + typedef FixedBodyDescriptor + BodyDescriptor; + +#ifdef DEBUG + void SlicedStringVerify(); +#endif + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(SlicedString); +}; + + // The ExternalString class describes string values that are backed by // a string resource that lies outside the V8 heap. ExternalStrings // consist of the length field common to all strings, a pointer to the @@ -6551,6 +6788,40 @@ }; +// The JSWeakMap describes EcmaScript Harmony weak maps +class JSWeakMap: public JSObject { + public: + // [table]: the backing hash table mapping keys to values. + DECL_ACCESSORS(table, ObjectHashTable) + + // [next]: linked list of encountered weak maps during GC. + DECL_ACCESSORS(next, Object) + + // Unchecked accessors to be used during GC. + inline ObjectHashTable* unchecked_table(); + + // Casting. + static inline JSWeakMap* cast(Object* obj); + +#ifdef OBJECT_PRINT + inline void JSWeakMapPrint() { + JSWeakMapPrint(stdout); + } + void JSWeakMapPrint(FILE* out); +#endif +#ifdef DEBUG + void JSWeakMapVerify(); +#endif + + static const int kTableOffset = JSObject::kHeaderSize; + static const int kNextOffset = kTableOffset + kPointerSize; + static const int kSize = kNextOffset + kPointerSize; + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap); +}; + + // Foreign describes objects pointing from JavaScript to C structures. // Since they cannot contain references to JS HeapObjects they can be // placed in old_data_space. @@ -6850,7 +7121,6 @@ DECL_ACCESSORS(instance_call_handler, Object) DECL_ACCESSORS(access_check_info, Object) DECL_ACCESSORS(flag, Smi) - DECL_ACCESSORS(prototype_attributes, Smi) // Following properties use flag bits. DECL_BOOLEAN_ACCESSORS(hidden_prototype) @@ -6858,6 +7128,7 @@ // If the bit is set, object instances created by this function // requires access check. DECL_BOOLEAN_ACCESSORS(needs_access_check) + DECL_BOOLEAN_ACCESSORS(read_only_prototype) static inline FunctionTemplateInfo* cast(Object* obj); @@ -6890,14 +7161,14 @@ static const int kAccessCheckInfoOffset = kInstanceCallHandlerOffset + kPointerSize; static const int kFlagOffset = kAccessCheckInfoOffset + kPointerSize; - static const int kPrototypeAttributesOffset = kFlagOffset + kPointerSize; - static const int kSize = kPrototypeAttributesOffset + kPointerSize; + static const int kSize = kFlagOffset + kPointerSize; private: // Bit position in the flag, from least significant bit position. static const int kHiddenPrototypeBit = 0; static const int kUndetectableBit = 1; static const int kNeedsAccessCheckBit = 2; + static const int kReadOnlyPrototypeBit = 3; DISALLOW_IMPLICIT_CONSTRUCTORS(FunctionTemplateInfo); }; diff -Nru libv8-3.4.14.21/src/objects-inl.h libv8-3.5.10.24/src/objects-inl.h --- libv8-3.4.14.21/src/objects-inl.h 2011-08-09 12:57:00.000000000 +0000 +++ libv8-3.5.10.24/src/objects-inl.h 2011-10-14 10:53:18.000000000 +0000 @@ -35,6 +35,7 @@ #ifndef V8_OBJECTS_INL_H_ #define V8_OBJECTS_INL_H_ +#include "elements.h" #include "objects.h" #include "contexts.h" #include "conversions-inl.h" @@ -158,23 +159,33 @@ } +bool Object::IsSpecObject() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE; +} + + bool Object::IsSymbol() { if (!this->IsHeapObject()) return false; uint32_t type = HeapObject::cast(this)->map()->instance_type(); // Because the symbol tag is non-zero and no non-string types have the // symbol bit set we can test for symbols with a very simple test // operation. 
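SlicedString, added above, represents a substring as (parent, offset, length) and only pays off above a minimum length. A minimal sketch over an ordinary C++ string; Slice, MakeSubstring and the shared_ptr ownership are assumptions of the sketch (in V8 the GC keeps the parent alive):

#include <cassert>
#include <cstddef>
#include <memory>
#include <string>

// Below this length a slice is not worthwhile; a flat copy is made instead.
const size_t kMinSliceLength = 13;

struct Slice {
  std::shared_ptr<const std::string> parent;  // Keeps the parent alive.
  size_t offset;
  size_t length;

  char At(size_t i) const {
    assert(i < length);
    return (*parent)[offset + i];  // Read through the parent, adding the offset.
  }
};

Slice MakeSubstring(std::shared_ptr<const std::string> parent,
                    size_t offset, size_t length) {
  assert(offset + length <= parent->size());
  assert(length >= kMinSliceLength);  // Short substrings should be copied flat.
  return Slice{std::move(parent), offset, length};
}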
- ASSERT(kSymbolTag != 0); + STATIC_ASSERT(kSymbolTag != 0); ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE); return (type & kIsSymbolMask) != 0; } bool Object::IsConsString() { - if (!this->IsHeapObject()) return false; - uint32_t type = HeapObject::cast(this)->map()->instance_type(); - return (type & (kIsNotStringMask | kStringRepresentationMask)) == - (kStringTag | kConsStringTag); + if (!IsString()) return false; + return StringShape(String::cast(this)).IsCons(); +} + + +bool Object::IsSlicedString() { + if (!IsString()) return false; + return StringShape(String::cast(this)).IsSliced(); } @@ -245,7 +256,7 @@ bool StringShape::IsSymbol() { ASSERT(valid()); - ASSERT(kSymbolTag != 0); + STATIC_ASSERT(kSymbolTag != 0); return (type_ & kIsSymbolMask) != 0; } @@ -262,6 +273,38 @@ } +bool String::IsAsciiRepresentationUnderneath() { + uint32_t type = map()->instance_type(); + STATIC_ASSERT(kIsIndirectStringTag != 0); + STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0); + ASSERT(IsFlat()); + switch (type & (kIsIndirectStringMask | kStringEncodingMask)) { + case kAsciiStringTag: + return true; + case kTwoByteStringTag: + return false; + default: // Cons or sliced string. Need to go deeper. + return GetUnderlying()->IsAsciiRepresentation(); + } +} + + +bool String::IsTwoByteRepresentationUnderneath() { + uint32_t type = map()->instance_type(); + STATIC_ASSERT(kIsIndirectStringTag != 0); + STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0); + ASSERT(IsFlat()); + switch (type & (kIsIndirectStringMask | kStringEncodingMask)) { + case kAsciiStringTag: + return false; + case kTwoByteStringTag: + return true; + default: // Cons or sliced string. Need to go deeper. + return GetUnderlying()->IsTwoByteRepresentation(); + } +} + + bool String::HasOnlyAsciiChars() { uint32_t type = map()->instance_type(); return (type & kStringEncodingMask) == kAsciiStringTag || @@ -274,6 +317,16 @@ } +bool StringShape::IsSliced() { + return (type_ & kStringRepresentationMask) == kSlicedStringTag; +} + + +bool StringShape::IsIndirect() { + return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag; +} + + bool StringShape::IsExternal() { return (type_ & kStringRepresentationMask) == kExternalStringTag; } @@ -290,6 +343,11 @@ } +uint32_t StringShape::encoding_tag() { + return type_ & kStringEncodingMask; +} + + uint32_t StringShape::full_representation_tag() { return (type_ & (kStringRepresentationMask | kStringEncodingMask)); } @@ -474,6 +532,12 @@ } +bool Object::IsJSWeakMap() { + return Object::IsJSObject() && + HeapObject::cast(this)->map()->instance_type() == JS_WEAK_MAP_TYPE; +} + + bool Object::IsJSContextExtensionObject() { return IsHeapObject() && (HeapObject::cast(this)->map()->instance_type() == @@ -539,7 +603,8 @@ return (map == heap->function_context_map() || map == heap->catch_context_map() || map == heap->with_context_map() || - map == heap->global_context_map()); + map == heap->global_context_map() || + map == heap->block_context_map()); } return false; } @@ -552,6 +617,13 @@ } +bool Object::IsSerializedScopeInfo() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map() == + HeapObject::cast(this)->GetHeap()->serialized_scope_info_map(); +} + + bool Object::IsJSFunction() { return Object::IsHeapObject() && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE; @@ -1322,17 +1394,19 @@ ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset) -HeapObject* JSObject::elements() { +FixedArrayBase* JSObject::elements() { Object* array = 
READ_FIELD(this, kElementsOffset); ASSERT(array->HasValidElements()); - return reinterpret_cast(array); + return static_cast(array); } -void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) { +void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) { ASSERT(map()->has_fast_elements() == (value->map() == GetHeap()->fixed_array_map() || value->map() == GetHeap()->fixed_cow_array_map())); + ASSERT(map()->has_fast_double_elements() == + value->IsFixedDoubleArray()); ASSERT(value->HasValidElements()); WRITE_FIELD(this, kElementsOffset, value); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode); @@ -1408,6 +1482,8 @@ return JSValue::kSize; case JS_ARRAY_TYPE: return JSValue::kSize; + case JS_WEAK_MAP_TYPE: + return JSWeakMap::kSize; case JS_REGEXP_TYPE: return JSValue::kSize; case JS_CONTEXT_EXTENSION_OBJECT_TYPE: @@ -1595,6 +1671,7 @@ void FixedArray::set(int index, Smi* value) { ASSERT(map() != HEAP->fixed_cow_array_map()); + ASSERT(index >= 0 && index < this->length()); ASSERT(reinterpret_cast(value)->IsSmi()); int offset = kHeaderSize + index * kPointerSize; WRITE_FIELD(this, offset, value); @@ -1627,7 +1704,7 @@ } -double FixedDoubleArray::get(int index) { +double FixedDoubleArray::get_scalar(int index) { ASSERT(map() != HEAP->fixed_cow_array_map() && map() != HEAP->fixed_array_map()); ASSERT(index >= 0 && index < this->length()); @@ -1637,6 +1714,15 @@ } +MaybeObject* FixedDoubleArray::get(int index) { + if (is_the_hole(index)) { + return GetHeap()->the_hole_value(); + } else { + return GetHeap()->NumberFromDouble(get_scalar(index)); + } +} + + void FixedDoubleArray::set(int index, double value) { ASSERT(map() != HEAP->fixed_cow_array_map() && map() != HEAP->fixed_array_map()); @@ -1663,9 +1749,19 @@ void FixedDoubleArray::Initialize(FixedDoubleArray* from) { int old_length = from->length(); ASSERT(old_length < length()); - OS::MemCopy(FIELD_ADDR(this, kHeaderSize), - FIELD_ADDR(from, kHeaderSize), - old_length * kDoubleSize); + if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) { + OS::MemCopy(FIELD_ADDR(this, kHeaderSize), + FIELD_ADDR(from, kHeaderSize), + old_length * kDoubleSize); + } else { + for (int i = 0; i < old_length; ++i) { + if (from->is_the_hole(i)) { + set_the_hole(i); + } else { + set(i, from->get_scalar(i)); + } + } + } int offset = kHeaderSize + old_length * kDoubleSize; for (int current = from->length(); current < length(); ++current) { WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double()); @@ -1961,6 +2057,17 @@ template +int HashTable::ComputeCapacity(int at_least_space_for) { + const int kMinCapacity = 32; + int capacity = RoundUpToPowerOf2(at_least_space_for * 2); + if (capacity < kMinCapacity) { + capacity = kMinCapacity; // Guarantee min capacity. 
+ } + return capacity; +} + + +template int HashTable::FindEntry(Key key) { return FindEntry(GetIsolate(), key); } @@ -2024,6 +2131,7 @@ CAST_ACCESSOR(SeqString) CAST_ACCESSOR(SeqAsciiString) CAST_ACCESSOR(SeqTwoByteString) +CAST_ACCESSOR(SlicedString) CAST_ACCESSOR(ConsString) CAST_ACCESSOR(ExternalString) CAST_ACCESSOR(ExternalAsciiString) @@ -2047,6 +2155,7 @@ CAST_ACCESSOR(JSRegExp) CAST_ACCESSOR(JSProxy) CAST_ACCESSOR(JSFunctionProxy) +CAST_ACCESSOR(JSWeakMap) CAST_ACCESSOR(Foreign) CAST_ACCESSOR(ByteArray) CAST_ACCESSOR(ExternalArray) @@ -2075,12 +2184,6 @@ SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) -SMI_ACCESSORS(ByteArray, length, kLengthOffset) - -// TODO(1493): Investigate if it's possible to s/INT/SMI/ here (and -// subsequently unify H{Fixed,External}ArrayLength). -INT_ACCESSORS(ExternalArray, length, kLengthOffset) - SMI_ACCESSORS(String, length, kLengthOffset) @@ -2110,7 +2213,7 @@ MaybeObject* String::TryFlatten(PretenureFlag pretenure) { if (!StringShape(this).IsCons()) return this; ConsString* cons = ConsString::cast(this); - if (cons->second()->length() == 0) return cons->first(); + if (cons->IsFlat()) return cons->first(); return SlowTryFlatten(pretenure); } @@ -2118,10 +2221,8 @@ String* String::TryFlattenGetString(PretenureFlag pretenure) { MaybeObject* flat = TryFlatten(pretenure); Object* successfully_flattened; - if (flat->ToObject(&successfully_flattened)) { - return String::cast(successfully_flattened); - } - return this; + if (!flat->ToObject(&successfully_flattened)) return this; + return String::cast(successfully_flattened); } @@ -2139,6 +2240,9 @@ return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index); case kExternalStringTag | kTwoByteStringTag: return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index); + case kSlicedStringTag | kAsciiStringTag: + case kSlicedStringTag | kTwoByteStringTag: + return SlicedString::cast(this)->SlicedStringGet(index); default: break; } @@ -2159,15 +2263,19 @@ bool String::IsFlat() { - switch (StringShape(this).representation_tag()) { - case kConsStringTag: { - String* second = ConsString::cast(this)->second(); - // Only flattened strings have second part empty. - return second->length() == 0; - } - default: - return true; - } + if (!StringShape(this).IsCons()) return true; + return ConsString::cast(this)->second()->length() == 0; +} + + +String* String::GetUnderlying() { + // Giving direct access to underlying string only makes sense if the + // wrapping string is already flattened. 
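HashTable::ComputeCapacity, split out of Allocate above, doubles the requested element count, rounds up to a power of two and enforces a minimum of 32, leaving the kMaxCapacity overflow check in Allocate. A minimal sketch with a local power-of-two helper standing in for V8's RoundUpToPowerOf2 utility:

#include <cstdint>

// Round up to the next power of two (stand-in for V8's RoundUpToPowerOf2).
uint32_t RoundUpToPowerOf2(uint32_t x) {
  if (x <= 1) return 1;
  x--;
  x |= x >> 1;
  x |= x >> 2;
  x |= x >> 4;
  x |= x >> 8;
  x |= x >> 16;
  return x + 1;
}

int ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = static_cast<int>(
      RoundUpToPowerOf2(static_cast<uint32_t>(at_least_space_for) * 2));
  return capacity < kMinCapacity ? kMinCapacity : capacity;  // Guarantee min capacity.
}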
+ ASSERT(this->IsFlat()); + ASSERT(StringShape(this).IsIndirect()); + STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset); + const int kUnderlyingOffset = SlicedString::kParentOffset; + return String::cast(READ_FIELD(this, kUnderlyingOffset)); } @@ -2226,6 +2334,20 @@ } +String* SlicedString::parent() { + return String::cast(READ_FIELD(this, kParentOffset)); +} + + +void SlicedString::set_parent(String* parent) { + ASSERT(parent->IsSeqString()); + WRITE_FIELD(this, kParentOffset, parent); +} + + +SMI_ACCESSORS(SlicedString, offset, kOffsetOffset) + + String* ConsString::first() { return String::cast(READ_FIELD(this, kFirstOffset)); } @@ -2350,13 +2472,18 @@ } -uint8_t ExternalPixelArray::get(int index) { +uint8_t ExternalPixelArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); uint8_t* ptr = external_pixel_pointer(); return ptr[index]; } +MaybeObject* ExternalPixelArray::get(int index) { + return Smi::FromInt(static_cast(get_scalar(index))); +} + + void ExternalPixelArray::set(int index, uint8_t value) { ASSERT((index >= 0) && (index < this->length())); uint8_t* ptr = external_pixel_pointer(); @@ -2376,13 +2503,18 @@ } -int8_t ExternalByteArray::get(int index) { +int8_t ExternalByteArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); int8_t* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalByteArray::get(int index) { + return Smi::FromInt(static_cast(get_scalar(index))); +} + + void ExternalByteArray::set(int index, int8_t value) { ASSERT((index >= 0) && (index < this->length())); int8_t* ptr = static_cast(external_pointer()); @@ -2390,13 +2522,18 @@ } -uint8_t ExternalUnsignedByteArray::get(int index) { +uint8_t ExternalUnsignedByteArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); uint8_t* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalUnsignedByteArray::get(int index) { + return Smi::FromInt(static_cast(get_scalar(index))); +} + + void ExternalUnsignedByteArray::set(int index, uint8_t value) { ASSERT((index >= 0) && (index < this->length())); uint8_t* ptr = static_cast(external_pointer()); @@ -2404,13 +2541,18 @@ } -int16_t ExternalShortArray::get(int index) { +int16_t ExternalShortArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); int16_t* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalShortArray::get(int index) { + return Smi::FromInt(static_cast(get_scalar(index))); +} + + void ExternalShortArray::set(int index, int16_t value) { ASSERT((index >= 0) && (index < this->length())); int16_t* ptr = static_cast(external_pointer()); @@ -2418,13 +2560,18 @@ } -uint16_t ExternalUnsignedShortArray::get(int index) { +uint16_t ExternalUnsignedShortArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); uint16_t* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalUnsignedShortArray::get(int index) { + return Smi::FromInt(static_cast(get_scalar(index))); +} + + void ExternalUnsignedShortArray::set(int index, uint16_t value) { ASSERT((index >= 0) && (index < this->length())); uint16_t* ptr = static_cast(external_pointer()); @@ -2432,13 +2579,18 @@ } -int32_t ExternalIntArray::get(int index) { +int32_t ExternalIntArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); int32_t* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalIntArray::get(int index) { + 
return GetHeap()->NumberFromInt32(get_scalar(index)); +} + + void ExternalIntArray::set(int index, int32_t value) { ASSERT((index >= 0) && (index < this->length())); int32_t* ptr = static_cast(external_pointer()); @@ -2446,13 +2598,18 @@ } -uint32_t ExternalUnsignedIntArray::get(int index) { +uint32_t ExternalUnsignedIntArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); uint32_t* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalUnsignedIntArray::get(int index) { + return GetHeap()->NumberFromUint32(get_scalar(index)); +} + + void ExternalUnsignedIntArray::set(int index, uint32_t value) { ASSERT((index >= 0) && (index < this->length())); uint32_t* ptr = static_cast(external_pointer()); @@ -2460,13 +2617,18 @@ } -float ExternalFloatArray::get(int index) { +float ExternalFloatArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); float* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalFloatArray::get(int index) { + return GetHeap()->NumberFromDouble(get_scalar(index)); +} + + void ExternalFloatArray::set(int index, float value) { ASSERT((index >= 0) && (index < this->length())); float* ptr = static_cast(external_pointer()); @@ -2474,13 +2636,18 @@ } -double ExternalDoubleArray::get(int index) { +double ExternalDoubleArray::get_scalar(int index) { ASSERT((index >= 0) && (index < this->length())); double* ptr = static_cast(external_pointer()); return ptr[index]; } +MaybeObject* ExternalDoubleArray::get(int index) { + return GetHeap()->NumberFromDouble(get_scalar(index)); +} + + void ExternalDoubleArray::set(int index, double value) { ASSERT((index >= 0) && (index < this->length())); double* ptr = static_cast(external_pointer()); @@ -2757,7 +2924,8 @@ ASSERT(kind() == STUB || kind() == UNARY_OP_IC || kind() == BINARY_OP_IC || - kind() == COMPARE_IC); + kind() == COMPARE_IC || + kind() == TO_BOOLEAN_IC); return READ_BYTE_FIELD(this, kStubMajorKeyOffset); } @@ -2766,7 +2934,8 @@ ASSERT(kind() == STUB || kind() == UNARY_OP_IC || kind() == BINARY_OP_IC || - kind() == COMPARE_IC); + kind() == COMPARE_IC || + kind() == TO_BOOLEAN_IC); ASSERT(0 <= major && major < 256); WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major); } @@ -2908,6 +3077,17 @@ } +byte Code::to_boolean_state() { + ASSERT(is_to_boolean_ic_stub()); + return READ_BYTE_FIELD(this, kToBooleanTypeOffset); +} + + +void Code::set_to_boolean_state(byte value) { + ASSERT(is_to_boolean_ic_stub()); + WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value); +} + bool Code::is_inline_cache_stub() { Kind kind = this->kind(); return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND; @@ -3249,8 +3429,6 @@ ACCESSORS(FunctionTemplateInfo, access_check_info, Object, kAccessCheckInfoOffset) ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset) -ACCESSORS(FunctionTemplateInfo, prototype_attributes, Smi, - kPrototypeAttributesOffset) ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset) ACCESSORS(ObjectTemplateInfo, internal_field_count, Object, @@ -3305,6 +3483,8 @@ BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, kNeedsAccessCheckBit) +BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype, + kReadOnlyPrototypeBit) BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression, kIsExpressionBit) BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel, @@ -3743,6 +3923,15 @@ ACCESSORS(JSProxy, padding, 
Object, kPaddingOffset) +ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset) +ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset) + + +ObjectHashTable* JSWeakMap::unchecked_table() { + return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset)); +} + + Address Foreign::address() { return AddressFrom
(READ_INTPTR_FIELD(this, kAddressOffset)); } @@ -3928,6 +4117,11 @@ } +ElementsAccessor* JSObject::GetElementsAccessor() { + return ElementsAccessor::ForKind(GetElementsKind()); +} + + bool JSObject::HasFastElements() { return GetElementsKind() == FAST_ELEMENTS; } @@ -4201,6 +4395,11 @@ } +bool JSObject::HasHiddenProperties() { + return !GetHiddenProperties(OMIT_CREATION)->ToObjectChecked()->IsUndefined(); +} + + bool JSObject::HasElement(uint32_t index) { return HasElementWithReceiver(this, index); } @@ -4316,6 +4515,36 @@ } +bool ObjectHashTableShape::IsMatch(JSObject* key, Object* other) { + return key == JSObject::cast(other); +} + + +uint32_t ObjectHashTableShape::Hash(JSObject* key) { + MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION); + ASSERT(!maybe_hash->IsFailure()); + return Smi::cast(maybe_hash->ToObjectUnchecked())->value(); +} + + +uint32_t ObjectHashTableShape::HashForObject(JSObject* key, Object* other) { + MaybeObject* maybe_hash = JSObject::cast(other)->GetIdentityHash( + JSObject::OMIT_CREATION); + ASSERT(!maybe_hash->IsFailure()); + return Smi::cast(maybe_hash->ToObjectUnchecked())->value(); +} + + +MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) { + return key; +} + + +void ObjectHashTable::RemoveEntry(int entry) { + RemoveEntry(entry, GetHeap()); +} + + void Map::ClearCodeCache(Heap* heap) { // No write barrier is needed since empty_fixed_array is not in new space. // Please note this function is used during marking: diff -Nru libv8-3.4.14.21/src/objects-printer.cc libv8-3.5.10.24/src/objects-printer.cc --- libv8-3.4.14.21/src/objects-printer.cc 2011-06-20 15:33:18.000000000 +0000 +++ libv8-3.5.10.24/src/objects-printer.cc 2011-08-10 11:27:35.000000000 +0000 @@ -1,4 +1,4 @@ -// Copyright 2010 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -151,6 +151,9 @@ case JS_PROXY_TYPE: JSProxy::cast(this)->JSProxyPrint(out); break; + case JS_WEAK_MAP_TYPE: + JSWeakMap::cast(this)->JSWeakMapPrint(out); + break; case FOREIGN_TYPE: Foreign::cast(this)->ForeignPrint(out); break; @@ -282,17 +285,30 @@ } break; } + case FAST_DOUBLE_ELEMENTS: { + // Print in array notation for non-sparse arrays. 
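ObjectHashTableShape::Hash above relies on JSObject::GetIdentityHash, a hash code created lazily on first use and then kept with the object (as a hidden property). A minimal sketch of that contract, assuming a side table keyed by object address instead of hidden properties; IdentityHashRegistry, HashFlag and the RNG seeding are illustrative:

#include <cstdint>
#include <optional>
#include <random>
#include <unordered_map>

enum class HashFlag { kOmitCreation, kAllowCreation };

class IdentityHashRegistry {
 public:
  // Returns the object's identity hash. With kOmitCreation, an object that was
  // never hashed before yields no value, which is how a lookup can conclude the
  // object was never used as a key.
  std::optional<uint32_t> GetIdentityHash(const void* object, HashFlag flag) {
    auto it = hashes_.find(object);
    if (it != hashes_.end()) return it->second;
    if (flag == HashFlag::kOmitCreation) return std::nullopt;
    uint32_t hash = dist_(rng_);  // Created once, then sticks to the object.
    hashes_[object] = hash;
    return hash;
  }

 private:
  std::unordered_map<const void*, uint32_t> hashes_;
  std::mt19937 rng_{0x5eed};
  std::uniform_int_distribution<uint32_t> dist_;
};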
+ FixedDoubleArray* p = FixedDoubleArray::cast(elements()); + for (int i = 0; i < p->length(); i++) { + if (p->is_the_hole(i)) { + PrintF(out, " %d: ", i); + } else { + PrintF(out, " %d: %g", i, p->get_scalar(i)); + } + PrintF(out, "\n"); + } + break; + } case EXTERNAL_PIXEL_ELEMENTS: { ExternalPixelArray* p = ExternalPixelArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, p->get(i)); + PrintF(out, " %d: %d\n", i, p->get_scalar(i)); } break; } case EXTERNAL_BYTE_ELEMENTS: { ExternalByteArray* p = ExternalByteArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, static_cast(p->get(i))); + PrintF(out, " %d: %d\n", i, static_cast(p->get_scalar(i))); } break; } @@ -300,14 +316,14 @@ ExternalUnsignedByteArray* p = ExternalUnsignedByteArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, static_cast(p->get(i))); + PrintF(out, " %d: %d\n", i, static_cast(p->get_scalar(i))); } break; } case EXTERNAL_SHORT_ELEMENTS: { ExternalShortArray* p = ExternalShortArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, static_cast(p->get(i))); + PrintF(out, " %d: %d\n", i, static_cast(p->get_scalar(i))); } break; } @@ -315,14 +331,14 @@ ExternalUnsignedShortArray* p = ExternalUnsignedShortArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, static_cast(p->get(i))); + PrintF(out, " %d: %d\n", i, static_cast(p->get_scalar(i))); } break; } case EXTERNAL_INT_ELEMENTS: { ExternalIntArray* p = ExternalIntArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, static_cast(p->get(i))); + PrintF(out, " %d: %d\n", i, static_cast(p->get_scalar(i))); } break; } @@ -330,21 +346,21 @@ ExternalUnsignedIntArray* p = ExternalUnsignedIntArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %d\n", i, static_cast(p->get(i))); + PrintF(out, " %d: %d\n", i, static_cast(p->get_scalar(i))); } break; } case EXTERNAL_FLOAT_ELEMENTS: { ExternalFloatArray* p = ExternalFloatArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %f\n", i, p->get(i)); + PrintF(out, " %d: %f\n", i, p->get_scalar(i)); } break; } case EXTERNAL_DOUBLE_ELEMENTS: { ExternalDoubleArray* p = ExternalDoubleArray::cast(elements()); for (int i = 0; i < p->length(); i++) { - PrintF(out, " %d: %f\n", i, p->get(i)); + PrintF(out, " %d: %f\n", i, p->get_scalar(i)); } break; } @@ -360,9 +376,6 @@ } break; } - default: - UNREACHABLE(); - break; } } @@ -421,6 +434,7 @@ case CODE_TYPE: return "CODE"; case JS_ARRAY_TYPE: return "JS_ARRAY"; case JS_PROXY_TYPE: return "JS_PROXY"; + case JS_WEAK_MAP_TYPE: return "JS_WEAK_MAP"; case JS_REGEXP_TYPE: return "JS_REGEXP"; case JS_VALUE_TYPE: return "JS_VALUE"; case JS_GLOBAL_OBJECT_TYPE: return "JS_GLOBAL_OBJECT"; @@ -550,6 +564,21 @@ } +// This method is only meant to be called from gdb for debugging purposes. +// Since the string can also be in two-byte encoding, non-ascii characters +// will be ignored in the output. +char* String::ToAsciiArray() { + // Static so that subsequent calls frees previously allocated space. + // This also means that previous results will be overwritten. 
+ static char* buffer = NULL; + if (buffer != NULL) free(buffer); + buffer = new char[length()+1]; + WriteToFlat(this, buffer, 0, length()); + buffer[length()] = 0; + return buffer; +} + + void JSProxy::JSProxyPrint(FILE* out) { HeapObject::PrintHeader(out, "JSProxy"); PrintF(out, " - map = 0x%p\n", reinterpret_cast(map())); @@ -559,6 +588,16 @@ } +void JSWeakMap::JSWeakMapPrint(FILE* out) { + HeapObject::PrintHeader(out, "JSWeakMap"); + PrintF(out, " - map = 0x%p\n", reinterpret_cast(map())); + PrintF(out, " - number of elements = %d\n", table()->NumberOfElements()); + PrintF(out, " - table = "); + table()->ShortPrint(out); + PrintF(out, "\n"); +} + + void JSFunction::JSFunctionPrint(FILE* out) { HeapObject::PrintHeader(out, "Function"); PrintF(out, " - map = 0x%p\n", reinterpret_cast(map())); diff -Nru libv8-3.4.14.21/src/objects-visiting.cc libv8-3.5.10.24/src/objects-visiting.cc --- libv8-3.4.14.21/src/objects-visiting.cc 2011-07-06 11:27:02.000000000 +0000 +++ libv8-3.5.10.24/src/objects-visiting.cc 2011-08-29 10:41:00.000000000 +0000 @@ -58,6 +58,9 @@ return kVisitConsString; } + case kSlicedStringTag: + return kVisitSlicedString; + case kExternalStringTag: return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric, @@ -88,6 +91,9 @@ case JS_GLOBAL_PROPERTY_CELL_TYPE: return kVisitPropertyCell; + case JS_WEAK_MAP_TYPE: + return kVisitJSWeakMap; + case JS_REGEXP_TYPE: return kVisitJSRegExp; diff -Nru libv8-3.4.14.21/src/objects-visiting.h libv8-3.5.10.24/src/objects-visiting.h --- libv8-3.4.14.21/src/objects-visiting.h 2011-07-11 11:41:22.000000000 +0000 +++ libv8-3.5.10.24/src/objects-visiting.h 2011-08-29 10:41:00.000000000 +0000 @@ -115,12 +115,14 @@ kVisitStructGeneric, kVisitConsString, + kVisitSlicedString, kVisitOddball, kVisitCode, kVisitMap, kVisitPropertyCell, kVisitSharedFunctionInfo, kVisitJSFunction, + kVisitJSWeakMap, kVisitJSRegExp, kVisitorIdCount, @@ -298,6 +300,11 @@ ConsString::BodyDescriptor, int>::Visit); + table_.Register(kVisitSlicedString, + &FixedBodyVisitor::Visit); + table_.Register(kVisitFixedArray, &FlexibleBodyVisitor::Visit); - table_.Register(kVisitJSRegExp, &VisitJSRegExp); + table_.Register(kVisitJSWeakMap, &VisitJSObject); + + table_.Register(kVisitJSRegExp, &VisitJSObject); table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString); @@ -356,15 +365,15 @@ return FixedDoubleArray::SizeFor(length); } + static inline int VisitJSObject(Map* map, HeapObject* object) { + return JSObjectVisitor::Visit(map, object); + } + static inline int VisitSeqAsciiString(Map* map, HeapObject* object) { return SeqAsciiString::cast(object)-> SeqAsciiStringSize(map->instance_type()); } - static inline int VisitJSRegExp(Map* map, HeapObject* object) { - return JSObjectVisitor::Visit(map, object); - } - static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) { return SeqTwoByteString::cast(object)-> SeqTwoByteStringSize(map->instance_type()); diff -Nru libv8-3.4.14.21/src/parser.cc libv8-3.5.10.24/src/parser.cc --- libv8-3.4.14.21/src/parser.cc 2011-08-09 12:57:00.000000000 +0000 +++ libv8-3.5.10.24/src/parser.cc 2011-10-17 09:05:38.000000000 +0000 @@ -584,7 +584,8 @@ pre_data_(pre_data), fni_(NULL), stack_overflow_(false), - parenthesized_function_(false) { + parenthesized_function_(false), + harmony_block_scoping_(false) { AstNode::ResetIds(); } @@ -809,6 +810,10 @@ isolate()->Throw(*result, &location); } +void Parser::SetHarmonyBlockScoping(bool block_scoping) { + scanner().SetHarmonyBlockScoping(block_scoping); + harmony_block_scoping_ = 
block_scoping; +} // Base class containing common code for the different finder classes used by // the parser. @@ -945,17 +950,18 @@ }; -// A ThisNamedPropertyAssigmentFinder finds and marks statements of the form +// A ThisNamedPropertyAssignmentFinder finds and marks statements of the form // this.x = ...;, where x is a named property. It also determines whether a // function contains only assignments of this type. -class ThisNamedPropertyAssigmentFinder : public ParserFinder { +class ThisNamedPropertyAssignmentFinder : public ParserFinder { public: - explicit ThisNamedPropertyAssigmentFinder(Isolate* isolate) + explicit ThisNamedPropertyAssignmentFinder(Isolate* isolate) : isolate_(isolate), only_simple_this_property_assignments_(true), - names_(NULL), - assigned_arguments_(NULL), - assigned_constants_(NULL) {} + names_(0), + assigned_arguments_(0), + assigned_constants_(0) { + } void Update(Scope* scope, Statement* stat) { // Bail out if function already has property assignment that are @@ -982,19 +988,17 @@ // Returns a fixed array containing three elements for each assignment of the // form this.x = y; Handle GetThisPropertyAssignments() { - if (names_ == NULL) { + if (names_.is_empty()) { return isolate_->factory()->empty_fixed_array(); } - ASSERT(names_ != NULL); - ASSERT(assigned_arguments_ != NULL); - ASSERT_EQ(names_->length(), assigned_arguments_->length()); - ASSERT_EQ(names_->length(), assigned_constants_->length()); + ASSERT_EQ(names_.length(), assigned_arguments_.length()); + ASSERT_EQ(names_.length(), assigned_constants_.length()); Handle assignments = - isolate_->factory()->NewFixedArray(names_->length() * 3); - for (int i = 0; i < names_->length(); i++) { - assignments->set(i * 3, *names_->at(i)); - assignments->set(i * 3 + 1, Smi::FromInt(assigned_arguments_->at(i))); - assignments->set(i * 3 + 2, *assigned_constants_->at(i)); + isolate_->factory()->NewFixedArray(names_.length() * 3); + for (int i = 0; i < names_.length(); ++i) { + assignments->set(i * 3, *names_[i]); + assignments->set(i * 3 + 1, Smi::FromInt(assigned_arguments_[i])); + assignments->set(i * 3 + 2, *assigned_constants_[i]); } return assignments; } @@ -1051,18 +1055,37 @@ AssignmentFromSomethingElse(); } + + + + // We will potentially reorder the property assignments, so they must be + // simple enough that the ordering does not matter. 
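Illustration (not part of the upstream diff): the "simple this property assignments" tracked by this finder are function bodies consisting only of statements of the form this.name = value, where the value is a parameter or a constant. The rewritten AssignmentFromParameter/AssignmentFromConstant below additionally collapse repeated assignments to the same property name, keeping only the last one, which is why the comment above insists that reordering the collected entries must be safe. A minimal JavaScript sketch (the identifiers are invented for the example):

    function Point(x, y) {
      this.x = x;           // assignment from a parameter
      this.y = y;           // assignment from a parameter
      this.color = "red";   // assignment from a constant
      this.color = "blue";  // same property again: only this last value is recorded
    }

Any statement the finder does not recognize routes through AssignmentFromSomethingElse() and clears only_simple_this_property_assignments_ for that function.
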
void AssignmentFromParameter(Handle name, int index) { - EnsureAllocation(); - names_->Add(name); - assigned_arguments_->Add(index); - assigned_constants_->Add(isolate_->factory()->undefined_value()); + EnsureInitialized(); + for (int i = 0; i < names_.length(); ++i) { + if (name->Equals(*names_[i])) { + assigned_arguments_[i] = index; + assigned_constants_[i] = isolate_->factory()->undefined_value(); + return; + } + } + names_.Add(name); + assigned_arguments_.Add(index); + assigned_constants_.Add(isolate_->factory()->undefined_value()); } void AssignmentFromConstant(Handle name, Handle value) { - EnsureAllocation(); - names_->Add(name); - assigned_arguments_->Add(-1); - assigned_constants_->Add(value); + EnsureInitialized(); + for (int i = 0; i < names_.length(); ++i) { + if (name->Equals(*names_[i])) { + assigned_arguments_[i] = -1; + assigned_constants_[i] = value; + return; + } + } + names_.Add(name); + assigned_arguments_.Add(-1); + assigned_constants_.Add(value); } void AssignmentFromSomethingElse() { @@ -1070,25 +1093,43 @@ only_simple_this_property_assignments_ = false; } - void EnsureAllocation() { - if (names_ == NULL) { - ASSERT(assigned_arguments_ == NULL); - ASSERT(assigned_constants_ == NULL); - Zone* zone = isolate_->zone(); - names_ = new(zone) ZoneStringList(4); - assigned_arguments_ = new(zone) ZoneList(4); - assigned_constants_ = new(zone) ZoneObjectList(4); + void EnsureInitialized() { + if (names_.capacity() == 0) { + ASSERT(assigned_arguments_.capacity() == 0); + ASSERT(assigned_constants_.capacity() == 0); + names_.Initialize(4); + assigned_arguments_.Initialize(4); + assigned_constants_.Initialize(4); } } Isolate* isolate_; bool only_simple_this_property_assignments_; - ZoneStringList* names_; - ZoneList* assigned_arguments_; - ZoneObjectList* assigned_constants_; + ZoneStringList names_; + ZoneList assigned_arguments_; + ZoneObjectList assigned_constants_; }; +Statement* Parser::ParseSourceElement(ZoneStringList* labels, + bool* ok) { + if (peek() == Token::FUNCTION) { + // FunctionDeclaration is only allowed in the context of SourceElements + // (Ecma 262 5th Edition, clause 14): + // SourceElement: + // Statement + // FunctionDeclaration + // Common language extension is to allow function declaration in place + // of any statement. This language extension is disabled in strict mode. + return ParseFunctionDeclaration(ok); + } else if (peek() == Token::LET) { + return ParseVariableStatement(kSourceElement, ok); + } else { + return ParseStatement(labels, ok); + } +} + + void* Parser::ParseSourceElements(ZoneList* processor, int end_token, bool* ok) { @@ -1103,7 +1144,7 @@ ASSERT(processor != NULL); InitializationBlockFinder block_finder(top_scope_, target_stack_); - ThisNamedPropertyAssigmentFinder this_property_assignment_finder(isolate()); + ThisNamedPropertyAssignmentFinder this_property_assignment_finder(isolate()); bool directive_prologue = true; // Parsing directive prologue. while (peek() != end_token) { @@ -1112,21 +1153,7 @@ } Scanner::Location token_loc = scanner().peek_location(); - - Statement* stat; - if (peek() == Token::FUNCTION) { - // FunctionDeclaration is only allowed in the context of SourceElements - // (Ecma 262 5th Edition, clause 14): - // SourceElement: - // Statement - // FunctionDeclaration - // Common language extension is to allow function declaration in place - // of any statement. This language extension is disabled in strict mode. 
- stat = ParseFunctionDeclaration(CHECK_OK); - } else { - stat = ParseStatement(NULL, CHECK_OK); - } - + Statement* stat = ParseSourceElement(NULL, CHECK_OK); if (stat == NULL || stat->IsEmpty()) { directive_prologue = false; // End of directive prologue. continue; @@ -1214,7 +1241,7 @@ case Token::CONST: // fall through case Token::VAR: - stmt = ParseVariableStatement(ok); + stmt = ParseVariableStatement(kStatement, ok); break; case Token::SEMICOLON: @@ -1309,9 +1336,9 @@ bool resolve, bool* ok) { Variable* var = NULL; - // If we are inside a function, a declaration of a variable - // is a truly local variable, and the scope of the variable - // is always the function scope. + // If we are inside a function, a declaration of a var/const variable is a + // truly local variable, and the scope of the variable is always the function + // scope. // If a function scope exists, then we can statically declare this // variable and also set its mode. In any case, a Declaration node @@ -1321,24 +1348,28 @@ // to the calling function context. // Similarly, strict mode eval scope does not leak variable declarations to // the caller's scope so we declare all locals, too. - Scope* declaration_scope = top_scope_->DeclarationScope(); + + Scope* declaration_scope = mode == Variable::LET ? top_scope_ + : top_scope_->DeclarationScope(); if (declaration_scope->is_function_scope() || - declaration_scope->is_strict_mode_eval_scope()) { + declaration_scope->is_strict_mode_eval_scope() || + declaration_scope->is_block_scope()) { // Declare the variable in the function scope. var = declaration_scope->LocalLookup(name); if (var == NULL) { // Declare the name. var = declaration_scope->DeclareLocal(name, mode); } else { - // The name was declared before; check for conflicting - // re-declarations. If the previous declaration was a const or the - // current declaration is a const then we have a conflict. There is - // similar code in runtime.cc in the Declare functions. - if ((mode == Variable::CONST) || (var->mode() == Variable::CONST)) { - // We only have vars and consts in declarations. + // The name was declared before; check for conflicting re-declarations. + // We have a conflict if either of the declarations is not a var. There + // is similar code in runtime.cc in the Declare functions. + if ((mode != Variable::VAR) || (var->mode() != Variable::VAR)) { + // We only have vars, consts and lets in declarations. ASSERT(var->mode() == Variable::VAR || - var->mode() == Variable::CONST); - const char* type = (var->mode() == Variable::VAR) ? "var" : "const"; + var->mode() == Variable::CONST || + var->mode() == Variable::LET); + const char* type = (var->mode() == Variable::VAR) ? "var" : + (var->mode() == Variable::CONST) ? "const" : "let"; Handle type_string = isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED); Expression* expression = @@ -1481,12 +1512,15 @@ // Even if we're not at the top-level of the global or a function // scope, we treat is as such and introduce the function with it's // initial value upon entering the corresponding scope. - Declare(name, Variable::VAR, fun, true, CHECK_OK); + Variable::Mode mode = harmony_block_scoping_ ? 
Variable::LET : Variable::VAR; + Declare(name, mode, fun, true, CHECK_OK); return EmptyStatement(); } Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) { + if (harmony_block_scoping_) return ParseScopedBlock(labels, ok); + // Block :: // '{' Statement* '}' @@ -1510,12 +1544,65 @@ } -Block* Parser::ParseVariableStatement(bool* ok) { +Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) { + // Construct block expecting 16 statements. + Block* body = new(zone()) Block(isolate(), labels, 16, false); + Scope* saved_scope = top_scope_; + Scope* block_scope = NewScope(top_scope_, + Scope::BLOCK_SCOPE, + inside_with()); + body->set_block_scope(block_scope); + block_scope->DeclareLocal(isolate()->factory()->block_scope_symbol(), + Variable::VAR); + if (top_scope_->is_strict_mode()) { + block_scope->EnableStrictMode(); + } + top_scope_ = block_scope; + + // Parse the statements and collect escaping labels. + TargetCollector collector; + Target target(&this->target_stack_, &collector); + Expect(Token::LBRACE, CHECK_OK); + { + Target target_body(&this->target_stack_, body); + InitializationBlockFinder block_finder(top_scope_, target_stack_); + + while (peek() != Token::RBRACE) { + Statement* stat = ParseSourceElement(NULL, CHECK_OK); + if (stat && !stat->IsEmpty()) { + body->AddStatement(stat); + block_finder.Update(stat); + } + } + } + Expect(Token::RBRACE, CHECK_OK); + + // Create exit block. + Block* exit = new(zone()) Block(isolate(), NULL, 1, false); + exit->AddStatement(new(zone()) ExitContextStatement()); + + // Create a try-finally statement. + TryFinallyStatement* try_finally = + new(zone()) TryFinallyStatement(body, exit); + try_finally->set_escaping_targets(collector.targets()); + top_scope_ = saved_scope; + + // Create a result block. + Block* result = new(zone()) Block(isolate(), NULL, 1, false); + result->AddStatement(try_finally); + return result; +} + + +Block* Parser::ParseVariableStatement(VariableDeclarationContext var_context, + bool* ok) { // VariableStatement :: // VariableDeclarations ';' Handle ignore; - Block* result = ParseVariableDeclarations(true, &ignore, CHECK_OK); + Block* result = ParseVariableDeclarations(var_context, + &ignore, + CHECK_OK); ExpectSemicolon(CHECK_OK); return result; } @@ -1532,33 +1619,54 @@ // *var is untouched; in particular, it is the caller's responsibility // to initialize it properly. This mechanism is used for the parsing // of 'for-in' loops. -Block* Parser::ParseVariableDeclarations(bool accept_IN, +Block* Parser::ParseVariableDeclarations(VariableDeclarationContext var_context, Handle* out, bool* ok) { // VariableDeclarations :: // ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[','] Variable::Mode mode = Variable::VAR; + // True if the binding needs initialization. 'let' and 'const' declared + // bindings are created uninitialized by their declaration nodes and + // need initialization. 'var' declared bindings are always initialized + // immediately by their declaration nodes. 
+ bool needs_init = false; bool is_const = false; - Scope* declaration_scope = top_scope_->DeclarationScope(); + Token::Value init_op = Token::INIT_VAR; if (peek() == Token::VAR) { Consume(Token::VAR); } else if (peek() == Token::CONST) { Consume(Token::CONST); - if (declaration_scope->is_strict_mode()) { + if (top_scope_->is_strict_mode()) { ReportMessage("strict_const", Vector::empty()); *ok = false; return NULL; } mode = Variable::CONST; is_const = true; + needs_init = true; + init_op = Token::INIT_CONST; + } else if (peek() == Token::LET) { + Consume(Token::LET); + if (var_context != kSourceElement && + var_context != kForStatement) { + ASSERT(var_context == kStatement); + ReportMessage("unprotected_let", Vector::empty()); + *ok = false; + return NULL; + } + mode = Variable::LET; + needs_init = true; + init_op = Token::INIT_LET; } else { UNREACHABLE(); // by current callers } - // The scope of a variable/const declared anywhere inside a function + Scope* declaration_scope = mode == Variable::LET + ? top_scope_ : top_scope_->DeclarationScope(); + // The scope of a var/const declared variable anywhere inside a function // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can - // transform a source-level variable/const declaration into a (Function) + // transform a source-level var/const declaration into a (Function) // Scope declaration, and rewrite the source-level initialization into an // assignment statement. We use a block to collect multiple assignments. // @@ -1642,18 +1750,19 @@ if (peek() == Token::ASSIGN) { Expect(Token::ASSIGN, CHECK_OK); position = scanner().location().beg_pos; - value = ParseAssignmentExpression(accept_IN, CHECK_OK); + value = ParseAssignmentExpression(var_context != kForStatement, CHECK_OK); // Don't infer if it is "a = function(){...}();"-like expression. if (fni_ != NULL && value->AsCall() == NULL && value->AsCallNew() == NULL) { fni_->Infer(); + } else { + fni_->RemoveLastFunction(); } } - // Make sure that 'const c' actually initializes 'c' to undefined - // even though it seems like a stupid thing to do. - if (value == NULL && is_const) { + // Make sure that 'const x' and 'let x' initialize 'x' to undefined. + if (value == NULL && needs_init) { value = GetLiteralUndefined(); } @@ -1741,12 +1850,11 @@ // for constant lookups is always the function context, while it is // the top context for variables). Sigh... if (value != NULL) { - Token::Value op = (is_const ? Token::INIT_CONST : Token::INIT_VAR); bool in_with = is_const ? false : inside_with(); VariableProxy* proxy = initialization_scope->NewUnresolved(name, in_with); Assignment* assignment = - new(zone()) Assignment(isolate(), op, proxy, value, position); + new(zone()) Assignment(isolate(), init_op, proxy, value, position); if (block) { block->AddStatement(new(zone()) ExpressionStatement(assignment)); } @@ -1956,41 +2064,6 @@ } -Block* Parser::WithHelper(Expression* obj, ZoneStringList* labels, bool* ok) { - // Parse the statement and collect escaping labels. - TargetCollector collector; - Statement* stat; - { Target target(&this->target_stack_, &collector); - with_nesting_level_++; - top_scope_->DeclarationScope()->RecordWithStatement(); - stat = ParseStatement(labels, CHECK_OK); - with_nesting_level_--; - } - // Create resulting block with two statements. - // 1: Evaluate the with expression. - // 2: The try-finally block evaluating the body. 
- Block* result = new(zone()) Block(isolate(), NULL, 2, false); - - if (result != NULL) { - result->AddStatement(new(zone()) EnterWithContextStatement(obj)); - - // Create body block. - Block* body = new(zone()) Block(isolate(), NULL, 1, false); - body->AddStatement(stat); - - // Create exit block. - Block* exit = new(zone()) Block(isolate(), NULL, 1, false); - exit->AddStatement(new(zone()) ExitContextStatement()); - - // Return a try-finally statement. - TryFinallyStatement* wrapper = new(zone()) TryFinallyStatement(body, exit); - wrapper->set_escaping_targets(collector.targets()); - result->AddStatement(wrapper); - } - return result; -} - - Statement* Parser::ParseWithStatement(ZoneStringList* labels, bool* ok) { // WithStatement :: // 'with' '(' Expression ')' Statement @@ -2007,7 +2080,11 @@ Expression* expr = ParseExpression(true, CHECK_OK); Expect(Token::RPAREN, CHECK_OK); - return WithHelper(expr, labels, CHECK_OK); + ++with_nesting_level_; + top_scope_->DeclarationScope()->RecordWithStatement(); + Statement* stmt = ParseStatement(labels, CHECK_OK); + --with_nesting_level_; + return new(zone()) WithStatement(expr, stmt); } @@ -2142,39 +2219,22 @@ Expect(Token::RPAREN, CHECK_OK); if (peek() == Token::LBRACE) { - // Rewrite the catch body B to a single statement block - // { try B finally { PopContext }}. - Block* inner_body; - // We need to collect escapes from the body for both the inner - // try/finally used to pop the catch context and any possible outer - // try/finally. - TargetCollector inner_collector; - { Target target(&this->target_stack_, &catch_collector); - { Target target(&this->target_stack_, &inner_collector); - catch_scope = NewScope(top_scope_, Scope::CATCH_SCOPE, inside_with()); - if (top_scope_->is_strict_mode()) { - catch_scope->EnableStrictMode(); - } - catch_variable = catch_scope->DeclareLocal(name, Variable::VAR); - - Scope* saved_scope = top_scope_; - top_scope_ = catch_scope; - inner_body = ParseBlock(NULL, CHECK_OK); - top_scope_ = saved_scope; - } + // Rewrite the catch body { B } to a block: + // { { B } ExitContext; }. + Target target(&this->target_stack_, &catch_collector); + catch_scope = NewScope(top_scope_, Scope::CATCH_SCOPE, inside_with()); + if (top_scope_->is_strict_mode()) { + catch_scope->EnableStrictMode(); } + catch_variable = catch_scope->DeclareLocal(name, Variable::VAR); + catch_block = new(zone()) Block(isolate(), NULL, 2, false); - // Create exit block. - Block* inner_finally = new(zone()) Block(isolate(), NULL, 1, false); - inner_finally->AddStatement(new(zone()) ExitContextStatement()); - - // Create a try/finally statement. 
- TryFinallyStatement* inner_try_finally = - new(zone()) TryFinallyStatement(inner_body, inner_finally); - inner_try_finally->set_escaping_targets(inner_collector.targets()); - - catch_block = new(zone()) Block(isolate(), NULL, 1, false); - catch_block->AddStatement(inner_try_finally); + Scope* saved_scope = top_scope_; + top_scope_ = catch_scope; + Block* catch_body = ParseBlock(NULL, CHECK_OK); + top_scope_ = saved_scope; + catch_block->AddStatement(catch_body); + catch_block->AddStatement(new(zone()) ExitContextStatement()); } else { Expect(Token::LBRACE, CHECK_OK); } @@ -2290,7 +2350,7 @@ if (peek() == Token::VAR || peek() == Token::CONST) { Handle name; Block* variable_statement = - ParseVariableDeclarations(false, &name, CHECK_OK); + ParseVariableDeclarations(kForStatement, &name, CHECK_OK); if (peek() == Token::IN && !name.is_null()) { VariableProxy* each = top_scope_->NewUnresolved(name, inside_with()); @@ -2448,6 +2508,8 @@ || op == Token::ASSIGN) && (right->AsCall() == NULL && right->AsCallNew() == NULL)) { fni_->Infer(); + } else { + fni_->RemoveLastFunction(); } fni_->Leave(); } @@ -2760,7 +2822,7 @@ Handle name = callee->name(); Variable* var = top_scope_->Lookup(name); if (var == NULL) { - top_scope_->RecordEvalCall(); + top_scope_->DeclarationScope()->RecordEvalCall(); } } result = NewCall(result, args, pos); @@ -3649,8 +3711,11 @@ } int num_parameters = 0; - // Function declarations are hoisted. - Scope* scope = (type == FunctionLiteral::DECLARATION) + // Function declarations are function scoped in normal mode, so they are + // hoisted. In harmony block scoping mode they are block scoped, so they + // are not hoisted. + Scope* scope = (type == FunctionLiteral::DECLARATION && + !harmony_block_scoping_) ? NewScope(top_scope_->DeclarationScope(), Scope::FUNCTION_SCOPE, false) : NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with()); ZoneList* body = new(zone()) ZoneList(8); @@ -3952,7 +4017,7 @@ } -// Parses and identifier that is valid for the current scope, in particular it +// Parses an identifier that is valid for the current scope, in particular it // fails on strict mode future reserved keywords in a strict scope. Handle Parser::ParseIdentifier(bool* ok) { if (top_scope_->is_strict_mode()) { @@ -5033,9 +5098,11 @@ // Create a Scanner for the preparser to use as input, and preparse the source. static ScriptDataImpl* DoPreParse(UC16CharacterStream* source, bool allow_lazy, - ParserRecorder* recorder) { + ParserRecorder* recorder, + bool harmony_block_scoping) { Isolate* isolate = Isolate::Current(); JavaScriptScanner scanner(isolate->unicode_cache()); + scanner.SetHarmonyBlockScoping(harmony_block_scoping); scanner.Initialize(source); intptr_t stack_limit = isolate->stack_guard()->real_climit(); if (!preparser::PreParser::PreParseProgram(&scanner, @@ -5056,7 +5123,8 @@ // Preparse, but only collect data that is immediately useful, // even if the preparser data is only used once. ScriptDataImpl* ParserApi::PartialPreParse(UC16CharacterStream* source, - v8::Extension* extension) { + v8::Extension* extension, + bool harmony_block_scoping) { bool allow_lazy = FLAG_lazy && (extension == NULL); if (!allow_lazy) { // Partial preparsing is only about lazily compiled functions. 
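
Illustration (not part of the upstream diff): the harmony_block_scoping flag threaded through the scanner, parser and preparser in these hunks switches several constructs from function scoping to block scoping. A small JavaScript sketch of the semantics the new code paths implement; how the flag is actually enabled (for example via a V8/d8 command-line option) is not shown in these hunks, and the identifiers below are invented:

    function f() {
      {
        let x = 1;                  // parsed via ParseScopedBlock: x lives only in this block
        function g() { return x; }  // declared as Variable::LET here, so g is block
                                    // scoped and is not hoisted out to f()
      }
      // Neither x nor g is visible at this point.
    }

A let declaration that appears where only a plain statement is allowed, rather than as a source element or in a for statement header, is rejected with the "unprotected_let" error, per ParseVariableDeclarations above.
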
@@ -5064,16 +5132,17 @@ return NULL; } PartialParserRecorder recorder; - return DoPreParse(source, allow_lazy, &recorder); + return DoPreParse(source, allow_lazy, &recorder, harmony_block_scoping); } ScriptDataImpl* ParserApi::PreParse(UC16CharacterStream* source, - v8::Extension* extension) { + v8::Extension* extension, + bool harmony_block_scoping) { Handle