diff --git a/.cirrus.yml b/.cirrus.yml
new file mode 100644
index 00000000000..7000bf6816b
--- /dev/null
+++ b/.cirrus.yml
@@ -0,0 +1,31 @@
+#-------------------------------------------------------------------------------------------------------
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+#-------------------------------------------------------------------------------------------------------
+
+task:
+ name: CMake ARM64.macOS.Debug (noJit)
+ macos_instance:
+ image: ghcr.io/cirruslabs/macos-ventura-xcode
+ Dependencies_script: brew install ninja icu4c && mkdir -p build
+ CMake_script: cd build && cmake -GNinja -DCMAKE_BUILD_TYPE=Debug -DSTATIC_LIBRARY=ON -DICU_INCLUDE_PATH=/opt/homebrew/opt/icu4c/include -DDISABLE_JIT=ON -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ..
+ Build_script: cd build && ninja
+ Test_script: cd build && ninja check
+
+task:
+ name: CMake ARM64.macOS.ReleaseWithDebug (noJit)
+ macos_instance:
+ image: ghcr.io/cirruslabs/macos-ventura-xcode
+ Dependencies_script: brew install ninja icu4c && mkdir -p build
+ CMake_script: cd build && cmake -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DICU_INCLUDE_PATH=/opt/homebrew/opt/icu4c/include -DDISABLE_JIT=ON -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ..
+ Build_script: cd build && ninja
+ Test_script: cd build && ninja check
+
+task:
+ name: CMake ARM64.macOS.Release (noJit)
+ macos_instance:
+ image: ghcr.io/cirruslabs/macos-ventura-xcode
+ Dependencies_script: brew install ninja icu4c && mkdir -p build
+ CMake_script: cd build && cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DSTATIC_LIBRARY=ON -DICU_INCLUDE_PATH=/opt/homebrew/opt/icu4c/include -DDISABLE_JIT=ON -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ..
+ Build_script: cd build && ninja
+ Test_script: cd build && ninja check
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000000..90120a467d7
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,18 @@
+root = true
+
+# See https://github.com/chakra-core/ChakraCore/wiki/Coding-Convention
+
+[*]
+indent_style = space
+indent_size = 4
+
+# See https://learn.microsoft.com/en-us/visualstudio/ide/cpp-editorconfig-properties?view=vs-2019
+[*.{cpp,h,inl}]
+cpp_space_pointer_reference_alignment = right
+cpp_new_line_before_open_brace_block = new_line
+cpp_new_line_before_catch = true
+cpp_new_line_before_else = true
+
+# XML files
+[*.xml]
+indent_size = 2
diff --git a/.gitattributes b/.gitattributes
index d179809ca9c..3c1cc129f9e 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -3,6 +3,7 @@
test/**/*.js -crlf
test/es6/HTMLComments.js binary diff=cpp
*.wasm binary
+lib/**/*.js eol=lf diff=cpp
*.cpp text eol=lf diff=cpp
*.h text eol=lf diff=cpp
*.inl text eol=lf diff=cpp
diff --git a/.github/ISSUE_TEMPLATE/01-bug.yml b/.github/ISSUE_TEMPLATE/01-bug.yml
new file mode 100644
index 00000000000..c1a87ff2a0c
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/01-bug.yml
@@ -0,0 +1,37 @@
+name: Bug report
+description: Report a general bug in ChakraCore
+title: '[Bug]: '
+labels:
+ - Bug
+body:
+ - type: input
+ attributes:
+ label: ChakraCore Version
+ description: Specify the version of ChakraCore you are using
+ placeholder: Version / Commit id
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Steps to reproduce
+ description: Provide steps to reproduce the problem
+ placeholder: Instructions used to build and execute / Project setup
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Proof of concept
+ description: Your POC code
+ render: js
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Exception or Error
+ description: Provide error logs / console output
+ render: text
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Additional Context
diff --git a/.github/ISSUE_TEMPLATE/02-build-issue.yml b/.github/ISSUE_TEMPLATE/02-build-issue.yml
new file mode 100644
index 00000000000..11f11a4eadc
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/02-build-issue.yml
@@ -0,0 +1,36 @@
+name: Build Issue
+description: Report a build issue
+title: '[Build]: '
+labels:
+ - Build Break
+body:
+ - type: input
+ attributes:
+ label: Operating System
+ description: Specify the OS (with version) you are using
+ placeholder: Name + Version
+ validations:
+ required: true
+ - type: input
+ attributes:
+ label: Compiler
+ description: Specify the compiler (with version) you are using
+ placeholder: Name + Version
+ validations:
+ required: true
+ - type: input
+ attributes:
+ label: ChakraCore Version
+ description: Specify the version of ChakraCore you are using
+ placeholder: Version / Commit id
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Compiler output
+ render: shell
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Additional Context
diff --git a/.github/ISSUE_TEMPLATE/03-jsrt-bug.yaml b/.github/ISSUE_TEMPLATE/03-jsrt-bug.yaml
new file mode 100644
index 00000000000..98924206a81
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/03-jsrt-bug.yaml
@@ -0,0 +1,37 @@
+name: API Bug
+description: Report a bug with the embedding API
+title: '[API]: '
+labels:
+ - APIs
+ - Bug
+body:
+ - type: input
+ attributes:
+ label: Operating System
+ description: Specify the OS (with version) you are using
+ placeholder: Name + Version
+ validations:
+ required: true
+ - type: input
+ attributes:
+ label: ChakraCore Version
+ description: Specify the version of ChakraCore you are using
+ placeholder: Version / Commit id
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Steps to reproduce
+ description: Please describe how your project is set up
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Proof of concept
+ description: Add a minimal proof of concept that triggers the bug
+ render: cpp
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Additional context
diff --git a/.github/ISSUE_TEMPLATE/04-jsrt-feature-request.yml b/.github/ISSUE_TEMPLATE/04-jsrt-feature-request.yml
new file mode 100644
index 00000000000..7452ce935b1
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/04-jsrt-feature-request.yml
@@ -0,0 +1,24 @@
+name: API Feature Request
+description: Propose a new feature for the embedding API
+title: '[API]: '
+labels:
+ - APIs
+ - Feature Request
+body:
+ - type: textarea
+ attributes:
+ label: Description
+ description: A clear and concise description of the problem or missing capability
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Describe the solution you'd like
+ description: If you have a solution in mind, please describe it.
+ - type: textarea
+ attributes:
+ label: Describe alternatives you've considered
+ description: Have you considered any alternative solutions or workarounds?
+ - type: textarea
+ attributes:
+ label: Additional context
diff --git a/.github/ISSUE_TEMPLATE/05-tc39-proposal.yml b/.github/ISSUE_TEMPLATE/05-tc39-proposal.yml
new file mode 100644
index 00000000000..f55852a96a1
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/05-tc39-proposal.yml
@@ -0,0 +1,26 @@
+name: Implement TC39 proposal
+description: Request the implementation of a TC39 proposal
+title: '[Proposal]: '
+labels:
+ - ECMAScript Spec
+ - Feature Request
+body:
+ - type: input
+ attributes:
+ label: Link to proposal
+ placeholder: https://github.com/tc39/...
+ validations:
+ required: true
+ - type: dropdown
+ attributes:
+ label: Proposal stage
+ options:
+ - Stage 4
+ - Stage 3
+ - Stage 2.7
+ - Stage 2
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Additional Context
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 00000000000..c3469ee2494
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: true
+contact_links:
+ - name: Discord
+ url: https://discord.gg/dgRawPdNuC
+ about: If you have any questions, feel free to join our Discord server.
diff --git a/.github/workflows/agreement.yml b/.github/workflows/agreement.yml
new file mode 100644
index 00000000000..05d98a8dd65
--- /dev/null
+++ b/.github/workflows/agreement.yml
@@ -0,0 +1,18 @@
+name: "Contribution Agreement"
+on:
+ pull_request
+
+jobs:
+ AgreementCheck:
+ runs-on: ubuntu-latest
+ if: ${{ github.actor != 'dependabot[bot]' }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ - run: |
+ echo Looking up contributor
+ tail -n +36 ContributionAgreement.md | awk -F\| '{print $3}' | grep -w $USER
+ env:
+ USER: ${{ github.actor }}
diff --git a/.gitignore b/.gitignore
index 7c2254493f6..da133ae06dc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -77,6 +77,8 @@ install_manifest.txt
# ICU
deps/Chakra.ICU/Chakra.ICU.props
deps/Chakra.ICU/icu
+# CMake External Project
+deps/thirdparty
# VIM
.*.swo
@@ -94,7 +96,6 @@ tags
*.dylib
Makefile
pal/src/config.h
-DbgController.js.h
lib/wabt/built/config.h
# Generated by other tools
diff --git a/Build/Chakra.Build.Clang.Default.props b/Build/Chakra.Build.Clang.Default.props
index dfb766246c5..4ad30b8877a 100644
--- a/Build/Chakra.Build.Clang.Default.props
+++ b/Build/Chakra.Build.Clang.Default.props
@@ -1,7 +1,7 @@
-
+
LLVM-vs2014
diff --git a/Build/Chakra.Build.Clang.props b/Build/Chakra.Build.Clang.props
index 61637d60cae..2f697fdf1ad 100644
--- a/Build/Chakra.Build.Clang.props
+++ b/Build/Chakra.Build.Clang.props
@@ -61,7 +61,9 @@
-Wno-microsoft-extra-qualification
-Wno-microsoft-default-arg-redefinition
-Wno-microsoft-exception-spec
- -v
+ -Wno-clang-cl-pch
+ -Wno-unused-lambda-capture
+ -Wno-pragma-pack
OldStyle
diff --git a/Build/Chakra.Build.Clang.targets b/Build/Chakra.Build.Clang.targets
index cefd385aad2..e2d2b5d70d2 100644
--- a/Build/Chakra.Build.Clang.targets
+++ b/Build/Chakra.Build.Clang.targets
@@ -126,4 +126,4 @@
>
-
\ No newline at end of file
+
diff --git a/Build/Chakra.Core.sln b/Build/Chakra.Core.sln
index 0a852832b0e..4251e5fe208 100644
--- a/Build/Chakra.Core.sln
+++ b/Build/Chakra.Core.sln
@@ -1,7 +1,7 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
-VisualStudioVersion = 15.0.26228.4
+VisualStudioVersion = 15.0.26726.0
MinimumVisualStudioVersion = 14.0.00000.0
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ChakraCore", "..\bin\ChakraCore\ChakraCore.vcxproj", "{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}"
ProjectSection(ProjectDependencies) = postProject
@@ -166,18 +166,28 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Chakra.ICU.i18n", "..\deps\
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Chakra.ICU.Stubdata", "..\deps\Chakra.ICU\Chakra.ICU.Stubdata.vcxproj", "{E14F373D-05A0-4259-A5E9-AFE8405FB847}"
EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Chakra.SCACore", "..\lib\SCACore\Chakra.SCACore.vcxproj", "{4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "EditorConfig", "EditorConfig", "{9EAC0ED9-9EC0-492E-975B-92CA3FE527C8}"
+ ProjectSection(SolutionItems) = preProject
+ ..\.editorconfig = ..\.editorconfig
+ EndProjectSection
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|ARM = Debug|ARM
Debug|ARM64 = Debug|ARM64
+ Debug|CHPE = Debug|CHPE
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|ARM = Release|ARM
Release|ARM64 = Release|ARM64
+ Release|CHPE = Release|CHPE
Release|x64 = Release|x64
Release|x86 = Release|x86
Test|ARM = Test|ARM
Test|ARM64 = Test|ARM64
+ Test|CHPE = Test|CHPE
Test|x64 = Test|x64
Test|x86 = Test|x86
EndGlobalSection
@@ -186,6 +196,8 @@ Global
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|ARM.Build.0 = Debug|ARM
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|ARM64.ActiveCfg = Debug|ARM64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|ARM64.Build.0 = Debug|ARM64
+ {EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|CHPE.Build.0 = Debug|CHPE
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|x64.ActiveCfg = Debug|x64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|x64.Build.0 = Debug|x64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Debug|x86.ActiveCfg = Debug|Win32
@@ -194,6 +206,8 @@ Global
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|ARM.Build.0 = Release|ARM
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|ARM64.ActiveCfg = Release|ARM64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|ARM64.Build.0 = Release|ARM64
+ {EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|CHPE.ActiveCfg = Release|CHPE
+ {EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|CHPE.Build.0 = Release|CHPE
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|x64.ActiveCfg = Release|x64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|x64.Build.0 = Release|x64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Release|x86.ActiveCfg = Release|Win32
@@ -202,6 +216,8 @@ Global
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|ARM.Build.0 = Test|ARM
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|ARM64.ActiveCfg = Test|ARM64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|ARM64.Build.0 = Test|ARM64
+ {EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|CHPE.ActiveCfg = Test|CHPE
+ {EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|CHPE.Build.0 = Test|CHPE
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|x64.ActiveCfg = Test|x64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|x64.Build.0 = Test|x64
{EA882C8D-81FC-42FE-ABD5-2666DB933FDB}.Test|x86.ActiveCfg = Test|Win32
@@ -210,6 +226,10 @@ Global
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|ARM.Build.0 = Debug|ARM
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|ARM64.ActiveCfg = Debug|ARM64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|ARM64.Build.0 = Debug|ARM64
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|CHPE.Build.0 = Debug|CHPE
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|CHPE.Deploy.0 = Debug|CHPE
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|x64.ActiveCfg = Debug|x64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|x64.Build.0 = Debug|x64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Debug|x86.ActiveCfg = Debug|Win32
@@ -218,6 +238,10 @@ Global
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|ARM.Build.0 = Release|ARM
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|ARM64.ActiveCfg = Release|ARM64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|ARM64.Build.0 = Release|ARM64
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|ARM64.Deploy.0 = Release|ARM64
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|CHPE.ActiveCfg = Release|CHPE
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|CHPE.Build.0 = Release|CHPE
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|CHPE.Deploy.0 = Release|CHPE
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|x64.ActiveCfg = Release|x64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|x64.Build.0 = Release|x64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Release|x86.ActiveCfg = Release|Win32
@@ -226,6 +250,10 @@ Global
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|ARM.Build.0 = Test|ARM
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|ARM64.ActiveCfg = Test|ARM64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|ARM64.Build.0 = Test|ARM64
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|ARM64.Deploy.0 = Test|ARM64
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|CHPE.ActiveCfg = Test|CHPE
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|CHPE.Build.0 = Test|CHPE
+ {1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|CHPE.Deploy.0 = Test|CHPE
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|x64.ActiveCfg = Test|x64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|x64.Build.0 = Test|x64
{1876E800-AD77-48C4-A2F7-E5265F24AC38}.Test|x86.ActiveCfg = Test|Win32
@@ -234,6 +262,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Debug|x86.ActiveCfg = Debug|Win32
@@ -242,6 +274,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F5}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9110F5}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F5}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Release|x86.ActiveCfg = Release|Win32
@@ -250,6 +286,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F5}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9110F5}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F5}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F5}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F5}.Test|x86.ActiveCfg = Test|Win32
@@ -258,6 +298,10 @@ Global
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|ARM.Build.0 = Debug|ARM
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|ARM64.ActiveCfg = Debug|ARM64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|ARM64.Build.0 = Debug|ARM64
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|CHPE.Build.0 = Debug|CHPE
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|CHPE.Deploy.0 = Debug|CHPE
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|x64.ActiveCfg = Debug|x64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|x64.Build.0 = Debug|x64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Debug|x86.ActiveCfg = Debug|Win32
@@ -266,6 +310,10 @@ Global
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|ARM.Build.0 = Release|ARM
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|ARM64.ActiveCfg = Release|ARM64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|ARM64.Build.0 = Release|ARM64
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|ARM64.Deploy.0 = Release|ARM64
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|CHPE.ActiveCfg = Release|CHPE
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|CHPE.Build.0 = Release|CHPE
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|CHPE.Deploy.0 = Release|CHPE
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|x64.ActiveCfg = Release|x64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|x64.Build.0 = Release|x64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Release|x86.ActiveCfg = Release|Win32
@@ -274,6 +322,10 @@ Global
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|ARM.Build.0 = Test|ARM
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|ARM64.ActiveCfg = Test|ARM64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|ARM64.Build.0 = Test|ARM64
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|ARM64.Deploy.0 = Test|ARM64
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|CHPE.ActiveCfg = Test|CHPE
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|CHPE.Build.0 = Test|CHPE
+ {8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|CHPE.Deploy.0 = Test|CHPE
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|x64.ActiveCfg = Test|x64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|x64.Build.0 = Test|x64
{8C61E4E7-F0D6-420D-A352-3E6E50D406DD}.Test|x86.ActiveCfg = Test|Win32
@@ -282,6 +334,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Debug|x86.ActiveCfg = Debug|Win32
@@ -290,6 +346,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F8}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9110F8}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F8}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Release|x86.ActiveCfg = Release|Win32
@@ -298,6 +358,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F8}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9110F8}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F8}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F8}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F8}.Test|x86.ActiveCfg = Test|Win32
@@ -306,6 +370,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Debug|x86.ActiveCfg = Debug|Win32
@@ -314,6 +382,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F7}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9110F7}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F7}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Release|x86.ActiveCfg = Release|Win32
@@ -322,6 +394,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F7}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9110F7}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F7}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F7}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F7}.Test|x86.ActiveCfg = Test|Win32
@@ -330,6 +406,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Debug|x86.ActiveCfg = Debug|Win32
@@ -338,6 +418,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F6}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9110F6}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F6}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Release|x86.ActiveCfg = Release|Win32
@@ -346,6 +430,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110F6}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9110F6}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110F6}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9110F6}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110F6}.Test|x86.ActiveCfg = Test|Win32
@@ -354,6 +442,10 @@ Global
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM.Build.0 = Debug|ARM
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM64.ActiveCfg = Debug|ARM64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM64.Build.0 = Debug|ARM64
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|CHPE.Build.0 = Debug|CHPE
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|CHPE.Deploy.0 = Debug|CHPE
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|x64.ActiveCfg = Debug|x64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|x64.Build.0 = Debug|x64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|x86.ActiveCfg = Debug|Win32
@@ -362,6 +454,10 @@ Global
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM.Build.0 = Release|ARM
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM64.ActiveCfg = Release|ARM64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM64.Build.0 = Release|ARM64
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM64.Deploy.0 = Release|ARM64
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|CHPE.ActiveCfg = Release|CHPE
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|CHPE.Build.0 = Release|CHPE
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|CHPE.Deploy.0 = Release|CHPE
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|x64.ActiveCfg = Release|x64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|x64.Build.0 = Release|x64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Release|x86.ActiveCfg = Release|Win32
@@ -370,6 +466,10 @@ Global
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM.Build.0 = Test|ARM
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM64.ActiveCfg = Test|ARM64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM64.Build.0 = Test|ARM64
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM64.Deploy.0 = Test|ARM64
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|CHPE.ActiveCfg = Test|CHPE
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|CHPE.Build.0 = Test|CHPE
+ {BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|CHPE.Deploy.0 = Test|CHPE
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|x64.ActiveCfg = Test|x64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|x64.Build.0 = Test|x64
{BB4153FF-AC3E-4734-B562-CC23812DF31B}.Test|x86.ActiveCfg = Test|Win32
@@ -378,6 +478,10 @@ Global
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM.Build.0 = Debug|ARM
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM64.ActiveCfg = Debug|ARM64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM64.Build.0 = Debug|ARM64
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|CHPE.Build.0 = Debug|CHPE
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|CHPE.Deploy.0 = Debug|CHPE
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|x64.ActiveCfg = Debug|x64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|x64.Build.0 = Debug|x64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Debug|x86.ActiveCfg = Debug|Win32
@@ -386,6 +490,10 @@ Global
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM.Build.0 = Release|ARM
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM64.ActiveCfg = Release|ARM64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM64.Build.0 = Release|ARM64
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|ARM64.Deploy.0 = Release|ARM64
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|CHPE.ActiveCfg = Release|CHPE
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|CHPE.Build.0 = Release|CHPE
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|CHPE.Deploy.0 = Release|CHPE
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|x64.ActiveCfg = Release|x64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|x64.Build.0 = Release|x64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Release|x86.ActiveCfg = Release|Win32
@@ -394,6 +502,10 @@ Global
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM.Build.0 = Test|ARM
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM64.ActiveCfg = Test|ARM64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM64.Build.0 = Test|ARM64
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|ARM64.Deploy.0 = Test|ARM64
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|CHPE.ActiveCfg = Test|CHPE
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|CHPE.Build.0 = Test|CHPE
+ {CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|CHPE.Deploy.0 = Test|CHPE
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|x64.ActiveCfg = Test|x64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|x64.Build.0 = Test|x64
{CC4153FF-AC3E-4734-B562-CC23812DF31B}.Test|x86.ActiveCfg = Test|Win32
@@ -402,6 +514,10 @@ Global
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|ARM.Build.0 = Debug|ARM
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|ARM64.ActiveCfg = Debug|ARM64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|ARM64.Build.0 = Debug|ARM64
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|CHPE.Build.0 = Debug|CHPE
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|CHPE.Deploy.0 = Debug|CHPE
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|x64.ActiveCfg = Debug|x64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|x64.Build.0 = Debug|x64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Debug|x86.ActiveCfg = Debug|Win32
@@ -410,6 +526,10 @@ Global
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|ARM.Build.0 = Release|ARM
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|ARM64.ActiveCfg = Release|ARM64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|ARM64.Build.0 = Release|ARM64
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|ARM64.Deploy.0 = Release|ARM64
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|CHPE.ActiveCfg = Release|CHPE
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|CHPE.Build.0 = Release|CHPE
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|CHPE.Deploy.0 = Release|CHPE
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|x64.ActiveCfg = Release|x64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|x64.Build.0 = Release|x64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Release|x86.ActiveCfg = Release|Win32
@@ -418,6 +538,10 @@ Global
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|ARM.Build.0 = Test|ARM
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|ARM64.ActiveCfg = Test|ARM64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|ARM64.Build.0 = Test|ARM64
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|ARM64.Deploy.0 = Test|ARM64
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|CHPE.ActiveCfg = Test|CHPE
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|CHPE.Build.0 = Test|CHPE
+ {5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|CHPE.Deploy.0 = Test|CHPE
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|x64.ActiveCfg = Test|x64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|x64.Build.0 = Test|x64
{5643D42A-C38D-4D82-9662-58470B3AC9F7}.Test|x86.ActiveCfg = Test|Win32
@@ -426,6 +550,10 @@ Global
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|ARM.Build.0 = Debug|ARM
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|ARM64.ActiveCfg = Debug|ARM64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|ARM64.Build.0 = Debug|ARM64
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|CHPE.Build.0 = Debug|CHPE
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|CHPE.Deploy.0 = Debug|CHPE
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|x64.ActiveCfg = Debug|x64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|x64.Build.0 = Debug|x64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Debug|x86.ActiveCfg = Debug|Win32
@@ -434,6 +562,10 @@ Global
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|ARM.Build.0 = Release|ARM
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|ARM64.ActiveCfg = Release|ARM64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|ARM64.Build.0 = Release|ARM64
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|ARM64.Deploy.0 = Release|ARM64
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|CHPE.ActiveCfg = Release|CHPE
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|CHPE.Build.0 = Release|CHPE
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|CHPE.Deploy.0 = Release|CHPE
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|x64.ActiveCfg = Release|x64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|x64.Build.0 = Release|x64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Release|x86.ActiveCfg = Release|Win32
@@ -442,6 +574,10 @@ Global
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|ARM.Build.0 = Test|ARM
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|ARM64.ActiveCfg = Test|ARM64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|ARM64.Build.0 = Test|ARM64
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|ARM64.Deploy.0 = Test|ARM64
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|CHPE.ActiveCfg = Test|CHPE
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|CHPE.Build.0 = Test|CHPE
+ {FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|CHPE.Deploy.0 = Test|CHPE
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|x64.ActiveCfg = Test|x64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|x64.Build.0 = Test|x64
{FD8EEC40-4141-448A-BF4B-1589FBE4F60D}.Test|x86.ActiveCfg = Test|Win32
@@ -450,6 +586,10 @@ Global
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|ARM.Build.0 = Debug|ARM
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|ARM64.ActiveCfg = Debug|ARM64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|ARM64.Build.0 = Debug|ARM64
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|CHPE.Build.0 = Debug|CHPE
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|CHPE.Deploy.0 = Debug|CHPE
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|x64.ActiveCfg = Debug|x64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|x64.Build.0 = Debug|x64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Debug|x86.ActiveCfg = Debug|Win32
@@ -458,6 +598,10 @@ Global
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|ARM.Build.0 = Release|ARM
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|ARM64.ActiveCfg = Release|ARM64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|ARM64.Build.0 = Release|ARM64
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|ARM64.Deploy.0 = Release|ARM64
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|CHPE.ActiveCfg = Release|CHPE
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|CHPE.Build.0 = Release|CHPE
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|CHPE.Deploy.0 = Release|CHPE
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|x64.ActiveCfg = Release|x64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|x64.Build.0 = Release|x64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Release|x86.ActiveCfg = Release|Win32
@@ -466,6 +610,10 @@ Global
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|ARM.Build.0 = Test|ARM
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|ARM64.ActiveCfg = Test|ARM64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|ARM64.Build.0 = Test|ARM64
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|ARM64.Deploy.0 = Test|ARM64
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|CHPE.ActiveCfg = Test|CHPE
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|CHPE.Build.0 = Test|CHPE
+ {BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|CHPE.Deploy.0 = Test|CHPE
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|x64.ActiveCfg = Test|x64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|x64.Build.0 = Test|x64
{BB4153FF-AC3E-4734-B562-FF23812DF31B}.Test|x86.ActiveCfg = Test|Win32
@@ -474,6 +622,10 @@ Global
{18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|ARM.Build.0 = Debug|ARM
{18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|ARM64.ActiveCfg = Debug|ARM64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|ARM64.Build.0 = Debug|ARM64
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|CHPE.Build.0 = Debug|CHPE
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|CHPE.Deploy.0 = Debug|CHPE
{18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|x64.ActiveCfg = Debug|x64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|x64.Build.0 = Debug|x64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Debug|x86.ActiveCfg = Debug|Win32
@@ -482,6 +634,10 @@ Global
{18CF279F-188D-4655-B03D-74F65388E7D1}.Release|ARM.Build.0 = Release|ARM
{18CF279F-188D-4655-B03D-74F65388E7D1}.Release|ARM64.ActiveCfg = Release|ARM64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Release|ARM64.Build.0 = Release|ARM64
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Release|ARM64.Deploy.0 = Release|ARM64
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Release|CHPE.ActiveCfg = Release|CHPE
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Release|CHPE.Build.0 = Release|CHPE
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Release|CHPE.Deploy.0 = Release|CHPE
{18CF279F-188D-4655-B03D-74F65388E7D1}.Release|x64.ActiveCfg = Release|x64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Release|x64.Build.0 = Release|x64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Release|x86.ActiveCfg = Release|Win32
@@ -490,6 +646,10 @@ Global
{18CF279F-188D-4655-B03D-74F65388E7D1}.Test|ARM.Build.0 = Test|ARM
{18CF279F-188D-4655-B03D-74F65388E7D1}.Test|ARM64.ActiveCfg = Test|ARM64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Test|ARM64.Build.0 = Test|ARM64
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Test|ARM64.Deploy.0 = Test|ARM64
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Test|CHPE.ActiveCfg = Test|CHPE
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Test|CHPE.Build.0 = Test|CHPE
+ {18CF279F-188D-4655-B03D-74F65388E7D1}.Test|CHPE.Deploy.0 = Test|CHPE
{18CF279F-188D-4655-B03D-74F65388E7D1}.Test|x64.ActiveCfg = Test|x64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Test|x64.Build.0 = Test|x64
{18CF279F-188D-4655-B03D-74F65388E7D1}.Test|x86.ActiveCfg = Test|Win32
@@ -498,6 +658,10 @@ Global
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|ARM.Build.0 = Debug|ARM
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|ARM64.ActiveCfg = Debug|ARM64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|ARM64.Build.0 = Debug|ARM64
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|CHPE.Build.0 = Debug|CHPE
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|CHPE.Deploy.0 = Debug|CHPE
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|x64.ActiveCfg = Debug|x64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|x64.Build.0 = Debug|x64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Debug|x86.ActiveCfg = Debug|Win32
@@ -506,6 +670,10 @@ Global
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|ARM.Build.0 = Release|ARM
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|ARM64.ActiveCfg = Release|ARM64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|ARM64.Build.0 = Release|ARM64
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|ARM64.Deploy.0 = Release|ARM64
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|CHPE.ActiveCfg = Release|CHPE
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|CHPE.Build.0 = Release|CHPE
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|CHPE.Deploy.0 = Release|CHPE
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|x64.ActiveCfg = Release|x64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|x64.Build.0 = Release|x64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Release|x86.ActiveCfg = Release|Win32
@@ -514,6 +682,10 @@ Global
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|ARM.Build.0 = Test|ARM
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|ARM64.ActiveCfg = Test|ARM64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|ARM64.Build.0 = Test|ARM64
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|ARM64.Deploy.0 = Test|ARM64
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|CHPE.ActiveCfg = Test|CHPE
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|CHPE.Build.0 = Test|CHPE
+ {F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|CHPE.Deploy.0 = Test|CHPE
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|x64.ActiveCfg = Test|x64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|x64.Build.0 = Test|x64
{F6FAD160-5A4B-476A-93AC-33E0B3A18C0C}.Test|x86.ActiveCfg = Test|Win32
@@ -522,6 +694,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Debug|x86.ActiveCfg = Debug|Win32
@@ -530,6 +706,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9220F5}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9220F5}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9220F5}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Release|x86.ActiveCfg = Release|Win32
@@ -538,6 +718,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9220F5}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9220F5}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220F5}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9220F5}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9220F5}.Test|x86.ActiveCfg = Test|Win32
@@ -546,6 +730,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Debug|x86.ActiveCfg = Debug|Win32
@@ -554,6 +742,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9220EE}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9220EE}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9220EE}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Release|x86.ActiveCfg = Release|Win32
@@ -562,6 +754,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9220EE}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9220EE}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9220EE}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9220EE}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9220EE}.Test|x86.ActiveCfg = Test|Win32
@@ -570,6 +766,10 @@ Global
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|ARM.Build.0 = Debug|ARM
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|ARM64.ActiveCfg = Debug|ARM64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|ARM64.Build.0 = Debug|ARM64
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|CHPE.Build.0 = Debug|CHPE
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|CHPE.Deploy.0 = Debug|CHPE
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|x64.ActiveCfg = Debug|x64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|x64.Build.0 = Debug|x64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Debug|x86.ActiveCfg = Debug|Win32
@@ -578,6 +778,10 @@ Global
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|ARM.Build.0 = Release|ARM
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|ARM64.ActiveCfg = Release|ARM64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|ARM64.Build.0 = Release|ARM64
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|ARM64.Deploy.0 = Release|ARM64
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|CHPE.ActiveCfg = Release|CHPE
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|CHPE.Build.0 = Release|CHPE
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|CHPE.Deploy.0 = Release|CHPE
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|x64.ActiveCfg = Release|x64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|x64.Build.0 = Release|x64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Release|x86.ActiveCfg = Release|Win32
@@ -586,6 +790,10 @@ Global
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|ARM.Build.0 = Test|ARM
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|ARM64.ActiveCfg = Test|ARM64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|ARM64.Build.0 = Test|ARM64
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|ARM64.Deploy.0 = Test|ARM64
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|CHPE.ActiveCfg = Test|CHPE
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|CHPE.Build.0 = Test|CHPE
+ {ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|CHPE.Deploy.0 = Test|CHPE
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|x64.ActiveCfg = Test|x64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|x64.Build.0 = Test|x64
{ABC904AD-9415-46F8-AA23-E33193F81F7C}.Test|x86.ActiveCfg = Test|Win32
@@ -594,6 +802,10 @@ Global
{6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|ARM.Build.0 = Debug|ARM
{6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|ARM64.ActiveCfg = Debug|ARM64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|ARM64.Build.0 = Debug|ARM64
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|CHPE.Build.0 = Debug|CHPE
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|CHPE.Deploy.0 = Debug|CHPE
{6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|x64.ActiveCfg = Debug|x64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|x64.Build.0 = Debug|x64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Debug|x86.ActiveCfg = Debug|Win32
@@ -602,6 +814,10 @@ Global
{6979EC58-7A28-465C-A694-F3323A1F5401}.Release|ARM.Build.0 = Release|ARM
{6979EC58-7A28-465C-A694-F3323A1F5401}.Release|ARM64.ActiveCfg = Release|ARM64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Release|ARM64.Build.0 = Release|ARM64
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Release|ARM64.Deploy.0 = Release|ARM64
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Release|CHPE.ActiveCfg = Release|CHPE
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Release|CHPE.Build.0 = Release|CHPE
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Release|CHPE.Deploy.0 = Release|CHPE
{6979EC58-7A28-465C-A694-F3323A1F5401}.Release|x64.ActiveCfg = Release|x64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Release|x64.Build.0 = Release|x64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Release|x86.ActiveCfg = Release|Win32
@@ -610,6 +826,10 @@ Global
{6979EC58-7A28-465C-A694-F3323A1F5401}.Test|ARM.Build.0 = Test|ARM
{6979EC58-7A28-465C-A694-F3323A1F5401}.Test|ARM64.ActiveCfg = Test|ARM64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Test|ARM64.Build.0 = Test|ARM64
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Test|ARM64.Deploy.0 = Test|ARM64
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Test|CHPE.ActiveCfg = Test|CHPE
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Test|CHPE.Build.0 = Test|CHPE
+ {6979EC58-7A28-465C-A694-F3323A1F5401}.Test|CHPE.Deploy.0 = Test|CHPE
{6979EC58-7A28-465C-A694-F3323A1F5401}.Test|x64.ActiveCfg = Test|x64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Test|x64.Build.0 = Test|x64
{6979EC58-7A28-465C-A694-F3323A1F5401}.Test|x86.ActiveCfg = Test|Win32
@@ -618,6 +838,8 @@ Global
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|ARM.Build.0 = Debug|ARM
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|ARM64.ActiveCfg = Debug|ARM64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|ARM64.Build.0 = Debug|ARM64
+ {0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|CHPE.Build.0 = Debug|CHPE
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|x64.ActiveCfg = Debug|x64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|x64.Build.0 = Debug|x64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Debug|x86.ActiveCfg = Debug|Win32
@@ -626,6 +848,8 @@ Global
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|ARM.Build.0 = Release|ARM
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|ARM64.ActiveCfg = Release|ARM64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|ARM64.Build.0 = Release|ARM64
+ {0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|CHPE.ActiveCfg = Release|CHPE
+ {0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|CHPE.Build.0 = Release|CHPE
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|x64.ActiveCfg = Release|x64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|x64.Build.0 = Release|x64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Release|x86.ActiveCfg = Release|Win32
@@ -634,6 +858,8 @@ Global
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|ARM.Build.0 = Test|ARM
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|ARM64.ActiveCfg = Test|ARM64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|ARM64.Build.0 = Test|ARM64
+ {0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|CHPE.ActiveCfg = Test|CHPE
+ {0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|CHPE.Build.0 = Test|CHPE
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|x64.ActiveCfg = Test|x64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|x64.Build.0 = Test|x64
{0216C4BE-86CE-478D-A134-23EAEE545B9D}.Test|x86.ActiveCfg = Test|Win32
@@ -642,6 +868,10 @@ Global
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|ARM.Build.0 = Debug|ARM
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|ARM64.ActiveCfg = Debug|ARM64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|ARM64.Build.0 = Debug|ARM64
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|CHPE.Build.0 = Debug|CHPE
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|CHPE.Deploy.0 = Debug|CHPE
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|x64.ActiveCfg = Debug|x64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|x64.Build.0 = Debug|x64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Debug|x86.ActiveCfg = Debug|Win32
@@ -650,6 +880,10 @@ Global
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|ARM.Build.0 = Release|ARM
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|ARM64.ActiveCfg = Release|ARM64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|ARM64.Build.0 = Release|ARM64
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|ARM64.Deploy.0 = Release|ARM64
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|CHPE.ActiveCfg = Release|CHPE
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|CHPE.Build.0 = Release|CHPE
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|CHPE.Deploy.0 = Release|CHPE
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|x64.ActiveCfg = Release|x64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|x64.Build.0 = Release|x64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Release|x86.ActiveCfg = Release|Win32
@@ -658,6 +892,10 @@ Global
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|ARM.Build.0 = Test|ARM
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|ARM64.ActiveCfg = Test|ARM64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|ARM64.Build.0 = Test|ARM64
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|ARM64.Deploy.0 = Test|ARM64
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|CHPE.ActiveCfg = Test|CHPE
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|CHPE.Build.0 = Test|CHPE
+ {80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|CHPE.Deploy.0 = Test|CHPE
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|x64.ActiveCfg = Test|x64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|x64.Build.0 = Test|x64
{80A70F57-0F89-458F-AFD3-CE2159EB9BB1}.Test|x86.ActiveCfg = Test|Win32
@@ -666,6 +904,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|ARM.Build.0 = Debug|ARM
{706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|ARM64.ActiveCfg = Debug|ARM64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|ARM64.Build.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|CHPE.Build.0 = Debug|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|CHPE.Deploy.0 = Debug|CHPE
{706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|x64.ActiveCfg = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|x64.Build.0 = Debug|x64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Debug|x86.ActiveCfg = Debug|Win32
@@ -674,6 +916,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110EE}.Release|ARM.Build.0 = Release|ARM
{706083F7-6AA4-4558-A153-6352EF9110EE}.Release|ARM64.ActiveCfg = Release|ARM64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Release|ARM64.Build.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Release|ARM64.Deploy.0 = Release|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Release|CHPE.ActiveCfg = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Release|CHPE.Build.0 = Release|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Release|CHPE.Deploy.0 = Release|CHPE
{706083F7-6AA4-4558-A153-6352EF9110EE}.Release|x64.ActiveCfg = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Release|x64.Build.0 = Release|x64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Release|x86.ActiveCfg = Release|Win32
@@ -682,6 +928,10 @@ Global
{706083F7-6AA4-4558-A153-6352EF9110EE}.Test|ARM.Build.0 = Test|ARM
{706083F7-6AA4-4558-A153-6352EF9110EE}.Test|ARM64.ActiveCfg = Test|ARM64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Test|ARM64.Build.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Test|ARM64.Deploy.0 = Test|ARM64
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Test|CHPE.ActiveCfg = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Test|CHPE.Build.0 = Test|CHPE
+ {706083F7-6AA4-4558-A153-6352EF9110EE}.Test|CHPE.Deploy.0 = Test|CHPE
{706083F7-6AA4-4558-A153-6352EF9110EE}.Test|x64.ActiveCfg = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Test|x64.Build.0 = Test|x64
{706083F7-6AA4-4558-A153-6352EF9110EE}.Test|x86.ActiveCfg = Test|Win32
@@ -690,6 +940,10 @@ Global
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|ARM.Build.0 = Debug|ARM
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|ARM64.ActiveCfg = Debug|ARM64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|ARM64.Build.0 = Debug|ARM64
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|CHPE.Build.0 = Debug|CHPE
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|CHPE.Deploy.0 = Debug|CHPE
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|x64.ActiveCfg = Debug|x64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|x64.Build.0 = Debug|x64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Debug|x86.ActiveCfg = Debug|Win32
@@ -698,6 +952,10 @@ Global
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|ARM.Build.0 = Release|ARM
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|ARM64.ActiveCfg = Release|ARM64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|ARM64.Build.0 = Release|ARM64
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|ARM64.Deploy.0 = Release|ARM64
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|CHPE.ActiveCfg = Release|CHPE
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|CHPE.Build.0 = Release|CHPE
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|CHPE.Deploy.0 = Release|CHPE
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|x64.ActiveCfg = Release|x64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|x64.Build.0 = Release|x64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Release|x86.ActiveCfg = Release|Win32
@@ -706,6 +964,10 @@ Global
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|ARM.Build.0 = Test|ARM
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|ARM64.ActiveCfg = Test|ARM64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|ARM64.Build.0 = Test|ARM64
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|ARM64.Deploy.0 = Test|ARM64
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|CHPE.ActiveCfg = Test|CHPE
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|CHPE.Build.0 = Test|CHPE
+ {53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|CHPE.Deploy.0 = Test|CHPE
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|x64.ActiveCfg = Test|x64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|x64.Build.0 = Test|x64
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}.Test|x86.ActiveCfg = Test|Win32
@@ -714,6 +976,10 @@ Global
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|ARM.Build.0 = Debug|ARM
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|ARM64.ActiveCfg = Debug|ARM64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|ARM64.Build.0 = Debug|ARM64
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|CHPE.Build.0 = Debug|CHPE
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|CHPE.Deploy.0 = Debug|CHPE
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|x64.ActiveCfg = Debug|x64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|x64.Build.0 = Debug|x64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Debug|x86.ActiveCfg = Debug|Win32
@@ -722,6 +988,10 @@ Global
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|ARM.Build.0 = Release|ARM
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|ARM64.ActiveCfg = Release|ARM64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|ARM64.Build.0 = Release|ARM64
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|ARM64.Deploy.0 = Release|ARM64
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|CHPE.ActiveCfg = Release|CHPE
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|CHPE.Build.0 = Release|CHPE
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|CHPE.Deploy.0 = Release|CHPE
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|x64.ActiveCfg = Release|x64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|x64.Build.0 = Release|x64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Release|x86.ActiveCfg = Release|Win32
@@ -730,6 +1000,10 @@ Global
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|ARM.Build.0 = Test|ARM
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|ARM64.ActiveCfg = Test|ARM64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|ARM64.Build.0 = Test|ARM64
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|ARM64.Deploy.0 = Test|ARM64
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|CHPE.ActiveCfg = Test|CHPE
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|CHPE.Build.0 = Test|CHPE
+ {73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|CHPE.Deploy.0 = Test|CHPE
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|x64.ActiveCfg = Test|x64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|x64.Build.0 = Test|x64
{73CE5C59-E0BA-413D-A73C-3EECE067891B}.Test|x86.ActiveCfg = Test|Win32
@@ -738,6 +1012,10 @@ Global
{129AC184-877C-441F-AC49-A692CE700E62}.Debug|ARM.Build.0 = Debug|ARM
{129AC184-877C-441F-AC49-A692CE700E62}.Debug|ARM64.ActiveCfg = Debug|ARM64
{129AC184-877C-441F-AC49-A692CE700E62}.Debug|ARM64.Build.0 = Debug|ARM64
+ {129AC184-877C-441F-AC49-A692CE700E62}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {129AC184-877C-441F-AC49-A692CE700E62}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {129AC184-877C-441F-AC49-A692CE700E62}.Debug|CHPE.Build.0 = Debug|CHPE
+ {129AC184-877C-441F-AC49-A692CE700E62}.Debug|CHPE.Deploy.0 = Debug|CHPE
{129AC184-877C-441F-AC49-A692CE700E62}.Debug|x64.ActiveCfg = Debug|x64
{129AC184-877C-441F-AC49-A692CE700E62}.Debug|x64.Build.0 = Debug|x64
{129AC184-877C-441F-AC49-A692CE700E62}.Debug|x86.ActiveCfg = Debug|Win32
@@ -746,6 +1024,10 @@ Global
{129AC184-877C-441F-AC49-A692CE700E62}.Release|ARM.Build.0 = Release|ARM
{129AC184-877C-441F-AC49-A692CE700E62}.Release|ARM64.ActiveCfg = Release|ARM64
{129AC184-877C-441F-AC49-A692CE700E62}.Release|ARM64.Build.0 = Release|ARM64
+ {129AC184-877C-441F-AC49-A692CE700E62}.Release|ARM64.Deploy.0 = Release|ARM64
+ {129AC184-877C-441F-AC49-A692CE700E62}.Release|CHPE.ActiveCfg = Release|CHPE
+ {129AC184-877C-441F-AC49-A692CE700E62}.Release|CHPE.Build.0 = Release|CHPE
+ {129AC184-877C-441F-AC49-A692CE700E62}.Release|CHPE.Deploy.0 = Release|CHPE
{129AC184-877C-441F-AC49-A692CE700E62}.Release|x64.ActiveCfg = Release|x64
{129AC184-877C-441F-AC49-A692CE700E62}.Release|x64.Build.0 = Release|x64
{129AC184-877C-441F-AC49-A692CE700E62}.Release|x86.ActiveCfg = Release|Win32
@@ -754,6 +1036,10 @@ Global
{129AC184-877C-441F-AC49-A692CE700E62}.Test|ARM.Build.0 = Test|ARM
{129AC184-877C-441F-AC49-A692CE700E62}.Test|ARM64.ActiveCfg = Test|ARM64
{129AC184-877C-441F-AC49-A692CE700E62}.Test|ARM64.Build.0 = Test|ARM64
+ {129AC184-877C-441F-AC49-A692CE700E62}.Test|ARM64.Deploy.0 = Test|ARM64
+ {129AC184-877C-441F-AC49-A692CE700E62}.Test|CHPE.ActiveCfg = Test|CHPE
+ {129AC184-877C-441F-AC49-A692CE700E62}.Test|CHPE.Build.0 = Test|CHPE
+ {129AC184-877C-441F-AC49-A692CE700E62}.Test|CHPE.Deploy.0 = Test|CHPE
{129AC184-877C-441F-AC49-A692CE700E62}.Test|x64.ActiveCfg = Test|x64
{129AC184-877C-441F-AC49-A692CE700E62}.Test|x64.Build.0 = Test|x64
{129AC184-877C-441F-AC49-A692CE700E62}.Test|x86.ActiveCfg = Test|Win32
@@ -762,6 +1048,10 @@ Global
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|ARM.Build.0 = Debug|ARM
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|ARM64.ActiveCfg = Debug|ARM64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|ARM64.Build.0 = Debug|ARM64
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|CHPE.Build.0 = Debug|CHPE
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|CHPE.Deploy.0 = Debug|CHPE
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|x64.ActiveCfg = Debug|x64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|x64.Build.0 = Debug|x64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Debug|x86.ActiveCfg = Debug|Win32
@@ -770,6 +1060,10 @@ Global
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|ARM.Build.0 = Release|ARM
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|ARM64.ActiveCfg = Release|ARM64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|ARM64.Build.0 = Release|ARM64
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|ARM64.Deploy.0 = Release|ARM64
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|CHPE.ActiveCfg = Release|CHPE
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|CHPE.Build.0 = Release|CHPE
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|CHPE.Deploy.0 = Release|CHPE
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|x64.ActiveCfg = Release|x64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|x64.Build.0 = Release|x64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Release|x86.ActiveCfg = Release|Win32
@@ -778,6 +1072,10 @@ Global
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|ARM.Build.0 = Test|ARM
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|ARM64.ActiveCfg = Test|ARM64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|ARM64.Build.0 = Test|ARM64
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|ARM64.Deploy.0 = Test|ARM64
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|CHPE.ActiveCfg = Test|CHPE
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|CHPE.Build.0 = Test|CHPE
+ {FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|CHPE.Deploy.0 = Test|CHPE
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|x64.ActiveCfg = Test|x64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|x64.Build.0 = Test|x64
{FFD0FA88-7A39-407E-A92D-D3A06273E1AC}.Test|x86.ActiveCfg = Test|Win32
@@ -786,6 +1084,10 @@ Global
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|ARM.Build.0 = Debug|ARM
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|ARM64.ActiveCfg = Debug|ARM64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|ARM64.Build.0 = Debug|ARM64
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|CHPE.Build.0 = Debug|CHPE
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|CHPE.Deploy.0 = Debug|CHPE
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|x64.ActiveCfg = Debug|x64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|x64.Build.0 = Debug|x64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Debug|x86.ActiveCfg = Debug|Win32
@@ -794,6 +1096,10 @@ Global
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|ARM.Build.0 = Release|ARM
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|ARM64.ActiveCfg = Release|ARM64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|ARM64.Build.0 = Release|ARM64
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|ARM64.Deploy.0 = Release|ARM64
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|CHPE.ActiveCfg = Release|CHPE
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|CHPE.Build.0 = Release|CHPE
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|CHPE.Deploy.0 = Release|CHPE
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|x64.ActiveCfg = Release|x64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|x64.Build.0 = Release|x64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Release|x86.ActiveCfg = Release|Win32
@@ -802,6 +1108,10 @@ Global
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|ARM.Build.0 = Test|ARM
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|ARM64.ActiveCfg = Test|ARM64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|ARM64.Build.0 = Test|ARM64
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|ARM64.Deploy.0 = Test|ARM64
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|CHPE.ActiveCfg = Test|CHPE
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|CHPE.Build.0 = Test|CHPE
+ {0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|CHPE.Deploy.0 = Test|CHPE
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|x64.ActiveCfg = Test|x64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|x64.Build.0 = Test|x64
{0DB5ECBC-9385-4A65-BE2C-4EF7C65CB719}.Test|x86.ActiveCfg = Test|Win32
@@ -810,6 +1120,10 @@ Global
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|ARM.Build.0 = Debug|ARM
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|ARM64.ActiveCfg = Debug|ARM64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|ARM64.Build.0 = Debug|ARM64
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|CHPE.Build.0 = Debug|CHPE
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|CHPE.Deploy.0 = Debug|CHPE
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|x64.ActiveCfg = Debug|x64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|x64.Build.0 = Debug|x64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Debug|x86.ActiveCfg = Debug|Win32
@@ -818,6 +1132,10 @@ Global
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|ARM.Build.0 = Release|ARM
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|ARM64.ActiveCfg = Release|ARM64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|ARM64.Build.0 = Release|ARM64
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|ARM64.Deploy.0 = Release|ARM64
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|CHPE.ActiveCfg = Release|CHPE
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|CHPE.Build.0 = Release|CHPE
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|CHPE.Deploy.0 = Release|CHPE
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|x64.ActiveCfg = Release|x64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|x64.Build.0 = Release|x64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Release|x86.ActiveCfg = Release|Win32
@@ -826,6 +1144,10 @@ Global
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|ARM.Build.0 = Test|ARM
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|ARM64.ActiveCfg = Test|ARM64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|ARM64.Build.0 = Test|ARM64
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|ARM64.Deploy.0 = Test|ARM64
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|CHPE.ActiveCfg = Test|CHPE
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|CHPE.Build.0 = Test|CHPE
+ {31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|CHPE.Deploy.0 = Test|CHPE
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|x64.ActiveCfg = Test|x64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|x64.Build.0 = Test|x64
{31024620-7B97-4EC7-96E8-E7B296A17DF4}.Test|x86.ActiveCfg = Test|Win32
@@ -834,6 +1156,10 @@ Global
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|ARM.Build.0 = Debug|ARM
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|ARM64.ActiveCfg = Debug|ARM64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|ARM64.Build.0 = Debug|ARM64
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|CHPE.Build.0 = Debug|CHPE
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|CHPE.Deploy.0 = Debug|CHPE
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|x64.ActiveCfg = Debug|x64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|x64.Build.0 = Debug|x64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Debug|x86.ActiveCfg = Debug|Win32
@@ -842,6 +1168,10 @@ Global
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|ARM.Build.0 = Release|ARM
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|ARM64.ActiveCfg = Release|ARM64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|ARM64.Build.0 = Release|ARM64
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|ARM64.Deploy.0 = Release|ARM64
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|CHPE.ActiveCfg = Release|CHPE
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|CHPE.Build.0 = Release|CHPE
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|CHPE.Deploy.0 = Release|CHPE
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|x64.ActiveCfg = Release|x64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|x64.Build.0 = Release|x64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Release|x86.ActiveCfg = Release|Win32
@@ -850,6 +1180,10 @@ Global
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|ARM.Build.0 = Test|ARM
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|ARM64.ActiveCfg = Test|ARM64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|ARM64.Build.0 = Test|ARM64
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|ARM64.Deploy.0 = Test|ARM64
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|CHPE.ActiveCfg = Test|CHPE
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|CHPE.Build.0 = Test|CHPE
+ {EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|CHPE.Deploy.0 = Test|CHPE
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|x64.ActiveCfg = Test|x64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|x64.Build.0 = Test|x64
{EDEB02E2-F389-4CBF-AE7D-3041A934F86B}.Test|x86.ActiveCfg = Test|Win32
@@ -858,6 +1192,10 @@ Global
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|ARM.Build.0 = Debug|ARM
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|ARM64.ActiveCfg = Debug|ARM64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|ARM64.Build.0 = Debug|ARM64
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|CHPE.Build.0 = Debug|CHPE
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|CHPE.Deploy.0 = Debug|CHPE
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|x64.ActiveCfg = Debug|x64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|x64.Build.0 = Debug|x64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Debug|x86.ActiveCfg = Debug|Win32
@@ -866,6 +1204,10 @@ Global
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|ARM.Build.0 = Release|ARM
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|ARM64.ActiveCfg = Release|ARM64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|ARM64.Build.0 = Release|ARM64
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|ARM64.Deploy.0 = Release|ARM64
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|CHPE.ActiveCfg = Release|CHPE
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|CHPE.Build.0 = Release|CHPE
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|CHPE.Deploy.0 = Release|CHPE
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|x64.ActiveCfg = Release|x64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|x64.Build.0 = Release|x64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Release|x86.ActiveCfg = Release|Win32
@@ -874,34 +1216,68 @@ Global
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|ARM.Build.0 = Test|ARM
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|ARM64.ActiveCfg = Test|ARM64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|ARM64.Build.0 = Test|ARM64
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|ARM64.Deploy.0 = Test|ARM64
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|CHPE.ActiveCfg = Test|CHPE
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|CHPE.Build.0 = Test|CHPE
+ {2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|CHPE.Deploy.0 = Test|CHPE
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|x64.ActiveCfg = Test|x64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|x64.Build.0 = Test|x64
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|x86.ActiveCfg = Test|Win32
{2F6A1847-BFAF-4B8A-9463-AC39FB46B96A}.Test|x86.Build.0 = Test|Win32
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|ARM.ActiveCfg = Debug|Win32
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|ARM64.ActiveCfg = Debug|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|ARM64.Build.0 = Debug|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|CHPE.Build.0 = Debug|CHPE
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|CHPE.Deploy.0 = Debug|CHPE
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|x64.ActiveCfg = Debug|x64
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Debug|x86.ActiveCfg = Debug|Win32
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|ARM.ActiveCfg = Release|Win32
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|ARM64.ActiveCfg = Release|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|ARM64.Build.0 = Release|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|ARM64.Deploy.0 = Release|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|CHPE.ActiveCfg = Release|CHPE
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|CHPE.Build.0 = Release|CHPE
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|CHPE.Deploy.0 = Release|CHPE
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|x64.ActiveCfg = Release|x64
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Release|x86.ActiveCfg = Release|Win32
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|ARM.ActiveCfg = Release|x64
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|ARM64.ActiveCfg = Test|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|ARM64.Build.0 = Test|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|ARM64.Deploy.0 = Test|ARM64
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|CHPE.ActiveCfg = Test|CHPE
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|CHPE.Build.0 = Test|CHPE
+ {02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|CHPE.Deploy.0 = Test|CHPE
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|x64.ActiveCfg = Release|x64
{02D4FD92-AD34-40CA-85DF-4D6C7E3A1F22}.Test|x86.ActiveCfg = Release|Win32
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|ARM.ActiveCfg = Debug|ARM
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|ARM64.ActiveCfg = Debug|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|ARM64.Build.0 = Debug|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|CHPE.Build.0 = Debug|CHPE
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|CHPE.Deploy.0 = Debug|CHPE
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|x64.ActiveCfg = Debug|x64
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|x64.Build.0 = Debug|x64
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|x86.ActiveCfg = Debug|Win32
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Debug|x86.Build.0 = Debug|Win32
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|ARM.ActiveCfg = Release|ARM
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|ARM64.ActiveCfg = Release|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|ARM64.Build.0 = Release|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|ARM64.Deploy.0 = Release|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|CHPE.ActiveCfg = Release|CHPE
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|CHPE.Build.0 = Release|CHPE
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|CHPE.Deploy.0 = Release|CHPE
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|x64.ActiveCfg = Release|x64
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Release|x86.ActiveCfg = Release|Win32
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|ARM.ActiveCfg = Test|ARM
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|ARM64.ActiveCfg = Test|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|ARM64.Build.0 = Test|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|ARM64.Deploy.0 = Test|ARM64
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|CHPE.ActiveCfg = Test|CHPE
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|CHPE.Build.0 = Test|CHPE
+ {F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|CHPE.Deploy.0 = Test|CHPE
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|x64.ActiveCfg = Test|x64
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|x64.Build.0 = Test|x64
{F48B3491-81DF-4F49-B35F-3308CBE6A379}.Test|x86.ActiveCfg = Test|Win32
@@ -910,6 +1286,10 @@ Global
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|ARM.Build.0 = Debug|ARM
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|ARM64.ActiveCfg = Debug|ARM64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|ARM64.Build.0 = Debug|ARM64
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|CHPE.Build.0 = Debug|CHPE
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|CHPE.Deploy.0 = Debug|CHPE
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|x64.ActiveCfg = Debug|x64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|x64.Build.0 = Debug|x64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Debug|x86.ActiveCfg = Debug|Win32
@@ -918,6 +1298,10 @@ Global
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|ARM.Build.0 = Release|ARM
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|ARM64.ActiveCfg = Release|ARM64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|ARM64.Build.0 = Release|ARM64
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|ARM64.Deploy.0 = Release|ARM64
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|CHPE.ActiveCfg = Release|CHPE
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|CHPE.Build.0 = Release|CHPE
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|CHPE.Deploy.0 = Release|CHPE
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|x64.ActiveCfg = Release|x64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|x64.Build.0 = Release|x64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Release|x86.ActiveCfg = Release|Win32
@@ -926,6 +1310,10 @@ Global
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|ARM.Build.0 = Test|ARM
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|ARM64.ActiveCfg = Test|ARM64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|ARM64.Build.0 = Test|ARM64
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|ARM64.Deploy.0 = Test|ARM64
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|CHPE.ActiveCfg = Test|CHPE
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|CHPE.Build.0 = Test|CHPE
+ {EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|CHPE.Deploy.0 = Test|CHPE
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|x64.ActiveCfg = Test|x64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|x64.Build.0 = Test|x64
{EE2A3111-4D85-427C-B0AB-E6B0EA7FFB44}.Test|x86.ActiveCfg = Test|Win32
@@ -934,6 +1322,10 @@ Global
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|ARM.Build.0 = Debug|ARM
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|ARM64.ActiveCfg = Debug|ARM64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|ARM64.Build.0 = Debug|ARM64
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|CHPE.Build.0 = Debug|CHPE
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|CHPE.Deploy.0 = Debug|CHPE
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|x64.ActiveCfg = Debug|x64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|x64.Build.0 = Debug|x64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Debug|x86.ActiveCfg = Debug|Win32
@@ -942,6 +1334,10 @@ Global
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|ARM.Build.0 = Release|ARM
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|ARM64.ActiveCfg = Release|ARM64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|ARM64.Build.0 = Release|ARM64
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|ARM64.Deploy.0 = Release|ARM64
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|CHPE.ActiveCfg = Release|CHPE
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|CHPE.Build.0 = Release|CHPE
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|CHPE.Deploy.0 = Release|CHPE
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|x64.ActiveCfg = Release|x64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|x64.Build.0 = Release|x64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Release|x86.ActiveCfg = Release|Win32
@@ -950,6 +1346,10 @@ Global
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|ARM.Build.0 = Test|ARM
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|ARM64.ActiveCfg = Test|ARM64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|ARM64.Build.0 = Test|ARM64
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|ARM64.Deploy.0 = Test|ARM64
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|CHPE.ActiveCfg = Test|CHPE
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|CHPE.Build.0 = Test|CHPE
+ {347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|CHPE.Deploy.0 = Test|CHPE
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|x64.ActiveCfg = Test|x64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|x64.Build.0 = Test|x64
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0}.Test|x86.ActiveCfg = Test|Win32
@@ -958,6 +1358,10 @@ Global
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|ARM.Build.0 = Debug|ARM
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|ARM64.ActiveCfg = Debug|ARM64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|ARM64.Build.0 = Debug|ARM64
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|CHPE.Build.0 = Debug|CHPE
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|CHPE.Deploy.0 = Debug|CHPE
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|x64.ActiveCfg = Debug|x64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|x64.Build.0 = Debug|x64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Debug|x86.ActiveCfg = Debug|Win32
@@ -966,6 +1370,10 @@ Global
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|ARM.Build.0 = Release|ARM
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|ARM64.ActiveCfg = Release|ARM64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|ARM64.Build.0 = Release|ARM64
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|ARM64.Deploy.0 = Release|ARM64
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|CHPE.ActiveCfg = Release|CHPE
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|CHPE.Build.0 = Release|CHPE
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|CHPE.Deploy.0 = Release|CHPE
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|x64.ActiveCfg = Release|x64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|x64.Build.0 = Release|x64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Release|x86.ActiveCfg = Release|Win32
@@ -974,6 +1382,10 @@ Global
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|ARM.Build.0 = Test|ARM
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|ARM64.ActiveCfg = Test|ARM64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|ARM64.Build.0 = Test|ARM64
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|ARM64.Deploy.0 = Test|ARM64
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|CHPE.ActiveCfg = Test|CHPE
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|CHPE.Build.0 = Test|CHPE
+ {0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|CHPE.Deploy.0 = Test|CHPE
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|x64.ActiveCfg = Test|x64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|x64.Build.0 = Test|x64
{0494C753-5BB9-45AA-874E-E61B9922E88F}.Test|x86.ActiveCfg = Test|Win32
@@ -982,6 +1394,10 @@ Global
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|ARM.Build.0 = Debug|ARM
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|ARM64.ActiveCfg = Debug|ARM64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|ARM64.Build.0 = Debug|ARM64
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|ARM64.Deploy.0 = Debug|ARM64
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|CHPE.Build.0 = Debug|CHPE
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|CHPE.Deploy.0 = Debug|CHPE
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|x64.ActiveCfg = Debug|x64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|x64.Build.0 = Debug|x64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Debug|x86.ActiveCfg = Debug|Win32
@@ -990,6 +1406,10 @@ Global
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|ARM.Build.0 = Release|ARM
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|ARM64.ActiveCfg = Release|ARM64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|ARM64.Build.0 = Release|ARM64
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|ARM64.Deploy.0 = Release|ARM64
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|CHPE.ActiveCfg = Release|CHPE
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|CHPE.Build.0 = Release|CHPE
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|CHPE.Deploy.0 = Release|CHPE
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|x64.ActiveCfg = Release|x64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|x64.Build.0 = Release|x64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Release|x86.ActiveCfg = Release|Win32
@@ -998,10 +1418,44 @@ Global
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|ARM.Build.0 = Test|ARM
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|ARM64.ActiveCfg = Test|ARM64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|ARM64.Build.0 = Test|ARM64
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|ARM64.Deploy.0 = Test|ARM64
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|CHPE.ActiveCfg = Test|CHPE
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|CHPE.Build.0 = Test|CHPE
+ {E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|CHPE.Deploy.0 = Test|CHPE
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|x64.ActiveCfg = Test|x64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|x64.Build.0 = Test|x64
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|x86.ActiveCfg = Test|Win32
{E14F373D-05A0-4259-A5E9-AFE8405FB847}.Test|x86.Build.0 = Test|Win32
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|ARM.ActiveCfg = Debug|ARM
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|ARM.Build.0 = Debug|ARM
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|ARM64.ActiveCfg = Debug|ARM64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|ARM64.Build.0 = Debug|ARM64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|CHPE.ActiveCfg = Debug|CHPE
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|CHPE.Build.0 = Debug|CHPE
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|x64.ActiveCfg = Debug|x64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|x64.Build.0 = Debug|x64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|x86.ActiveCfg = Debug|Win32
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Debug|x86.Build.0 = Debug|Win32
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|ARM.ActiveCfg = Release|ARM
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|ARM.Build.0 = Release|ARM
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|ARM64.ActiveCfg = Release|ARM64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|ARM64.Build.0 = Release|ARM64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|CHPE.ActiveCfg = Release|CHPE
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|CHPE.Build.0 = Release|CHPE
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|x64.ActiveCfg = Release|x64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|x64.Build.0 = Release|x64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|x86.ActiveCfg = Release|Win32
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Release|x86.Build.0 = Release|Win32
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|ARM.ActiveCfg = Test|ARM
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|ARM.Build.0 = Test|ARM
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|ARM64.ActiveCfg = Test|ARM64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|ARM64.Build.0 = Test|ARM64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|CHPE.ActiveCfg = Test|CHPE
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|CHPE.Build.0 = Test|CHPE
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|x64.ActiveCfg = Test|x64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|x64.Build.0 = Test|x64
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|x86.ActiveCfg = Test|Win32
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5}.Test|x86.Build.0 = Test|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -1049,6 +1503,7 @@ Global
{347824B1-7100-4EE6-8A6B-4FF64E66B0C0} = {6C6BC844-3D86-42B4-B3C4-7478487D2C38}
{0494C753-5BB9-45AA-874E-E61B9922E88F} = {6C6BC844-3D86-42B4-B3C4-7478487D2C38}
{E14F373D-05A0-4259-A5E9-AFE8405FB847} = {6C6BC844-3D86-42B4-B3C4-7478487D2C38}
+ {4DA3A367-6ED2-4EE8-9698-5BCD0B8AF7F5} = {D8216B93-BD6E-4293-8D98-79CEF7CF66BC}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {1F6CA1BC-6C01-4C82-8505-6A7690EBD556}
diff --git a/Build/Common.Build.Default.props b/Build/Common.Build.Default.props
index 07d1dd35f69..799e098759b 100644
--- a/Build/Common.Build.Default.props
+++ b/Build/Common.Build.Default.props
@@ -18,6 +18,7 @@
v140
v141
v142
+ v143
diff --git a/Build/NuGet/.pack-version b/Build/NuGet/.pack-version
index de1a208d561..feaae22bac7 100644
--- a/Build/NuGet/.pack-version
+++ b/Build/NuGet/.pack-version
@@ -1 +1 @@
-1.11.19
+1.13.0
diff --git a/Build/NuGet/Microsoft.ChakraCore.ARM.nuspec b/Build/NuGet/Microsoft.ChakraCore.ARM.nuspec
deleted file mode 100644
index dd3793be051..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.ARM.nuspec
+++ /dev/null
@@ -1,29 +0,0 @@
-
-
-
- Microsoft.ChakraCore.ARM
-
- $version$
- Chakra Team
- Chakra Team
- https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt
- https://github.com/Microsoft/ChakraCore
- false
- true
- ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
- https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
- Copyright (C) 2016 Microsoft
- en-US
- Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.ARM.props b/Build/NuGet/Microsoft.ChakraCore.ARM.props
deleted file mode 100644
index 710a7fa5a42..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.ARM.props
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.Symbols.nuspec b/Build/NuGet/Microsoft.ChakraCore.Symbols.nuspec
deleted file mode 100644
index 2e7c7c9420c..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.Symbols.nuspec
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
- Microsoft.ChakraCore.Symbols
-
- $version$
- Chakra Team
- Chakra Team
- https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt
- https://github.com/Microsoft/ChakraCore
- false
- true
- ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
- https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
- Copyright (C) 2016 Microsoft
- en-US
- Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.Symbols.props b/Build/NuGet/Microsoft.ChakraCore.Symbols.props
deleted file mode 100644
index 4c87ee4dc7f..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.Symbols.props
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-
-
- x86\%(Filename)%(Extension)
- PreserveNewest
- False
-
-
- x64\%(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.X64.nuspec b/Build/NuGet/Microsoft.ChakraCore.X64.nuspec
deleted file mode 100644
index cebcb03e7c4..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.X64.nuspec
+++ /dev/null
@@ -1,29 +0,0 @@
-
-
-
- Microsoft.ChakraCore.X64
-
- $version$
- Chakra Team
- Chakra Team
- https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt
- https://github.com/Microsoft/ChakraCore
- false
- true
- ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
- https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
- Copyright (C) 2016 Microsoft
- en-US
- Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.X64.props b/Build/NuGet/Microsoft.ChakraCore.X64.props
deleted file mode 100644
index b764d023db1..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.X64.props
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.X86.nuspec b/Build/NuGet/Microsoft.ChakraCore.X86.nuspec
deleted file mode 100644
index f451423ebed..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.X86.nuspec
+++ /dev/null
@@ -1,29 +0,0 @@
-
-
-
- Microsoft.ChakraCore.X86
-
- $version$
- Chakra Team
- Chakra Team
- https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt
- https://github.com/Microsoft/ChakraCore
- false
- true
- ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
- https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
- Copyright (C) 2016 Microsoft
- en-US
- Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.X86.props b/Build/NuGet/Microsoft.ChakraCore.X86.props
deleted file mode 100644
index dad146def81..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.X86.props
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.nuspec b/Build/NuGet/Microsoft.ChakraCore.nuspec
deleted file mode 100644
index 27c5c2121ae..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.nuspec
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
- Microsoft.ChakraCore
-
- $version$
- Microsoft
- Chakra Team
- https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt
- https://github.com/Microsoft/ChakraCore
- false
- true
- ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
- https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
- © Microsoft Corporation. All rights reserved.
- en-US
- Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.props b/Build/NuGet/Microsoft.ChakraCore.props
deleted file mode 100644
index 575ed9d5c6e..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.props
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-
-
- x86\%(Filename)%(Extension)
- PreserveNewest
- False
-
-
- x64\%(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
-
- %(Filename)%(Extension)
- PreserveNewest
- False
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.vc140.nuspec b/Build/NuGet/Microsoft.ChakraCore.vc140.nuspec
deleted file mode 100644
index 1a9961bd5cf..00000000000
--- a/Build/NuGet/Microsoft.ChakraCore.vc140.nuspec
+++ /dev/null
@@ -1,69 +0,0 @@
-
-
-
- Microsoft.ChakraCore.vc140
-
- $version$
- Microsoft
- Chakra Team
- https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt
- https://github.com/Microsoft/ChakraCore
- false
- true
- ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
- https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
- © Microsoft Corporation. All rights reserved.
- en-US
- Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native,nativepackage,C++,vc140
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Build/NuGet/Microsoft.ChakraCore.vc140.targets b/Build/NuGet/Windows.Cpp.All/Items.targets
similarity index 76%
rename from Build/NuGet/Microsoft.ChakraCore.vc140.targets
rename to Build/NuGet/Windows.Cpp.All/Items.targets
index 35f83799d39..971cb3741bb 100644
--- a/Build/NuGet/Microsoft.ChakraCore.vc140.targets
+++ b/Build/NuGet/Windows.Cpp.All/Items.targets
@@ -2,6 +2,8 @@
+ $(MSBuildThisFileDirectory)..\..\lib\native\v140\arm64\Debug\ChakraCore.lib;%(AdditionalDependencies)
+ $(MSBuildThisFileDirectory)..\..\lib\native\v140\arm64\Release\ChakraCore.lib;%(AdditionalDependencies)
$(MSBuildThisFileDirectory)..\..\lib\native\v140\arm\Debug\ChakraCore.lib;%(AdditionalDependencies)
$(MSBuildThisFileDirectory)..\..\lib\native\v140\arm\Release\ChakraCore.lib;%(AdditionalDependencies)
$(MSBuildThisFileDirectory)..\..\lib\native\v140\x64\Debug\ChakraCore.lib;%(AdditionalDependencies)
@@ -11,8 +13,16 @@
$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)
-
+
+
+
+
+
+
+
+
+
@@ -37,4 +47,4 @@
-
\ No newline at end of file
+
diff --git a/Build/NuGet/Windows.Cpp.All/Primary.nuspec b/Build/NuGet/Windows.Cpp.All/Primary.nuspec
new file mode 100644
index 00000000000..5762de4b96d
--- /dev/null
+++ b/Build/NuGet/Windows.Cpp.All/Primary.nuspec
@@ -0,0 +1,77 @@
+
+
+
+ $id$
+ $version$
+ $description$
+ $releaseNotes$
+ $tags$
+ $CommonMetadataElements$
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $CommonFileElements$
+
+
diff --git a/Build/NuGet/Windows.DotNet.All/Primary.nuspec b/Build/NuGet/Windows.DotNet.All/Primary.nuspec
new file mode 100644
index 00000000000..dae095107aa
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.All/Primary.nuspec
@@ -0,0 +1,20 @@
+
+
+
+ $id$
+ $version$
+ $description$
+ $releaseNotes$
+ $tags$
+ $CommonMetadataElements$
+
+
+
+
+
+
+
+
+ $CommonFileElements$
+
+
diff --git a/Build/NuGet/Windows.DotNet.All/Symbols.nuspec b/Build/NuGet/Windows.DotNet.All/Symbols.nuspec
new file mode 100644
index 00000000000..0effd364479
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.All/Symbols.nuspec
@@ -0,0 +1,20 @@
+
+
+
+ $id$
+ $version$
+ $description$
+ $releaseNotes$
+ $tags$
+ $CommonMetadataElements$
+
+
+
+
+
+
+
+
+ $CommonFileElements$
+
+
diff --git a/Build/NuGet/Windows.DotNet.Arch/Install.ps1.mustache b/Build/NuGet/Windows.DotNet.Arch/Install.ps1.mustache
new file mode 100644
index 00000000000..aa280a9ea7c
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.Arch/Install.ps1.mustache
@@ -0,0 +1,19 @@
+#-------------------------------------------------------------------------------------------------------
+# Copyright (C) Microsoft. All rights reserved.
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+#-------------------------------------------------------------------------------------------------------
+
+param($installPath, $toolsPath, $package, $project)
+
+if ($project.Type -eq 'Web Site') {
+ $projectDir = $project.Properties.Item('FullPath').Value
+
+ $assemblyDestDir = Join-Path $projectDir 'bin/{{{platformArchitecture}}}'
+ if (!(Test-Path $assemblyDestDir)) {
+ New-Item -ItemType Directory -Force -Path $assemblyDestDir
+ }
+
+ $assemblySourceFiles = Join-Path $installPath 'runtimes/{{{runtimeIdentifier}}}/native/*.*'
+ Copy-Item $assemblySourceFiles $assemblyDestDir -Force
+}
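For illustration, this is roughly what the template above renders to for the x64 package, assuming the `platformArchitecture`/`runtimeIdentifier` pair `x64`/`win-x64` listed in package-data.xml later in this patch; the concrete Install.ps1 is produced by the preprocessing step, so treat this as a sketch rather than the shipped script.

```powershell
# Rendered sketch of Install.ps1 for the x64 package (illustrative only).
param($installPath, $toolsPath, $package, $project)

if ($project.Type -eq 'Web Site') {
    $projectDir = $project.Properties.Item('FullPath').Value

    # Web Site projects have no build step, so the native binaries are copied into bin/x64 directly.
    $assemblyDestDir = Join-Path $projectDir 'bin/x64'
    if (!(Test-Path $assemblyDestDir)) {
        New-Item -ItemType Directory -Force -Path $assemblyDestDir
    }

    $assemblySourceFiles = Join-Path $installPath 'runtimes/win-x64/native/*.*'
    Copy-Item $assemblySourceFiles $assemblyDestDir -Force
}
```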
diff --git a/Build/NuGet/Windows.DotNet.Arch/Items.props.mustache b/Build/NuGet/Windows.DotNet.Arch/Items.props.mustache
new file mode 100644
index 00000000000..b079edad254
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.Arch/Items.props.mustache
@@ -0,0 +1,16 @@
+
+
+
+
+ {{platformArchitecture}}\%(Filename)%(Extension)
+ PreserveNewest
+ False
+
+
+
+ %(Filename)%(Extension)
+ PreserveNewest
+ False
+
+
+
diff --git a/Build/NuGet/Windows.DotNet.Arch/Primary.nuspec b/Build/NuGet/Windows.DotNet.Arch/Primary.nuspec
new file mode 100644
index 00000000000..ae13b44e9f9
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.Arch/Primary.nuspec
@@ -0,0 +1,20 @@
+
+
+
+ $id$
+ $version$
+ $description$
+ $releaseNotes$
+ $tags$
+ $CommonMetadataElements$
+
+
+
+
+
+
+
+
+ $CommonFileElements$
+
+
diff --git a/Build/NuGet/Windows.DotNet.Arch/Symbols.nuspec b/Build/NuGet/Windows.DotNet.Arch/Symbols.nuspec
new file mode 100644
index 00000000000..ba2bc74df57
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.Arch/Symbols.nuspec
@@ -0,0 +1,20 @@
+
+
+
+ $id$
+ $version$
+ $description$
+ $releaseNotes$
+ $tags$
+ $CommonMetadataElements$
+
+
+
+
+
+
+
+
+ $CommonFileElements$
+
+
diff --git a/Build/NuGet/Windows.DotNet.Arch/Uninstall.ps1.mustache b/Build/NuGet/Windows.DotNet.Arch/Uninstall.ps1.mustache
new file mode 100644
index 00000000000..4b4df62a908
--- /dev/null
+++ b/Build/NuGet/Windows.DotNet.Arch/Uninstall.ps1.mustache
@@ -0,0 +1,19 @@
+#-------------------------------------------------------------------------------------------------------
+# Copyright (C) Microsoft. All rights reserved.
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+#-------------------------------------------------------------------------------------------------------
+
+param($installPath, $toolsPath, $package, $project)
+
+if ($project.Type -eq 'Web Site') {
+ $projectDir = $project.Properties.Item('FullPath').Value
+ $assemblySourceFiles = Join-Path $installPath 'runtimes/{{{runtimeIdentifier}}}/native/*.*'
+
+ foreach ($assemblySourceFileInfo in Get-Item($assemblySourceFiles)) {
+ $assemblyFile = Join-Path $projectDir "bin/{{{platformArchitecture}}}/$($assemblySourceFileInfo.Name)"
+ if (Test-Path $assemblyFile) {
+ Remove-Item $assemblyFile -Force
+ }
+ }
+}
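The uninstall template mirrors the install script: it removes from `bin/{{{platformArchitecture}}}` whatever was copied out of `runtimes/{{{runtimeIdentifier}}}/native`. The tokens are filled in by the PrimitiveMustacheRenderer class added below in package-classes.psm1; a minimal sketch of that substitution, using the arm64/win-arm64 values from package-data.xml (the real call site is the PreprocessableFile class, not this snippet):

```powershell
# Classes exported from a module need 'using module' rather than Import-Module.
using module ./package-classes.psm1

# Triple-brace tags are inserted verbatim (no XML escaping), which these path fragments rely on.
[PrimitiveMustacheRenderer]::RenderTemplate(
    'runtimes/{{{runtimeIdentifier}}}/native -> bin/{{{platformArchitecture}}}',
    @{ runtimeIdentifier = 'win-arm64'; platformArchitecture = 'arm64' })
# -> runtimes/win-arm64/native -> bin/arm64
```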
diff --git a/Build/NuGet/icon.png b/Build/NuGet/icon.png
new file mode 100644
index 00000000000..4c5bb21243c
Binary files /dev/null and b/Build/NuGet/icon.png differ
diff --git a/Build/NuGet/package-classes.psm1 b/Build/NuGet/package-classes.psm1
new file mode 100644
index 00000000000..cb7fdd32f4a
--- /dev/null
+++ b/Build/NuGet/package-classes.psm1
@@ -0,0 +1,226 @@
+#-------------------------------------------------------------------------------------------------------
+# Copyright (C) Microsoft. All rights reserved.
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+#-------------------------------------------------------------------------------------------------------
+
+using namespace System.Collections
+using namespace System.IO
+using namespace System.Security
+using namespace System.Text.RegularExpressions
+using namespace System.Xml
+
+Set-StrictMode -Version 5.1
+
+class PrimitiveMustacheRenderer {
+ hidden static [string]$mustacheTagPatternFormat = (
+ '(?<startDelimiter>\{{{{{0}}})\s*' +
+ '(?<tokenName>[\w-]+)' +
+ '\s*(?<endDelimiter>\}}{{{0}}})'
+ )
+ hidden static [regex]$mustacheTagsRegex = [regex]::new((
+ # Tag that is replaced with an unescaped value (e.g. `{{{name}}}`).
+ ([PrimitiveMustacheRenderer]::mustacheTagPatternFormat -f 3) +
+ '|' +
+ # Tag that is replaced with an escaped value (e.g. `{{name}}`).
+ ([PrimitiveMustacheRenderer]::mustacheTagPatternFormat -f 2)
+
+ ))
+
+ static [string]RenderTemplate([string]$template, [Hashtable]$data) {
+ return [PrimitiveMustacheRenderer]::mustacheTagsRegex.Replace(
+ $template,
+ {
+ param([Match]$match)
+
+ $startDelimiter = $match.Groups['startDelimiter'].Value
+ $endDelimiter = $match.Groups['endDelimiter'].Value
+ $tokenName = $match.Groups['tokenName'].Value
+ $tokenValue = [string]::Empty
+
+ if ($data.Contains($tokenName)) {
+ $tokenValue = $data[$tokenName]
+ if ($startDelimiter -eq '{{{' -and $endDelimiter -eq '}}}') {
+ # Skip escaping of the token value.
+ return $tokenValue
+ }
+
+ # Converts a token value into an XML-encoded string.
+ $tokenValue = [SecurityElement]::Escape($tokenValue)
+ }
+
+ return $tokenValue
+ }
+ )
+ }
+
+ static [bool]ContainsTag([string]$content) {
+ return $content.Contains('{{') -and $content.Contains('}}')
+ }
+}
+
+class Package {
+ [ValidateNotNullOrEmpty()][string]$Id
+ [ValidateNotNullOrEmpty()][string]$NuspecFile
+ [Hashtable]$Properties
+ [PreprocessableFile[]]$PreprocessableFiles
+
+ Package([string]$id, [string]$nuspecFile, [Hashtable]$properties,
+ [PreprocessableFile[]]$preprocessableFiles
+ ) {
+ $this.Id = $id
+ $this.NuspecFile = $nuspecFile
+ $this.Properties = $properties
+ $this.PreprocessableFiles = $preprocessableFiles
+ }
+
+ static [Package[]]GetPackages([string]$packageDataFile) {
+ $packageDataXml = [xml](Get-Content $packageDataFile -Encoding utf8)
+ $packageDataElem = $packageDataXml.DocumentElement
+ $commonPropertiesElem = $packageDataElem.commonProperties
+ $packageElems = $packageDataElem.packages.SelectNodes('child::*')
+
+ $packageDataDir = Split-Path -Parent $packageDataFile
+ $commonProperties = [Package]::GetPackageCommonProperties($commonPropertiesElem)
+ $packageCount = $packageElems.Count
+ $packages = [Package[]]::new($packageCount)
+
+ for ($packageIndex = 0; $packageIndex -lt $packageCount; $packageIndex++) {
+ $packageElem = $packageElems[$packageIndex]
+ $propertyElems = $null
+ if ($packageElem['properties']) {
+ $propertyElems = $packageElem.properties.SelectNodes('child::*')
+ }
+ $preprocessableFileElems = $null
+ if ($packageElem['preprocessableFiles']) {
+ $preprocessableFileElems = $packageElem.preprocessableFiles.SelectNodes('child::*')
+ }
+
+ $packageId = $packageElem.id
+ $packageNuspecFile = (Join-Path $packageDataDir $packageElem.nuspecFile)
+ $packageProperties = [Package]::ConvertXmlElementsToHashtable($propertyElems)
+ $packageProperties = [Package]::MergePackageProperties($commonProperties,
+ $packageProperties)
+ $packagePreprocessableFiles = [Package]::GetPreprocessableFiles($preprocessableFileElems,
+ $packageDataDir)
+
+ $packages[$packageIndex] = [Package]::new($packageId, $packageNuspecFile,
+ $packageProperties, $packagePreprocessableFiles)
+ }
+
+ return $packages
+ }
+
+ hidden static [Hashtable]GetPackageCommonProperties([XmlElement]$commonPropertiesElem) {
+ if (!$commonPropertiesElem) {
+ return @{}
+ }
+
+ $defaultPropertyElems = $null
+ if ($commonPropertiesElem['defaultProperties']) {
+ $defaultPropertyElems = $commonPropertiesElem.defaultProperties.SelectNodes('child::*')
+ }
+
+ $commonProperties = @{
+ CommonMetadataElements = $commonPropertiesElem.commonMetadataElements.InnerXml
+ CommonFileElements = $commonPropertiesElem.commonFileElements.InnerXml
+ }
+ $commonProperties += [Package]::ConvertXmlElementsToHashtable($defaultPropertyElems)
+
+ return $commonProperties
+ }
+
+ hidden static [PreprocessableFile[]]GetPreprocessableFiles(
+ [XmlNodeList]$fileElems,
+ [string]$baseDir
+ ) {
+ if (!$fileElems) {
+ return [PreprocessableFile[]]::new(0)
+ }
+
+ $fileCount = $fileElems.Count
+ $files = [PreprocessableFile[]]::new($fileCount)
+
+ for ($fileIndex = 0; $fileIndex -lt $fileCount; $fileIndex++) {
+ $fileElem = $fileElems[$fileIndex]
+ $srcFile = Join-Path $baseDir $fileElem.src
+ $targetFile = Join-Path $baseDir $fileElem.target
+
+ $files[$fileIndex] = [PreprocessableFile]::new($srcFile, $targetFile)
+ }
+
+ return $files
+ }
+
+ hidden static [Hashtable]MergePackageProperties(
+ [Hashtable]$commonProperties,
+ [Hashtable]$properties
+ ) {
+ $mergedProperties = $commonProperties.Clone()
+
+ foreach ($propertyName in $properties.Keys) {
+ $propertyValue = $properties[$propertyName]
+ if ($mergedProperties.ContainsKey($propertyName) `
+ -and [PrimitiveMustacheRenderer]::ContainsTag($propertyValue)
+ ) {
+ $basePropertyValue = $mergedProperties[$propertyName]
+ $propertyValue = [PrimitiveMustacheRenderer]::RenderTemplate($propertyValue,
+ @{ base = $basePropertyValue })
+ }
+ $mergedProperties[$propertyName] = $propertyValue
+ }
+
+ return $mergedProperties
+ }
+
+ hidden static [Hashtable]ConvertXmlElementsToHashtable([XmlNodeList]$elems) {
+ if (!$elems) {
+ return @{}
+ }
+
+ $hashtable = @{}
+
+ foreach ($elem in $elems) {
+ $hashtable.Add($elem.Name, $elem.'#text')
+ }
+
+ return $hashtable
+ }
+
+ [void]PreprocessFiles() {
+ foreach ($file in $this.PreprocessableFiles) {
+ $file.Preprocess($this.Properties)
+ }
+ }
+
+ [void]RemovePreprocessedFiles() {
+ foreach ($file in $this.PreprocessableFiles) {
+ if ($file.IsPreprocessed) {
+ Remove-Item $file.Target
+ }
+ }
+ }
+}
+
+class PreprocessableFile {
+ [ValidateNotNullOrEmpty()][string]$Src
+ [ValidateNotNullOrEmpty()][string]$Target
+ [bool]$IsPreprocessed
+
+ PreprocessableFile([string]$src, [string]$target) {
+ $this.Src = $src
+ $this.Target = $target
+ $this.IsPreprocessed = $false
+ }
+
+ [void]Preprocess([Hashtable]$properties) {
+ $content = Get-Content $this.Src -Raw -Encoding utf8
+ $preprocessedContent = [PrimitiveMustacheRenderer]::RenderTemplate($content, $properties)
+ $targetFile = [PrimitiveMustacheRenderer]::RenderTemplate($this.Target, $properties)
+
+ Set-Content $targetFile $preprocessedContent -Encoding utf8 -NoNewline
+
+ $this.Target = $targetFile
+ $this.IsPreprocessed = $true
+ }
+}
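The module above is consumed by the packaging script when building the NuGet packages; that script is not part of this hunk, so the following is only a hypothetical driver showing the intended flow: load the package definitions, render the Mustache inputs, pack, then clean up.

```powershell
using module ./package-classes.psm1

# Double braces XML-escape the value, triple braces insert it verbatim.
[PrimitiveMustacheRenderer]::RenderTemplate('{{tags}} / {{{tags}}}', @{ tags = 'C++ & native' })
# -> 'C++ &amp; native / C++ & native'

$packages = [Package]::GetPackages("$PSScriptRoot/package-data.xml")
foreach ($package in $packages) {
    $package.PreprocessFiles()               # render *.mustache templates into concrete files
    try {
        # A real driver would call `nuget pack` here with $package.NuspecFile and
        # $package.Properties converted into -Properties key=value pairs.
        Write-Host "Would pack $($package.Id) from $($package.NuspecFile)"
    }
    finally {
        $package.RemovePreprocessedFiles()   # remove generated files even if packing fails
    }
}
```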
diff --git a/Build/NuGet/package-data.xml b/Build/NuGet/package-data.xml
new file mode 100644
index 00000000000..472fb191fd1
--- /dev/null
+++ b/Build/NuGet/package-data.xml
@@ -0,0 +1,150 @@
+
+
+
+
+ ChakraCore is the core part of the Chakra Javascript engine that powers Microsoft Edge.
+ https://github.com/Microsoft/ChakraCore/wiki/Roadmap#release-notes
+ Chakra,ChakraCore,javascript,js,ecmascript,compiler,platform,oss,opensource,native
+
+
+ Microsoft
+ Chakra Team
+ LICENSE.txt
+ https://github.com/Microsoft/ChakraCore
+ icon.png
+ https://raw.githubusercontent.com/chakra-core/ChakraCore/master/Build/NuGet/icon.png
+ false
+ true
+ © Microsoft Corporation. All rights reserved.
+ en-US
+
+
+
+
+
+
+
+
+
+
+
+ x86
+ win-x86
+
+
+
+
+
+
+
+
+
+ x86
+ win-x86
+
+
+
+
+
+
+
+
+
+ x64
+ win-x64
+
+
+
+
+
+
+
+
+
+ x64
+ win-x64
+
+
+
+
+
+
+
+
+
+ arm
+ win-arm
+
+
+
+
+
+
+
+
+
+ arm
+ win-arm
+
+
+
+
+
+
+
+
+
+ arm64
+ win-arm64
+
+
+
+
+
+
+
+
+
+ arm64
+ win-arm64
+
+
+
+
+
+
+
+
+
+ {{{base}}},nativepackage,C++,vc140
+
+
+
+
diff --git a/Build/NuGet/package-data.xsd b/Build/NuGet/package-data.xsd
new file mode 100644
index 00000000000..901953b396e
--- /dev/null
+++ b/Build/NuGet/package-data.xsd
@@ -0,0 +1,198 @@
+
+
+
+
+ The root element of the package data
+
+
+
+
+
+
+ Contains custom common properties whose values are used to replace $-delimited tokens
+ (for example, `$description$`) in the `.nuspec` files
+
+
+
+
+
+
+
+ Contains default values for the common properties of the packages.
+ The values of these properties can be overridden by the properties of specific packages.
+ It is recommended to name these properties in camelCase style.
+
+
+
+
+
+
+
+
+
+
+
+ Defines common metadata for the packages. The XML content of this element is
+ substituted for the `$CommonMetadataElements$` token in the `.nuspec` files.
+ This XML content is completely static and cannot contain $-delimited tokens.
+
+
+
+
+
+
+
+
+
+
+
+ Defines the list of common files to include in the packages. The XML content of this
+ element is substituted for the `$CommonFileElements$` token in the `.nuspec` files.
+ This XML content is completely static and cannot contain $-delimited tokens.
+
+
+
+
+
+
+ File or files to include in the package
+
+
+
+
+
+ The location of the file or files to include relative to the `.nuspec`
+ file
+
+
+
+
+
+
+ Relative path to the directory within the package where the files will
+ be placed
+
+
+
+
+
+
+ The file or files to exclude within the `src` location
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Defines a list of data about the packages
+
+
+
+
+
+ Data about the package, on the basis of which the `.nupkg` file is created
+
+
+
+
+
+
+
+ Contains custom properties of the package that are used to replace
+ $-delimited tokens (for example, `$tags$`) in the `.nuspec` files.
+ These properties can override the common properties. A property value can
+ contain a Mustache tag with the special token name (`{{{base}}}`), which is
+ replaced with the value of the common property being overridden.
+
+
+
+
+
+
+
+
+
+
+
+ Defines the list of files that must be processed before the `.nupkg`
+ files are created
+
+
+
+
+
+
+
+ Information about the file that must be processed before creating a
+ `.nupkg` file.
+
+
+
+
+
+
+ The location of the pre-processable file relative to the XML file
+ with the package data. A pre-processable file is a primitive Mustache
+ template.
+
+
+
+
+
+
+ The location of the target file relative to the XML file with the
+ package data. The target file is created from the primitive Mustache
+ template and the properties of the package. The path to the target file
+ can contain Mustache tags that are replaced during processing with the
+ values of the corresponding package properties.
+
+
+
+
+
+
+
+
+
+
+
+ Unique identifier for the package
+
+
+
+
+
+ The location of the `.nuspec` file relative to the XML file with package data
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
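To make the `src`/`target` semantics described in the schema concrete, here is a rough usage sketch of the `PreprocessableFile` class from `package-classes.psm1`. The template file name, the `rid` property, and the tag in the target name are all hypothetical; the point is only that both the template content and the target path are rendered against the package properties.

```powershell
# Hypothetical example: 'runtime.json.template' is a primitive Mustache template and
# the target name itself contains a tag that is resolved from the package properties.
using module '.\package-classes.psm1'

$file = [PreprocessableFile]::new('runtime.json.template', 'runtime.{{{rid}}}.json')

# Preprocess reads the template, renders its content and the target path against the
# supplied properties, writes the result, and records the final target path.
$file.Preprocess(@{ rid = 'win-x64' })
$file.Target          # -> 'runtime.win-x64.json'
$file.IsPreprocessed  # -> $true
```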
diff --git a/Build/NuGet/package.ps1 b/Build/NuGet/package.ps1
index ac46a4120a7..e17495aa5bf 100644
--- a/Build/NuGet/package.ps1
+++ b/Build/NuGet/package.ps1
@@ -1,50 +1,88 @@
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
-$root = (split-path -parent $MyInvocation.MyCommand.Definition) + '\..'
+using namespace System.Text
-$packageRoot = "$root\NuGet"
-$packageVersionFile = "$packageRoot\.pack-version"
-$packageArtifacts = "$packageRoot\Artifacts"
-$targetNugetExe = "$packageRoot\nuget.exe"
+using module '.\package-classes.psm1'
-If (Test-Path $packageArtifacts)
-{
- # Delete any existing output.
- Remove-Item $packageArtifacts\*.nupkg
-}
+Set-StrictMode -Version 5.1
+
+$packageRoot = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$packageVersionFile = Join-Path $packageRoot '.pack-version'
+$packageDataFile = Join-Path $packageRoot 'package-data.xml'
+$packageArtifactsDir = Join-Path $packageRoot 'Artifacts'
+$localNugetExe = Join-Path $packageRoot 'nuget.exe'
-If (!(Test-Path $targetNugetExe))
-{
- $sourceNugetExe = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
+# helper to download file with retry
+function DownloadFileWithRetry([string]$sourceUrl, [string]$destFile, [int]$retries) {
+ $delayTimeInSeconds = 5
- Write-Host "NuGet.exe not found - downloading latest from $sourceNugetExe"
+ while ($true) {
+ try {
+ Invoke-WebRequest $sourceUrl -OutFile $destFile
+ break
+ }
+ catch {
+ Write-Host "Failed to download $sourceUrl"
- $sourceNugetExe = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
+ if ($retries -gt 0) {
+ $retries--
- Invoke-WebRequest $sourceNugetExe -OutFile $targetNugetExe
+ Write-Host "Waiting $delayTimeInSeconds seconds before retrying. Retries left: $retries"
+ Start-Sleep -Seconds $delayTimeInSeconds
+ }
+ else {
+ $exception = $_.Exception
+ throw $exception
+ }
+ }
+ }
}
-$versionStr = (Get-Content $packageVersionFile)
+# helper to create NuGet package
+function CreateNugetPackage ([Package]$package, [string]$version, [string]$outputDir) {
+ $properties = $package.Properties.Clone()
+ $properties['id'] = $package.Id
+ $properties['version'] = $version
+
+ $sb = New-Object StringBuilder
+
+ foreach ($propertyName in $properties.Keys) {
+ $propertyValue = $properties[$propertyName]
+
+ if ($sb.Length -gt 0) {
+ [void]$sb.Append(';')
+ }
+ [void]$sb.AppendFormat('{0}={1}', $propertyName, $propertyValue.Replace('"', '""'))
+ }
-Write-Host "Setting .nuspec version tag to $versionStr"
+ $propertiesStr = $sb.toString()
+ [void]$sb.Clear()
-$compiledNuspec = "$root\nuget\compiled.nuspec"
+ $package.PreprocessFiles()
+ & $localNugetExe pack $package.NuspecFile -OutputDirectory $outputDir -Properties $propertiesStr
+ $package.RemovePreprocessedFiles()
+}
+
+if (Test-Path $packageArtifactsDir) {
+ # Delete any existing output.
+ Remove-Item "$packageArtifactsDir\*.nupkg"
+}
-# Create new packages for any nuspec files that exist in this directory.
-Foreach ($nuspec in $(Get-Item $packageRoot\*.nuspec))
-{
- $content = (Get-Content $nuspec)
- $content = $content -replace '\$version\$',$versionStr
- $content | Out-File $compiledNuspec
+if (!(Test-Path $localNugetExe)) {
+ $nugetDistUrl = 'https://dist.nuget.org/win-x86-commandline/latest/nuget.exe'
- & $targetNugetExe pack $compiledNuspec -outputdirectory $packageArtifacts
+ Write-Host "NuGet.exe not found - downloading latest from $nugetDistUrl"
+ DownloadFileWithRetry $nugetDistUrl $localNugetExe -retries 3
}
-# Delete compiled temporary nuspec.
-If (Test-Path $compiledNuspec)
-{
- Remove-Item $compiledNuspec
-}
\ No newline at end of file
+# Create new NuGet packages based on data from an XML file.
+$version = (Get-Content $packageVersionFile)
+$packages = [Package]::GetPackages($packageDataFile)
+
+foreach ($package in $packages) {
+ CreateNugetPackage $package $version $packageArtifactsDir
+}
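For reference, the `-Properties` argument that `CreateNugetPackage` hands to `nuget pack` is a semicolon-joined list of `key=value` pairs with embedded double quotes doubled. The sketch below rebuilds the same kind of string with a plain `-join` instead of a `StringBuilder`; the property values are made up.

```powershell
# Illustrative property set; real values come from package-data.xml and .pack-version.
$properties = @{
    id      = 'Microsoft.ChakraCore'
    version = '1.11.24'
    tags    = 'Chakra,ChakraCore,javascript'
}

$pairs = foreach ($name in $properties.Keys) {
    '{0}={1}' -f $name, $properties[$name].Replace('"', '""')
}
$propertiesStr = $pairs -join ';'
$propertiesStr
# e.g. id=Microsoft.ChakraCore;version=1.11.24;tags=Chakra,ChakraCore,javascript
# (hashtable enumeration order is not guaranteed, so the pair order may vary)
```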
diff --git a/Build/scripts/add_msbuild_path.cmd b/Build/scripts/add_msbuild_path.cmd
index 538aa72cb6f..4c29e25c645 100644
--- a/Build/scripts/add_msbuild_path.cmd
+++ b/Build/scripts/add_msbuild_path.cmd
@@ -1,26 +1,80 @@
::-------------------------------------------------------------------------------------------------------
:: Copyright (C) Microsoft. All rights reserved.
+:: Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
:: Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
::-------------------------------------------------------------------------------------------------------
:: add_msbuild_path.cmd
::
:: Locate msbuild.exe and add it to the PATH
+@echo off
set FORCE_MSBUILD_VERSION=%1
if "%FORCE_MSBUILD_VERSION%" == "msbuild14" (
- echo Skipping Dev15 and trying Dev14...
+ echo Skipping Dev17 and trying Dev14...
goto :LABEL_USE_MSBUILD_14
)
+if "%FORCE_MSBUILD_VERSION%" == "msbuild15" (
+ echo Skipping Dev17 and trying Dev15...
+ goto :LABEL_USE_MSBUILD_15
+)
+if "%FORCE_MSBUILD_VERSION%" == "msbuild16" (
+ echo Skipping Dev17 and trying Dev16...
+ goto :LABEL_USE_MSBUILD_16
+)
where /q msbuild.exe
if "%ERRORLEVEL%" == "0" (
goto :SkipMsBuildSetup
)
-REM Try Dev15 first
+REM Try Dev17 first, then older versions
+
+echo Trying to locate Dev17...
+
+:LABEL_USE_MSBUILD_17
+set MSBUILD_VERSION=17.0
+set "MSBUILD_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio\Preview\Enterprise\MSBuild\%MSBUILD_VERSION%\Bin"
+
+if not exist "%MSBUILD_PATH%\msbuild.exe" (
+ set "MSBUILD_PATH=%ProgramFiles%\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin\amd64"
+)
+
+if not exist "%MSBUILD_PATH%\msbuild.exe" (
+ set "MSBUILD_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin"
+)
+
+if not exist "%MSBUILD_PATH%\msbuild.exe" (
+ set "MSBUILD_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin\amd64"
+)
+
+if exist "%MSBUILD_PATH%\msbuild.exe" (
+ goto :MSBuildFound
+)
+
+echo Dev17 not found, trying to locate Dev16...
+
+:LABEL_USE_MSBUILD_16
+set MSBUILD_VERSION=16.0
+set "MSBUILD_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio\Preview\Enterprise\MSBuild\%MSBUILD_VERSION%\Bin"
+
+if not exist "%MSBUILD_PATH%\msbuild.exe" (
+ set "MSBUILD_PATH=%ProgramFiles%\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin\x86"
+)
+
+if not exist "%MSBUILD_PATH%\msbuild.exe" (
+ set "MSBUILD_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin"
+)
+
+if not exist "%MSBUILD_PATH%\msbuild.exe" (
+ set "MSBUILD_PATH=%ProgramFiles(x86)%\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin\amd64"
+)
+
+if exist "%MSBUILD_PATH%\msbuild.exe" (
+ goto :MSBuildFound
+)
-echo Trying to locate Dev15...
+echo Dev16 not found, trying to locate Dev15...
:LABEL_USE_MSBUILD_15
set MSBUILD_VERSION=15.0
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 750292f30cc..cd077075d4a 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,35 +1,59 @@
cmake_minimum_required(VERSION 3.2)
project (CHAKRACORE)
-# Keep CMake from caching static/shared library
-# option. Otherwise, CMake fails to update cached
-# references
-
-# todo: create a sub cmake file to take care of _SH uncaching...
-if(SHARED_LIBRARY_SH)
+set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "-O3 -g")
+
+# Disable expected CMake workflow
+option(CHAKRACORE_BUILD_SH "Use build.sh")
+
+if(NOT CHAKRACORE_BUILD_SH)
+ option(DISABLE_JIT "Disable JIT compilation" OFF)
+ option(INTL_ICU "Enable Intl" ON)
+ option(EMBED_ICU "Build ICU within ChakraCore build" OFF)
+ set(ICU_INCLUDE_PATH "" CACHE STRING "libicu include path")
+ if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+ set(CMAKE_BUILD_TYPE "Debug" CACHE STRING "Build type" FORCE)
+ endif (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+else(NOT CHAKRACORE_BUILD_SH)
+
+ # Keep CMake from caching static/shared library
+ # option. Otherwise, CMake fails to update cached
+ # references
+
+ # todo: create a sub cmake file to take care of _SH uncaching...
+ if(SHARED_LIBRARY_SH)
unset(SHARED_LIBRARY_SH CACHE)
unset(STATIC_LIBRARY_SH CACHE)
- unset(STATIC_LIBRARY CACHE)
- set(SHARED_LIBRARY 1)
-endif()
+ unset(STATIC_LIBRARY CACHE)
+ set(SHARED_LIBRARY 1)
+ endif()
-if(STATIC_LIBRARY_SH)
- unset(SHARED_LIBRARY_SH CACHE)
- unset(STATIC_LIBRARY_SH CACHE)
- unset(SHARED_LIBRARY CACHE)
- set(STATIC_LIBRARY 1)
-endif()
+ if(STATIC_LIBRARY_SH)
+ unset(SHARED_LIBRARY_SH CACHE)
+ unset(STATIC_LIBRARY_SH CACHE)
+ unset(SHARED_LIBRARY CACHE)
+ set(STATIC_LIBRARY 1)
+ endif()
-if(LIBS_ONLY_BUILD_SH)
- unset(LIBS_ONLY_BUILD_SH CACHE)
- set(CC_LIBS_ONLY_BUILD 1)
-endif()
+ if(LIBS_ONLY_BUILD_SH)
+ unset(LIBS_ONLY_BUILD_SH CACHE)
+ set(CC_LIBS_ONLY_BUILD 1)
+ endif()
+
+ if (CLANG_SANITIZE_SH)
+ set(CLANG_SANITIZE ${CLANG_SANITIZE_SH})
+ unset(CLANG_SANITIZE_SH CACHE)
+ endif()
-if(CC_USES_SYSTEM_ARCH_SH)
+endif(NOT CHAKRACORE_BUILD_SH)
+
+if(CC_USES_SYSTEM_ARCH_SH OR NOT CHAKRACORE_BUILD_SH)
if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64")
set(CC_TARGETS_AMD64_SH 1)
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "armv7l")
set(CC_TARGETS_ARM_SH 1)
+ elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64")
+ set(CC_TARGETS_ARM64_SH 1)
endif()
unset(CC_USES_SYSTEM_ARCH_SH CACHE)
endif()
@@ -40,11 +64,16 @@ elseif(CC_TARGETS_ARM_SH)
set(CC_TARGETS_ARM 1)
add_definitions(-D_ARM_=1)
set(CMAKE_SYSTEM_PROCESSOR "armv7l")
+elseif(CC_TARGETS_ARM64_SH)
+ add_definitions(-D_ARM64_=1)
+ add_definitions(-D__arm64__=1)
+ set(CC_TARGETS_ARM64 1)
+ set(CMAKE_SYSTEM_PROCESSOR "arm64")
elseif(CC_TARGETS_X86_SH)
set(CC_TARGETS_X86 1)
set(CMAKE_SYSTEM_PROCESSOR "i386")
else()
- message(FATAL_ERROR "Couldn't detect target processor, try `--arch` argument with build.sh")
+ message(FATAL_ERROR "Unsupported target processor: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
unset(CC_TARGETS_ARM_SH CACHE)
@@ -75,6 +104,7 @@ if(ICU_SETTINGS_RESET)
unset(NO_ICU_SH CACHE)
unset(LOCAL_ICU_SH CACHE)
unset(SYSTEM_ICU_SH CACHE)
+ unset(EMBED_ICU_SH CACHE)
endif()
if(CC_TARGET_OS_ANDROID_SH)
@@ -125,15 +155,57 @@ if(SYSTEM_ICU_SH)
unset(SYSTEM_ICU_SH CACHE)
endif()
-if(INTL_ICU_SH)
- unset(INTL_ICU_SH CACHE)
- set(INTL_ICU 1)
-else()
- unset(INTL_ICU_SH CACHE)
- set(INTL_ICU 0)
+if(CHAKRACORE_BUILD_SH)
+ if(INTL_ICU_SH)
+ unset(INTL_ICU_SH CACHE)
+ set(INTL_ICU 1)
+ else()
+ unset(INTL_ICU_SH CACHE)
+ set(INTL_ICU 0)
+ endif()
+endif(CHAKRACORE_BUILD_SH)
+
+if(EMBED_ICU_SH)
+ set(EMBED_ICU 1)
+ unset(EMBED_ICU_SH CACHE)
endif()
-if(ICU_INCLUDE_PATH)
+if(EMBED_ICU AND ICU_INCLUDE_PATH)
+ message(FATAL_ERROR "Embedded ICU and ICU include path cannot be set at the same time")
+endif()
+
+if(EMBED_ICU)
+ # Keep consistent with what ICU download script used to print
+ message("Note: ICU installation and use is subject to it's publisher's licensing terms")
+
+ set(ICU_PREFIX ${CMAKE_CURRENT_SOURCE_DIR}/deps/thirdparty/icu)
+ set(ICU_DOWNLOAD_DIR ${ICU_PREFIX}/download)
+ set(ICU_SOURCE_DIR ${ICU_PREFIX}/stage)
+ set(EMBEDDED_ICU_TARGET icu4c)
+ set(ICU_INCLUDE_PATH ${ICU_PREFIX}/include)
+ set(ICU_LIBRARY_PATH ${ICU_PREFIX}/lib)
+ add_definitions(-DHAS_REAL_ICU=1)
+ add_definitions(-DHAS_ICU)
+ add_definitions(-DINTL_ICU=1)
+ set(ICU_LIBRARIES
+ ${ICU_LIBRARY_PATH}/libicuuc.a
+ ${ICU_LIBRARY_PATH}/libicui18n.a
+ ${ICU_LIBRARY_PATH}/libicudata.a
+ )
+
+ include(ExternalProject)
+ ExternalProject_Add(${EMBEDDED_ICU_TARGET}
+ PREFIX ${ICU_PREFIX}
+ DOWNLOAD_DIR ${ICU_DOWNLOAD_DIR}
+ SOURCE_DIR ${ICU_SOURCE_DIR}
+ URL https://github.com/unicode-org/icu/releases/download/release-63-2/icu4c-63_2-src.tgz
+ URL_HASH SHA512=5fa9092efd8d6da6dfc8d498e4026167fda43423eaafc754d1789cf8fd4f6e76377878ebcaa32e14f314836136b764873511a93bfbcc5419b758841cc6df8f32
+ CONFIGURE_COMMAND ${ICU_SOURCE_DIR}/source/configure --prefix=${ICU_PREFIX} --with-data-packaging=static --enable-static --disable-shared --with-library-bits=64 --disable-icuio --disable-layout --disable-tests --disable-samples
+ BUILD_COMMAND make STATICCFLAGS="-fPIC" STATICCXXFLAGS="-fPIC" STATICCPPFLAGS="-DPIC"
+ INSTALL_COMMAND make install
+ BYPRODUCTS ${ICU_LIBRARIES}
+ )
+elseif(ICU_INCLUDE_PATH)
add_definitions(-DHAS_REAL_ICU=1)
add_definitions(-DHAS_ICU)
set(ICU_LIBRARY_PATH "${ICU_INCLUDE_PATH}/../lib/")
@@ -214,8 +286,15 @@ elseif(CC_TARGETS_ARM)
# reduce link time memory usage
set(LINKER_REDUCED_MEMORY "-Xlinker --no-keep-memory")
endif()
+elseif(CC_TARGETS_ARM64)
+ add_definitions(-D__aarch64__)
+ add_definitions(-DTARGET_64)
+ add_definitions(-D_M_ARM32_OR_ARM64)
+ if(CC_TARGET_OS_OSX)
+ add_compile_options(-arch arm64)
+ endif()
else()
- message(FATAL_ERROR "Only AMD64, ARM and I386 are supported")
+ message(FATAL_ERROR "Only AMD64, ARM, ARM64 and I386 are supported")
endif()
if(CAN_BUILD_WABT)
@@ -253,7 +332,7 @@ elseif(CC_TARGET_OS_OSX)
endif()
endif()
else()
- message(FATAL_ERROR "This OS is not supported")
+ message(FATAL_ERROR "Unsupported OS: ${CMAKE_SYSTEM_NAME}")
endif()
if (CMAKE_CXX_COMPILER_ID STREQUAL AppleClang
@@ -282,7 +361,10 @@ if(CLR_CMAKE_PLATFORM_XPLAT)
if(CC_TARGETS_AMD64)
set(IS_64BIT_BUILD 1)
add_definitions(-D_M_X64 -D_M_AMD64 -D_AMD64_)
- endif(CC_TARGETS_AMD64)
+ elseif(CC_TARGETS_ARM64)
+ set(IS_64BIT_BUILD 1)
+ add_definitions(-D_M_ARM64 -D_ARM64_)
+ endif()
add_definitions(
-DUNICODE
@@ -292,11 +374,6 @@ if(CLR_CMAKE_PLATFORM_XPLAT)
set(CMAKE_CXX_STANDARD 11)
- # CC WARNING FLAGS
- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} \
- -Wno-implicit-function-declaration"
- )
-
# todo: fix general visibility of the interface
# do not set to `fvisibility=hidden` as it is going to
# prevent the required interface is being exported
@@ -305,9 +382,7 @@ if(CLR_CMAKE_PLATFORM_XPLAT)
# CXX WARNING FLAGS
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} \
-Wno-ignored-attributes\
- -Wno-deprecated-declarations\
-Wno-parentheses-equality\
- -Wno-missing-braces\
-Wno-reorder\
-Wno-microsoft\
-Wno-unused-value\
@@ -328,7 +403,6 @@ if(CLR_CMAKE_PLATFORM_XPLAT)
-Wno-null-conversion\
-Wno-return-type\
-Wno-switch\
- -Wno-implicit-function-declaration\
-Wno-int-to-pointer-cast\
-Wno-tautological-constant-compare\
-Wno-enum-compare-switch\
@@ -402,19 +476,12 @@ if(CMAKE_BUILD_TYPE STREQUAL Debug)
-D_DEBUG=1 # for PAL
-DDBG_DUMP=1
)
-elseif(CMAKE_BUILD_TYPE STREQUAL Test)
+elseif(CMAKE_BUILD_TYPE STREQUAL RelWithDebInfo)
add_definitions(
-DENABLE_DEBUG_CONFIG_OPTIONS=1
)
- add_compile_options(-g)
endif(CMAKE_BUILD_TYPE STREQUAL Debug)
-if(NOT CMAKE_BUILD_TYPE STREQUAL Debug)
- add_compile_options(-O3)
-else()
- add_compile_options(-O0)
-endif(NOT CMAKE_BUILD_TYPE STREQUAL Debug)
-
if(IS_64BIT_BUILD)
add_definitions(
-DBIT64=1
@@ -426,6 +493,9 @@ if(NO_JIT_SH)
unset(NO_JIT_SH CACHE) # don't cache
unset(BuildJIT CACHE) # also clear it just in case
add_definitions(-DDISABLE_JIT=1)
+elseif(DISABLE_JIT)
+ set(BuildJIT 0)
+ add_definitions(-DDISABLE_JIT=1)
else()
set(BuildJIT 1)
endif()
@@ -448,6 +518,16 @@ else()
set(DYN_LIB_EXT "so")
endif()
+if(CC_TARGETS_ARM64)
+ if(CC_TARGET_OS_LINUX)
+ message(WARNING "ARM64 linux build has not yet been tested, this build is unsupported.")
+ endif()
+ if(BuildJIT)
+ message(WARNING "ARM64 Jit not yet functional on platforms other than windows.")
+ message(WARNING "For use rather than development please build with Jit disabled --no-jit with ./build.sh or -DDISABLE_JIT=1 if using CMake directly")
+ endif()
+endif()
+
################# Write-barrier check/analyze ##################
if (WB_CHECK_SH OR WB_ANALYZE_SH)
add_definitions(
@@ -499,12 +579,11 @@ endif()
include_directories(SYSTEM /usr/local/include)
include(pal/src/configure.cmake)
-# this should be after `detect feature` to not to affect feature detection
-# Clang -fsanitize.
-if (CLANG_SANITIZE_SH)
+# Clang sanitizer support; this should come after `detect feature` so that it does
+# not affect feature detection
+if (CLANG_SANITIZE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=${CLANG_SANITIZE_SH}")
set(CMAKE_CXX_LINK_FLAGS "${CMAKE_CXX_LINK_FLAGS} -fsanitize=${CLANG_SANITIZE_SH}")
- unset(CLANG_SANITIZE_SH CACHE) # don't cache
endif()
add_subdirectory (pal)
@@ -532,3 +611,5 @@ endif()
add_subdirectory (lib)
add_subdirectory (bin)
+
+add_subdirectory(test)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a12f80b8f7f..2e89cfcbf50 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,17 +8,20 @@ These two blogs posts on contributing code to open source projects are a good re
## Security
-If you believe you have found a security issue in ChakraCore, please share it with us privately following the guidance at the Microsoft [Security TechCenter](https://technet.microsoft.com/en-us/security/ff852094). Reporting it via this channel helps minimize risk to projects built with ChakraCore.
+If you believe you have found a security issue in ChakraCore 1.11, please share it with Microsoft privately following the guidance at the Microsoft [Security TechCenter](https://technet.microsoft.com/en-us/security/ff852094). Reporting it via this channel helps minimize risk to projects built with ChakraCore.
+
+If you find a security issue in the Master branch of ChakraCore but not in 1.11, please join our Discord server and send a private message to one of the core team members.
## Legal
-You will need to complete a Contributor License Agreement (CLA) before your pull request can be accepted. This agreement testifies that you are granting us permission to use the source code you are submitting, and that this work is being submitted under appropriate license that we can use it.
+You will need to complete a Contribution Agreement before your pull request can be accepted. This agreement testifies that you are granting us permission to use the source code you are submitting, and that this work is being submitted under an appropriate license that allows us to use it.
-You can complete the CLA by going through the steps at https://cla.microsoft.com. Once we have received the signed CLA, we'll review the request. You will only need to do this once.
+You can read the agreement here: [Contribution Agreement](ContributionAgreement.md)
## Housekeeping
Your pull request should:
+
* Include a description of what your change intends to do
* Be a child commit of a reasonably recent commit in the master branch
* Pass all unit tests
@@ -28,15 +31,17 @@ Your pull request should:
* Tests should include reasonable permutations of the target fix/change
* Include baseline changes with your change
-Submissions that have met these requirements will be assigned to a ChakraCore team member for additional testing. Submissions must meet functional and performance expectations, including meeting requirements in scenarios for which the team doesn’t yet have open source tests. This means you may be asked to fix and resubmit your pull request against a new open test case if it fails one of these tests. The ChakraCore team may verify your change by crawling the web with your change built into Chakra. Failures discovered when testing with this technique will not be analyzed by the team, but we will do our best to communicate the issue discovered to you. This approach needs further refinement, we acknowledge.
+Submissions that have met these requirements will be reviewed by a core contributor. Submissions must meet functional and performance expectations, including meeting requirements in scenarios for which the team doesn’t yet have open source tests. This means you may be asked to fix and resubmit your pull request against a new open test case if it fails one of these tests.
-ChakraCore is an organically grown codebase. The consistency of style reflects this. For the most part, the team follows these [coding conventions](https://github.com/Microsoft/ChakraCore/wiki/Coding-Convention). Contributors should also follow them when making submissions. Otherwise, follow the general coding conventions adhered to in the code surrounding your changes. Pull requests that reformat the code will not be accepted.
+ChakraCore is an organically grown codebase. The consistency of style reflects this. For the most part, the team follows these [coding conventions](https://github.com/chakra-core/ChakraCore/wiki/Coding-Convention). Contributors should also follow them when making submissions. Otherwise, follow the general coding conventions adhered to in the code surrounding your changes. Pull requests that reformat the code will not be accepted.
## Running the tests
-The unit tests can be run by following these steps:
-* Choose a build configuration to build and test, e.g. debug and x64.
-* Build `Chakra.Core.sln` for that config.
+The unit tests can be run offline by following these steps:
+
+### a) Windows
+
+* Build `Chakra.Core.sln` for the version of ChakraCore you wish to test, e.g. x64 Debug.
* Specifically, running tests requires that `rl.exe`, `ch.exe`, and `ChakraCore.dll` be built.
* Call `test\runtests.cmd` and specify the build config
@@ -49,20 +54,25 @@ For full coverage, please run unit tests against debug and test for both x86 and
* `test\runtests.cmd -x86test`
`runtests.cmd` can take more switches that are useful for running a subset of tests. Read the script file for more information.
-
`runtests.cmd` looks for the build output in the default build output folder `Build\VcBuild\bin`. If the build output path is changed from this default then use the `-bindir` switch to specify that path.
-## Code Flow into Microsoft Edge
+### b) Linux or macOS
+
+Build the version of ChakraCore you wish to test - either a Debug or Test (RelWithDebInfo) build. You will need the ChakraCore library and the `ch` application built.
+
+If building with `cmake`, you can then use the `make check` or `ninja check` command to run the test suite.
+Alternatively, you can run `test/runtests.py` directly; you'll need to specify `-t` (Test build) or `-d` (Debug build).
-Changes that make it into our ChakraCore GitHub master branch have a short journey to Chakra.dll. Code flows daily from GitHub to the internal repository from which builds of Chakra.dll are produced and then it flows into Windows and Microsoft Edge. While code flows quickly on this first leg of the journey, code flow from our internal branch to a Windows flighting branch is subject to any number of delays. So it is difficult to predict when your change in our GitHub repo will make it into a particular Windows flight.
+`runtests.py` can take more switches that are useful for running a subset of tests. Read the script file for more information.
+`runtests.py` looks for the build output in the default build output folder `out/test/ch` or `out/debug/ch`. If you've used a different path, use `--binary=path` to specify it.
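+
+For example (illustrative invocations; adjust the paths to your own build):
+
+```
+python test/runtests.py -t                      # Test (RelWithDebInfo) build in the default location
+python test/runtests.py -d --binary=mybuild/ch  # Debug build with an explicit path to ch
+```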
## Issue Labels
- - [`help wanted`](https://github.com/Microsoft/ChakraCore/labels/help%20wanted): these issues are specifically well suited for outside contributors.
- - [`good first issue`](https://github.com/Microsoft/ChakraCore/labels/good%20first%20issue): these issues are small and appropriate for people who wish to familiarize themselves with GitHub pull requests and/or ChakraCore's contributor guidelines, build process, and running tests. We're here to help you get started in open source.
+ - [`help wanted`](https://github.com/chakra-core/ChakraCore/labels/help%20wanted): these issues are specifically well suited for outside contributors.
+ - [`good first issue`](https://github.com/chakra-core/ChakraCore/labels/good%20first%20issue): these issues are small and appropriate for people who wish to familiarize themselves with GitHub pull requests and/or ChakraCore's contributor guidelines, build process, and running tests. We're here to help you get started in open source.
-You are welcome to work on issues that are not tagged with these labels. However, issues without these labels are often deeply involved with the requirements of the various components of ChakraCore. Therefore, please be sure to touch base with a maintainer via comments on the issue before attempting to solve it.
+You are welcome to work on issues that are not tagged with these labels. However, issues without these labels may be fairly complex, so please discuss with a core team member via comments on the issue before attempting to solve it.
-Remember, for all issues you choose to work on please communicate on the issue that you are claiming it to avoid duplicated work.
+For all issues you choose to work on please communicate on the issue that you are claiming it to avoid duplicated work.
To learn more about our GitHub labels see the [Label Glossary](https://github.com/Microsoft/ChakraCore/wiki/Label-Glossary) on our wiki page.
diff --git a/ContributionAgreement.md b/ContributionAgreement.md
new file mode 100644
index 00000000000..7d619b02ce2
--- /dev/null
+++ b/ContributionAgreement.md
@@ -0,0 +1,47 @@
+# Contributor Agreement
+
+All contributors to ChakraCore must digitally sign the below agreement unless their contribution is done on behalf of or licensed to Microsoft and covered by the MIT license in LICENSE.txt
+
+To sign the agreement please submit a commit that adds your name and github username to the bottom of this file.
+
+Anything I contribute to the ChakraCore repository is comprised of one or more of:
+
+1. My own work
+2. Existing code from the ChakraCore repository
+3. The work of a group of individuals all of whom agree to (and have signed) these terms
+4. Third party work licensed under terms that enable it to be incorporated into the ChakraCore project
+
+In the case of option 4 I will discuss this Third Party submission with the ChakraCore core contributors before submitting it.
+
+I agree that all contributions I submit will be included in ChakraCore and licensed under the license in LICENSE.txt.
+
+I surrender any rights that would prevent my contribution from being redistributed and used under the license in LICENSE.txt.
+
+I accept that future users of the ChakraCore including my contributions will not be required to give me any acknowledgement except as a "ChakraCore Project Contributor" as mentioned in LICENSE.txt.
+
+I agree that I will never pursue royalties or patent claims related to any contribution to ChakraCore.
+
+I confirm that I have the right to submit this contribution and it can be redistributed under the license in LICENSE.txt.
+
+I agree that a record of my contribution including comments and my name may be retained and displayed publicly in the ChakraCore repository.
+
+If I make any contributions in the course of my employment, then either:
+a) my employer has given me sufficient rights over the work I have done to submit it under the above terms or
+b) my employer has agreed to these terms and signed the agreement
+
+This agreement has been signed by:
+
+| Name | Github username |
+|---|---|
+|Richard Lawrence| rhuanjl|
+|Andrey Taritsyn| Taritsyn|
+|Sasha Syrotenko| Fly-Style|
+|Petr Penzin| ppenzin|
+|Yevhen Lukomskyi|ylukomskyi|
+|Evgeniy Istomin|MadProbe|
+|Wenlu Wang| Kingwl|
+|Kevin Cadieux|kevcadieux|
+|Aidan Bickford| BickfordA|
+|Ryoichi Kaida| camcam-lemon|
+|Lukas Kurz| ShortDevelopment|
+|Paul Pluzhnikov|EmployedRussian|
diff --git a/LICENSE.txt b/LICENSE.txt
index d5ced9877c2..a69ad5ae9d4 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -2,6 +2,7 @@ The MIT License (MIT)
Copyright (c) Microsoft Corporation
All rights reserved.
+Copyright (c) ChakraCore Project Contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 4bd2a370869..16a3fa19427 100644
--- a/README.md
+++ b/README.md
@@ -1,98 +1,38 @@
# ChakraCore
-[](https://gitter.im/Microsoft/ChakraCore?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+[](https://discord.gg/dgRawPdNuC)
[](https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt)
+[](#contribute)
-ChakraCore is the core part of Chakra, the high-performance JavaScript engine that powers Microsoft Edge and Windows applications written in HTML/CSS/JS. ChakraCore supports Just-in-time (JIT) compilation of JavaScript for x86/x64/ARM, garbage collection, and a wide range of the latest JavaScript features. ChakraCore also supports the [JavaScript Runtime (JSRT) APIs](https://github.com/Microsoft/ChakraCore/wiki/JavaScript-Runtime-%28JSRT%29-Overview), which allows you to easily embed ChakraCore in your applications.
-
-You can stay up-to-date on progress by following the [MSEdge developer blog](https://blogs.windows.com/msedgedev/).
-
-## [Build Status](https://github.com/Microsoft/ChakraCore/wiki/Build-Status)
-
-| | __Debug__ | __Test__ | __Release__ |
-|:-----------------------------:|:---------:|:--------:|:-----------:|
-| __Windows (x64)__ | [![x64debug][x64dbgicon]][x64dbglink] | [![x64test][x64testicon]][x64testlink] | [![x64release][x64relicon]][x64rellink] |
-| __Windows (x86)__ | [![x86debug][x86dbgicon]][x86dbglink] | [![x86test][x86testicon]][x86testlink] | [![x86release][x86relicon]][x86rellink] |
-| __Windows (ARM)__ | [![armdebug][armdbgicon]][armdbglink] | [![armtest][armtesticon]][armtestlink] | [![armrelease][armrelicon]][armrellink] |
-| __Ubuntu 16.04 (x64)[a]__ | [![linux_a_debug][linux_a_dbgicon]][linux_a_dbglink] | [![linux_a_test][linux_a_testicon]][linux_a_testlink] | [![linux_a_release][linux_a_relicon]][linux_a_rellink] |
-| __Ubuntu 16.04 (x64)[s]__ | [![linux_s_debug][linux_s_dbgicon]][linux_s_dbglink] | [![linux_s_test][linux_s_testicon]][linux_s_testlink] | [![linux_s_release][linux_s_relicon]][linux_s_rellink] |
-| __Ubuntu 16.04 (x64)[s][n]__ | * | [![linux_sn_test][linux_sn_testicon]][linux_sn_testlink] | * |
-| __OS X 10.9 (x64)[a]__ | [![osx_a_debug][osx_a_dbgicon]][osx_a_dbglink] | [![osx_a_test][osx_a_testicon]][osx_a_testlink] | [![osx_a_release][osx_a_relicon]][osx_a_rellink] |
-| __OS X 10.9 (x64)[s][n]__ | * | [![osx_sn_test][osx_sn_testicon]][osx_sn_testlink] | * |
-
-[a] Static | [s] Shared | [n] NoJIT | * Omitted
-
-[x64dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x64_debug/badge/icon
-[x64dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x64_debug/
-[x64testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x64_test/badge/icon
-[x64testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x64_test/
-[x64relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x64_release/badge/icon
-[x64rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x64_release/
-
-[x86dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x86_debug/badge/icon
-[x86dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x86_debug/
-[x86testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x86_test/badge/icon
-[x86testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x86_test/
-[x86relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x86_release/badge/icon
-[x86rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/x86_release/
-
-[armdbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/arm_debug/badge/icon
-[armdbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/arm_debug/
-[armtesticon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/arm_test/badge/icon
-[armtestlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/arm_test/
-[armrelicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/arm_release/badge/icon
-[armrellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/arm_release/
-
-[linux_a_dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_ubuntu_linux_debug/badge/icon
-[linux_a_dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_ubuntu_linux_debug/
-[linux_a_testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_ubuntu_linux_test/badge/icon
-[linux_a_testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_ubuntu_linux_test/
-[linux_a_relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_ubuntu_linux_release/badge/icon
-[linux_a_rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_ubuntu_linux_release/
-
-[linux_s_dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/shared_ubuntu_linux_debug/badge/icon
-[linux_s_dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/shared_ubuntu_linux_debug/
-[linux_s_testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/shared_ubuntu_linux_test/badge/icon
-[linux_s_testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/shared_ubuntu_linux_test/
-[linux_s_relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/shared_ubuntu_linux_release/badge/icon
-[linux_s_rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/shared_ubuntu_linux_release/
-
-[linux_sn_dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_ubuntu_linux_debug/badge/icon
-[linux_sn_dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_ubuntu_linux_debug/
-[linux_sn_testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_ubuntu_linux_test/badge/icon
-[linux_sn_testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_ubuntu_linux_test/
-[linux_sn_relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_ubuntu_linux_release/badge/icon
-[linux_sn_rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_ubuntu_linux_release/
-
-[osx_a_dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_osx_osx_debug/badge/icon
-[osx_a_dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_osx_osx_debug/
-[osx_a_testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_osx_osx_test/badge/icon
-[osx_a_testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_osx_osx_test/
-[osx_a_relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_osx_osx_release/badge/icon
-[osx_a_rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/static_osx_osx_release/
-
-[osx_sn_dbgicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_osx_osx_debug/badge/icon
-[osx_sn_dbglink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_osx_osx_debug/
-[osx_sn_testicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_osx_osx_test/badge/icon
-[osx_sn_testlink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_osx_osx_test/
-[osx_sn_relicon]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_osx_osx_release/badge/icon
-[osx_sn_rellink]: https://ci.dot.net/job/Microsoft_ChakraCore/job/master/job/_no_jit_shared_osx_osx_release/
-
-Above is a table of our rolling build status. We run additional builds on a daily basis. See [Build Status](https://github.com/Microsoft/ChakraCore/wiki/Build-Status) for the status of all builds and additional details.
+ChakraCore is a JavaScript engine with a C API that you can use to add JavaScript support to any C or C-compatible project. It can be compiled for x64 processors on Linux, macOS, and Windows, and for x86 and ARM processors on Windows only. Supporting x86 and ARM on Linux, and ARM on macOS, is a future goal.
+
+## Future of ChakraCore
+
+As you may have heard, Microsoft Edge no longer uses Chakra. Microsoft will continue to provide security updates for ChakraCore 1.11 until 9th March 2021 but does not intend to support it after that.
+
+ChakraCore is planned to continue as a community project targeted primarily at embedded use cases. We hope to produce future releases with new features and enhancements to support such use cases, and we invite any interested parties to get involved in the project. For further details, please see the following draft planning documents:
+[Overall plan](https://github.com/chakra-core/org/blob/master/ChakraCore%20Future%20Plan.md)
+[Version 1.12 plan](https://github.com/chakra-core/org/blob/master/Release%201.12%20plan.md)
+
+Also see discussion in issue [#6384](https://github.com/microsoft/ChakraCore/issues/6384)
+
+If you'd like to contact the community team please either open an issue or join the Discord chat linked above.
## Security
-If you believe you have found a security issue in ChakraCore, please share it with us privately following the guidance at the Microsoft [Security TechCenter](https://technet.microsoft.com/en-us/security/ff852094). Reporting it via this channel helps minimize risk to projects built with ChakraCore.
+If you believe you have found a security issue in ChakraCore 1.11, please share it with Microsoft privately following the guidance at the Microsoft [Security TechCenter](https://technet.microsoft.com/en-us/security/ff852094). Reporting it via this channel helps minimize risk to projects built with ChakraCore.
+
+If you find a security issue in the Master branch of ChakraCore but not in 1.11, please join our Discord server and send a private message to one of the core team members.
## Documentation
-* [ChakraCore Architecture](https://github.com/Microsoft/ChakraCore/wiki/Architecture-Overview)
-* [Quickstart Embedding ChakraCore](https://github.com/Microsoft/ChakraCore/wiki/Embedding-ChakraCore)
-* [JSRT Reference](https://github.com/Microsoft/ChakraCore/wiki/JavaScript-Runtime-%28JSRT%29-Reference)
+* [ChakraCore Architecture](https://github.com/chakra-core/ChakraCore/wiki/Architecture-Overview)
+* [Quickstart Embedding ChakraCore](https://github.com/chakra-core/ChakraCore/wiki/Embedding-ChakraCore)
+* [API Reference](https://github.com/chakra-core/ChakraCore/wiki/JavaScript-Runtime-%28JSRT%29-Reference)
* [Contribution guidelines](CONTRIBUTING.md)
-* [Blogs, talks and other resources](https://github.com/Microsoft/ChakraCore/wiki/Resources)
+* [Blogs, talks and other resources](https://github.com/chakra-core/ChakraCore/wiki/Resources)
-## [Building ChakraCore](https://github.com/Microsoft/ChakraCore/wiki/Building-ChakraCore)
+## Building ChakraCore
You can build ChakraCore on Windows 7 SP1 or above, and Windows Server 2008 R2 or above, with either Visual Studio 2015 or 2017 with C++ support installed. Once you have Visual Studio installed:
@@ -100,7 +40,10 @@ You can build ChakraCore on Windows 7 SP1 or above, and Windows Server 2008 R2 o
* Open `Build\Chakra.Core.sln` in Visual Studio
* Build Solution
-More details in [Building ChakraCore](https://github.com/Microsoft/ChakraCore/wiki/Building-ChakraCore).
+On macOS, you can build ChakraCore with the Xcode command line tools and `cmake`.
+On Linux, you can build ChakraCore with `cmake` and `ninja`.
+
+More details in [Building ChakraCore](https://github.com/chakra-core/ChakraCore/wiki/Building-ChakraCore).
Alternatively, see [Getting ChakraCore binaries](https://github.com/Microsoft/ChakraCore/wiki/Getting-ChakraCore-binaries) for pre-built ChakraCore binaries.
@@ -108,38 +51,32 @@ Alternatively, see [Getting ChakraCore binaries](https://github.com/Microsoft/Ch
Once built, you have a few options for how you can use ChakraCore:
-* The most basic is to test the engine is running correctly with the *ch.exe* binary. This app is a lightweight hosting of JSRT that you can use to run small applications. After building, you can find this binary in:
- * `Build\VcBuild\bin\${platform}_${configuration}`
- * (e.g. `Build\VcBuild\bin\x64_debug`)
-* You can [embed ChakraCore](https://github.com/Microsoft/ChakraCore/wiki/Embedding-ChakraCore) in your applications - see [documentation](https://github.com/Microsoft/ChakraCore/wiki/Embedding-ChakraCore) and [samples](https://aka.ms/chakracoresamples).
-* Finally, you can also use ChakraCore as the JavaScript engine in Node. You can learn more by reading how to use [Chakra as Node's JS engine](https://github.com/Microsoft/node)
+* The most basic option is to check that the engine runs correctly with the application *ch.exe* (*ch* on Linux or macOS). This app is a lightweight host of ChakraCore that you can use to run small applications. After building, you can find this binary in:
+ * Windows: `Build\VcBuild\bin\${platform}_${configuration}` (e.g. `Build\VcBuild\bin\x64_debug`)
+ * macOS/Linux: `buildFolder/config/ch` (e.g. `out/Release/ch`)
+* You can [embed ChakraCore](https://github.com/chakra-core/ChakraCore/wiki/Embedding-ChakraCore) in your applications - see [documentation](https://github.com/chakra-core/ChakraCore/wiki/Embedding-ChakraCore) and [samples](https://aka.ms/chakracoresamples).
-_A note about using ChakraCore_: ChakraCore is the foundational JavaScript engine, but it does not include the external APIs that make up the modern JavaScript development experience. For example, DOM APIs like ```document.write()``` are additional APIs that are not available by default and would need to be provided. For debugging, you may instead want to use ```print()```.
+_A note about using ChakraCore_: ChakraCore is a JavaScript engine; it does not include the external APIs that are provided by a web browser or Node.js. For example, DOM APIs like ```document.write()``` are not provided by ChakraCore, so when embedding ChakraCore in an application you will need to implement your own input and output APIs. For debugging, in `ch` you can use ```print()``` to write text to the terminal.
-## [Contribute](CONTRIBUTING.md)
+Alternatively, if you are using the [vcpkg](https://github.com/Microsoft/vcpkg/) dependency manager you can download and install ChakraCore with CMake integration in a single command:
+* vcpkg install chakracore
-Contributions to ChakraCore are welcome. Here is how you can contribute to ChakraCore:
+## Contribute
-* [Submit bugs](https://github.com/Microsoft/ChakraCore/issues) and help us verify fixes (please refer to [External Issues](https://github.com/Microsoft/ChakraCore/wiki/External-Issues) for anything external, such as Microsoft Edge or Node-ChakraCore issues)
-* [Submit pull requests](https://github.com/Microsoft/ChakraCore/pulls) for bug fixes and features and discuss existing proposals
-* Chat about [@ChakraCore](https://twitter.com/ChakraCore) on Twitter
+Contributions to ChakraCore are welcome. Here is how you can contribute to ChakraCore:
-This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+* [Submit bugs](https://github.com/chakra-core/ChakraCore/issues) and help us verify fixes.
+* [Submit pull requests](https://github.com/chakra-core/ChakraCore/pulls) for bug fixes and features and discuss existing proposals
Please refer to [Contribution Guidelines](CONTRIBUTING.md) for more details.
-## [License](https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt)
-
-Code licensed under the [MIT License](https://github.com/Microsoft/ChakraCore/blob/master/LICENSE.txt).
-
-## [Roadmap](https://github.com/Microsoft/ChakraCore/wiki/Roadmap)
+## License
-For details on our planned features and future direction please refer to our [Roadmap](https://github.com/Microsoft/ChakraCore/wiki/Roadmap).
+Code licensed under the [MIT License](https://github.com/chakra-core/ChakraCore/blob/master/LICENSE.txt).
## Contact Us
If you have questions about ChakraCore, or you would like to reach out to us about an issue you're having or for development advice as you work on a ChakraCore issue, you can reach us as follows:
-* Open an [issue](https://github.com/Microsoft/ChakraCore/issues/new) and prefix the issue title with [Question]. See [Question](https://github.com/Microsoft/ChakraCore/issues?q=label%3AQuestion) tag for already-opened questions.
-* Discuss ChakraCore with the team and the community on our [Gitter Channel](https://gitter.im/Microsoft/ChakraCore).
-* You can also start private messages with individual ChakraCore developers via Gitter.
+* Open an [issue](https://github.com/chakra-core/ChakraCore/issues/new) and prefix the issue title with [Question]. See [Question](https://github.com/chakra-core/ChakraCore/issues?q=label%3AQuestion) tag for already-opened questions.
+* Discuss ChakraCore with the team and the community via the Discord link above
diff --git a/RegenAllByteCode.cmd b/RegenAllByteCode.cmd
deleted file mode 100644
index 11257f22361..00000000000
--- a/RegenAllByteCode.cmd
+++ /dev/null
@@ -1,83 +0,0 @@
-::-------------------------------------------------------------------------------------------------------
-:: Copyright (C) Microsoft. All rights reserved.
-:: Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
-::-------------------------------------------------------------------------------------------------------
-
-:: Regenerate all bytecode.
-:: ch.exe is used to generate Intl bytecodes.
-:: ch.exe (NoJIT variety) is used to generate NoJIT Intl bytecodes.
-:: Each set of bytecode requires an x86_debug and x64_debug binary.
-::
-:: Thus we need to build the following:
-:: [Core] ch.exe x64_debug
-:: [Core] ch.exe x86_debug
-:: [Core] ch.exe x64_debug (NoJIT)
-:: [Core] ch.exe x86_debug (NoJIT)
-
-setlocal
-pushd %~dp0
-
-:: ch.exe x64_debug
-:: ch.exe x86_debug
-call jenkins\buildone.cmd x64 debug
-if %errorlevel% neq 0 (
- echo There was a build error for x64 debug. Stopping bytecode generation.
- exit /b 1
-)
-call jenkins\buildone.cmd x86 debug
-if %errorlevel% neq 0 (
- echo There was a build error for x86 debug. Stopping bytecode generation.
- exit /b 1
-)
-
-pushd lib\Runtime\Library\InJavascript
-call GenByteCode.cmd
-if %errorlevel% neq 0 (
- echo There was an error when regenerating bytecode header.
- exit /b 1
-)
-popd
-
-pushd lib\Runtime\Library\JsBuiltIn
-call GenByteCode.cmd
-if %errorlevel% neq 0 (
- echo There was an error when regenerating bytecode header.
- exit /b 1
-)
-popd
-
-:: ch.exe x64_debug (NoJIT)
-:: ch.exe x86_debug (NoJIT)
-call jenkins\buildone.cmd x64 debug "/p:BuildJIT=false"
-if %errorlevel% neq 0 (
- echo There was a build error for x64 debug NoJIT. Stopping bytecode generation.
- exit /b 1
-)
-
-call jenkins\buildone.cmd x86 debug "/p:BuildJIT=false"
-if %errorlevel% neq 0 (
- echo There was a build error for x86 debug NoJIT. Stopping bytecode generation.
- exit /b 1
-)
-
-:: Generate Intl NoJIT Bytecodes using ch.exe (NoJIT)
-pushd lib\Runtime\Library\InJavascript
-call GenByteCode.cmd -nojit
-if %errorlevel% neq 0 (
- echo There was an error when regenerating bytecode header for NoJIT.
- exit /b 1
-)
-popd
-
-:: Generate BuiltIn NoJIT Bytecodes using ch.exe (NoJIT)
-pushd lib\Runtime\Library\JsBuiltIn
-call GenByteCode.cmd -nojit
-if %errorlevel% neq 0 (
- echo There was an error when regenerating bytecode header for NoJIT.
- exit /b 1
-)
-popd
-
-popd
-
-endlocal
diff --git a/RegenAllByteCodeNoBuild.cmd b/RegenAllByteCodeNoBuild.cmd
deleted file mode 100644
index 9f0baeb3515..00000000000
--- a/RegenAllByteCodeNoBuild.cmd
+++ /dev/null
@@ -1,40 +0,0 @@
-::-------------------------------------------------------------------------------------------------------
-:: Copyright (C) Microsoft. All rights reserved.
-:: Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
-::-------------------------------------------------------------------------------------------------------
-
-:: WARNING: be careful when using this script as it assumes that
-:: you already have bytecode-format-compatible builds for all required flavors.
-:: This script helps speed things up when you are only making changes to scripts,
-:: e.g. Intl.js, without making any changes to bytecode format, since rebuilding
-:: every flavor of ChakraCore.dll when there are no relevant changes is a waste of time.
-:: Please ensure that you use buddy builds to validate the results.
-
-:: Regenerate all bytecode (without rebuilding each flavor of ch.exe)
-:: ch.exe is used to generate Intl bytecodes.
-:: ch.exe (NoJIT variety) is used to generate NoJIT Intl bytecodes.
-:: Each set of bytecode requires an x86_debug and x64_debug binary.
-::
-:: Thus we need to already have compatible builds of the following:
-:: [Core] ch.exe x64_debug
-:: [Core] ch.exe x86_debug
-:: [Core] ch.exe x64_debug (NoJIT)
-:: [Core] ch.exe x86_debug (NoJIT)
-
-@echo off
-setlocal
- set _reporoot=%~dp0
- pushd %_reporoot%\lib\Runtime\Library\InJavascript
- call GenByteCode.cmd
- call GenByteCode.cmd -nojit
- popd
- pushd %_reporoot%\lib\Runtime\Library\JsBuiltIn
- call GenByteCode.cmd
- call GenByteCode.cmd -nojit
- popd
-
- pushd %_reporoot%\lib\Runtime\Library\JsBuiltIn
- call GenByteCode.cmd
- call GenByteCode.cmd -nojit
- popd
-endlocal
diff --git a/THIRD-PARTY-NOTICES.txt b/THIRD-PARTY-NOTICES.txt
index 6638eb7654d..fdad9a73a7e 100644
--- a/THIRD-PARTY-NOTICES.txt
+++ b/THIRD-PARTY-NOTICES.txt
@@ -1,19 +1,21 @@
ChakraCore uses third party material from the projects listed below.
The original copyright notice and the license under which Microsoft
-received such third party material are set forth below. Microsoft
-reserves all other rights not expressly granted, whether by
+or the ChakraCore Project Contributors received such third party material
+are set forth below. Microsoft, and where relevant the ChakraCore Project
+Contributors reserve all other rights not expressly granted, whether by
implication, estoppel or otherwise.
In the event that we accidentally failed to list a required notice, please
-bring it to our attention. Post an issue or email us: chakracore@microsoft.com
+bring it to our attention. Post an issue or message us on discord.
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
ChakraCore incorporates third party material from the projects listed below.
-The original copyright notice and the license under which Microsoft received
-such third party material are set forth below. Microsoft reserves all other
-rights not expressly granted, whether by implication, estoppel or otherwise.
+The original copyright notice and the license under which Microsoft or the
+ChakraCore Project Contributors received such third party material are set
+forth below. Microsoft, and where relevant the ChakraCore Project Contributors
+reserve all other rights not expressly granted, whether by implication, estoppel or otherwise.
1. backbone.suggestions (https://github.com/qloo/backbone.suggestions)
2. BeatDetektor.js
@@ -48,6 +50,7 @@ rights not expressly granted, whether by implication, estoppel or otherwise.
31. yargs-parser.js (https://github.com/yargs/yargs-parser)
32. camelcase.js (https://github.com/sindresorhus/camelcase)
33. ARES-6 (https://github.com/WebKit/webkit/tree/master/PerformanceTests/ARES-6)
+34. PAL (from dotNet)
%% backbone.suggestions NOTICES, INFORMATION, AND LICENSE BEGIN HERE
=========================================
@@ -2041,3 +2044,30 @@ END OF ARES-6 NOTICES, INFORMATION, AND LICENSE
---------------------------------------------
+%% PAL NOTICES, INFORMATION AND LICENSE BEGIN HERE
+=========================================
+The MIT License (MIT)
+
+Copyright (c) .NET Foundation and Contributors
+
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+=========================================
+END OF PAL NOTICES, INFORMATION, AND LICENSE
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 00000000000..0d21565fd58
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,210 @@
+#-------------------------------------------------------------------------------------------------------
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+#-------------------------------------------------------------------------------------------------------
+
+trigger:
+- master
+- release/*
+
+jobs:
+ - job: Style
+ timeoutInMinutes: 10
+
+ pool:
+ vmImage: 'ubuntu-latest'
+
+ steps:
+ - script: tools/StyleChecks/check_copyright.sh
+ displayName: "Copyright Check"
+
+ - script: tools/StyleChecks/check_ascii.sh
+ displayName: "Ascii Check"
+
+ - script: tools/StyleChecks/check_eol.sh
+ displayName: "EOL Check"
+
+ - script: tools/StyleChecks/check_tabs.sh
+ displayName: "Tab Check"
+
+ - job: CMake
+ timeoutInMinutes: 120
+ strategy:
+ maxParallel: 6
+ matrix:
+ Linux.Debug:
+ image_name: 'ubuntu-22.04'
+ deps: 'sudo apt-get install -y ninja-build clang libicu-dev'
+ build_type: 'Debug'
+ libtype_flag: ''
+ Linux.NoJit:
+ image_name: 'ubuntu-22.04'
+ deps: 'sudo apt-get install -y ninja-build clang libicu-dev'
+ build_type: 'Debug'
+ libtype_flag: '-DDISABLE_JIT=ON'
+ Linux.ReleaseWithDebug:
+ image_name: 'ubuntu-22.04'
+ deps: 'sudo apt-get install -y ninja-build clang libicu-dev'
+ build_type: 'RelWithDebInfo'
+ libtype_flag: ''
+ Linux.Release:
+ image_name: 'ubuntu-22.04'
+ deps: 'sudo apt-get install -y ninja-build clang libicu-dev'
+ build_type: 'Release'
+ libtype_flag: ''
+ Ubuntu20.ReleaseWithDebug:
+ image_name: 'ubuntu-20.04'
+ deps: 'sudo apt-get install -y ninja-build clang libicu-dev'
+ build_type: 'RelWithDebInfo'
+ libtype_flag: ''
+ Ubuntu20.Release:
+ image_name: 'ubuntu-20.04'
+ deps: 'sudo apt-get install -y ninja-build clang libicu-dev'
+ build_type: 'Release'
+ libtype_flag: ''
+ OSX.DebugNoICU:
+ image_name: 'macOS-latest'
+ deps: 'brew install ninja'
+ build_type: 'Debug'
+ libtype_flag: '-DSTATIC_LIBRARY=ON'
+ OSX.ReleaseWithDebug:
+ image_name: 'macOS-latest'
+ deps: 'brew install ninja icu4c'
+ build_type: 'RelWithDebInfo'
+ libtype_flag: '-DICU_INCLUDE_PATH=/usr/local/opt/icu4c/include'
+ OSX.Release:
+ image_name: 'macOS-latest'
+ deps: 'brew install ninja icu4c'
+ build_type: 'Release'
+ libtype_flag: '-DICU_INCLUDE_PATH=/usr/local/opt/icu4c/include'
+
+ pool:
+ vmImage: $(image_name)
+
+ steps:
+ - script: $(deps)
+ displayName: 'Install dependencies'
+
+ - script: |
+ mkdir -p build
+ displayName: 'Create build directories'
+
+ - script: |
+ cd build
+ cmake -GNinja -DCMAKE_BUILD_TYPE=$BUILD_TYPE $LIBTYPE -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ..
+ displayName: CMake
+ env:
+ BUILD_TYPE: $(build_type)
+ LIBTYPE: $(libtype_flag)
+
+ - script: |
+ cd build
+ ninja
+ displayName: 'Build'
+
+ - publish: $(System.DefaultWorkingDirectory)/build/bin
+ artifact: $(Agent.JobName)
+
+ - script: |
+ cd build
+ ninja check
+ displayName: 'Test'
+
+ - job: MSVC
+ timeoutInMinutes: 120
+ strategy:
+ maxParallel: 4
+ matrix:
+ x86.Debug:
+ image_name: 'windows-2022'
+ build_type: 'debug'
+ target: 'x86'
+ special_build: ''
+ do_test: true
+ test_tags: ''
+ build_outdir_suffix: ''
+ x86.Test:
+ image_name: 'windows-2022'
+ build_type: 'test'
+ target: 'x86'
+ special_build: ''
+ do_test: true
+ test_tags: '--include-slow'
+ build_outdir_suffix: ''
+ x86.NoJit:
+ image_name: 'windows-2022'
+ build_type: 'debug'
+ target: 'x86'
+ special_build: '"/p:BuildJIT=false"'
+ do_test: true
+ test_tags: '-disablejit'
+ build_outdir_suffix: '.NoJIT'
+ x86.Release:
+ image_name: 'windows-2022'
+ build_type: 'release'
+ target: 'x86'
+ special_build: ''
+ do_test: false
+ test_tags: ''
+ build_outdir_suffix: ''
+ x64.Debug:
+ image_name: 'windows-2022'
+ build_type: 'debug'
+ target: 'x64'
+ special_build: ''
+ do_test: true
+ test_tags: ''
+ build_outdir_suffix: ''
+ x64.Test:
+ image_name: 'windows-2022'
+ build_type: 'test'
+ target: 'x64'
+ special_build: ''
+ do_test: true
+ test_tags: '--include-slow'
+ build_outdir_suffix: ''
+ x64.Release:
+ image_name: 'windows-2022'
+ build_type: 'release'
+ target: 'x64'
+ special_build: ''
+ do_test: false
+ test_tags: ''
+ build_outdir_suffix: ''
+ win19.x86.Release:
+ image_name: 'windows-2019'
+ build_type: 'release'
+ target: 'x86'
+ special_build: ''
+ do_test: false
+ test_tags: ''
+ build_outdir_suffix: ''
+ win19.x64.Release:
+ image_name: 'windows-2019'
+ build_type: 'release'
+ target: 'x64'
+ special_build: ''
+ do_test: false
+ test_tags: ''
+ build_outdir_suffix: ''
+ pool:
+ vmImage: $(image_name)
+
+ steps:
+ - script: test\ci.buildone.cmd %TARGET% %BUILD% %SPECIAL%
+ displayName: 'Build'
+ env:
+ TARGET: $(target)
+ BUILD: $(build_type)
+ SPECIAL: $(special_build)
+
+ - publish: $(System.DefaultWorkingDirectory)/Build/VcBuild$(build_outdir_suffix)/bin
+ artifact: $(Agent.JobName)
+
+ - script: test\ci.testone.cmd %TARGET% %BUILD% %TEST_TAGS%
+ displayName: 'Test'
+ condition: eq(variables['do_test'], true)
+ env:
+ TARGET: $(target)
+ BUILD: $(build_type)
+ TEST_TAGS: $(test_tags)
diff --git a/bin/ChakraCore/ChakraCore.def b/bin/ChakraCore/ChakraCore.def
index 903eca2a38b..b8801e1ccd6 100644
--- a/bin/ChakraCore/ChakraCore.def
+++ b/bin/ChakraCore/ChakraCore.def
@@ -68,5 +68,10 @@ JsSetHostPromiseRejectionTracker
JsGetProxyProperties
JsSerializeParserState
JsRunScriptWithParserState
+JsDeserializeParserState
JsGetPromiseState
JsGetPromiseResult
+
+JsQueueBackgroundParse_Experimental
+JsDiscardBackgroundParse_Experimental
+JsExecuteBackgroundParse_Experimental
diff --git a/bin/ChakraCore/ChakraCore.vcxproj b/bin/ChakraCore/ChakraCore.vcxproj
index 915a4fd670b..6a91cff6e72 100644
--- a/bin/ChakraCore/ChakraCore.vcxproj
+++ b/bin/ChakraCore/ChakraCore.vcxproj
@@ -164,6 +164,9 @@
{abc904ad-9415-46f8-aa23-e33193f81f7c}
+
+ {4da3a367-6ed2-4ee8-9698-5bcd0b8af7f5}
+
{53D52B0B-86D9-4D31-AD09-0D6B3C063ADD}
@@ -194,4 +197,4 @@
-
+
\ No newline at end of file
diff --git a/bin/ChakraCore/ChakraCoreDllFunc.cpp b/bin/ChakraCore/ChakraCoreDllFunc.cpp
index c0b5fc8ba25..624d3421956 100644
--- a/bin/ChakraCore/ChakraCoreDllFunc.cpp
+++ b/bin/ChakraCore/ChakraCoreDllFunc.cpp
@@ -153,6 +153,8 @@ EXTERN_C BOOL WINAPI DllMain(HINSTANCE hmod, DWORD dwReason, PVOID pvReserved)
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
else
{
+ ThreadBoundThreadContextManager::DestroyAllContexts();
+ DetachProcess();
ThreadContext::ReportAndCheckLeaksOnProcessDetach();
}
#endif
diff --git a/bin/ChakraCore/TestHooks.cpp b/bin/ChakraCore/TestHooks.cpp
index 2416eec2be3..6e5a9a1e85f 100644
--- a/bin/ChakraCore/TestHooks.cpp
+++ b/bin/ChakraCore/TestHooks.cpp
@@ -19,6 +19,13 @@ int LogicalStringCompareImpl(const char16* p1, int p1size, const char16* p2, int
}
}
+namespace Js
+{
+ static digit_t AddDigit(digit_t a, digit_t b, digit_t * carry);
+ static digit_t SubtractDigit(digit_t a, digit_t b, digit_t * borrow);
+ static digit_t MulDigit(digit_t a, digit_t b, digit_t * high);
+}
+
#ifdef ENABLE_TEST_HOOKS
HRESULT __stdcall SetConfigFlags(__in int argc, __in_ecount(argc) LPWSTR argv[], ICustomConfigFlags* customConfigFlags)
@@ -61,16 +68,6 @@ HRESULT __stdcall SetEnableCheckMemoryLeakOutput(bool flag)
return S_OK;
}
-#if ENABLE_NATIVE_CODEGEN
-#ifdef _WIN32
-void __stdcall ConnectJITServer(HANDLE processHandle, void* serverSecurityDescriptor, UUID connectionId)
-{
- JITManager::GetJITManager()->EnableOOPJIT();
- ThreadContext::SetJITConnectionInfo(processHandle, serverSecurityDescriptor, connectionId);
-}
-#endif
-#endif
-
void __stdcall NotifyUnhandledException(PEXCEPTION_POINTERS exceptionInfo)
{
#ifdef GENERATE_DUMP
@@ -168,6 +165,11 @@ HRESULT OnChakraCoreLoaded(OnChakraCoreLoadedPtr pfChakraCoreLoaded)
SetEnableCheckMemoryLeakOutput,
PlatformAgnostic::UnicodeText::Internal::LogicalStringCompareImpl,
+ //BigInt hooks
+ Js::JavascriptBigInt::AddDigit,
+ Js::JavascriptBigInt::SubDigit,
+ Js::JavascriptBigInt::MulDigit,
+
#define FLAG(type, name, description, defaultValue, ...) FLAG_##type##(name)
#define FLAGINCLUDE(name) \
IsEnabled##name##Flag, \
@@ -191,9 +193,6 @@ HRESULT OnChakraCoreLoaded(OnChakraCoreLoadedPtr pfChakraCoreLoaded)
#undef FLAG_NumberPairSet
#undef FLAG_NumberTrioSet
#undef FLAG_NumberRange
-#if ENABLE_NATIVE_CODEGEN && _WIN32
- ConnectJITServer,
-#endif
NotifyUnhandledException
};
return pfChakraCoreLoaded(testHooks);
diff --git a/bin/ChakraCore/TestHooks.h b/bin/ChakraCore/TestHooks.h
index 02b89958ca9..1b593c887d9 100644
--- a/bin/ChakraCore/TestHooks.h
+++ b/bin/ChakraCore/TestHooks.h
@@ -5,7 +5,7 @@
#pragma once
#ifdef ENABLE_TEST_HOOKS
-
+#include
interface ICustomConfigFlags;
#if defined(_WIN32) || defined(_MSC_VER)
@@ -31,6 +31,14 @@ struct TestHooks
SetEnableCheckMemoryLeakOutputPtr pfSetEnableCheckMemoryLeakOutput;
LogicalStringCompareImpl pfLogicalCompareStringImpl;
+ // JavaScript BigInt hooks
+ typedef digit_t(TESTHOOK_CALL *AddDigit)(digit_t a, digit_t b, digit_t* carry);
+ typedef digit_t(TESTHOOK_CALL *SubDigit)(digit_t a, digit_t b, digit_t* borrow);
+ typedef digit_t(TESTHOOK_CALL *MulDigit)(digit_t a, digit_t b, digit_t* high);
+ AddDigit pfAddDigit;
+ SubDigit pfSubDigit;
+ MulDigit pfMulDigit;
+
#define FLAG(type, name, description, defaultValue, ...) FLAG_##type##(name)
#define FLAG_String(name) \
bool (TESTHOOK_CALL *pfIsEnabled##name##Flag)(); \
@@ -61,13 +69,6 @@ struct TestHooks
#undef FLAG_NumberTrioSet
#undef FLAG_NumberRange
-#if ENABLE_NATIVE_CODEGEN
-#ifdef _WIN32
- typedef void(TESTHOOK_CALL * ConnectJITServer)(HANDLE processHandle, void* serverSecurityDescriptor, UUID connectionId);
- ConnectJITServer pfnConnectJITServer;
-#endif
-#endif
-
NotifyUnhandledExceptionPtr pfnNotifyUnhandledException;
};
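The new pfAddDigit/pfSubDigit/pfMulDigit fields extend the TestHooks function-pointer table that OnChakraCoreLoaded hands back to the host. Below is a minimal, hedged sketch of how a test host might capture and call them; the OnChakraCoreLoaded callback typedef is not shown in this hunk, so the TestHooks-reference parameter and the g_hooks/g_hooksLoaded names are illustrative assumptions only.

// Hedged sketch, not part of this patch: capturing the BigInt digit hooks.
// Assumes TestHooks.h is included and that the (unshown) OnChakraCoreLoaded
// callback receives a TestHooks reference, as the pfChakraCoreLoaded(testHooks)
// call in TestHooks.cpp suggests.
static TestHooks g_hooks = {};
static bool g_hooksLoaded = false;

static HRESULT __stdcall OnChakraCoreLoadedSketch(TestHooks& hooks)
{
    g_hooks = hooks;        // copies pfAddDigit / pfSubDigit / pfMulDigit
    g_hooksLoaded = true;
    return S_OK;
}

static digit_t AddDigitViaHook(digit_t a, digit_t b, digit_t* carry)
{
    Assert(g_hooksLoaded);  // hooks are only populated when the test build of the DLL loads
    return g_hooks.pfAddDigit(a, b, carry);
}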
diff --git a/bin/GCStress/GCStress.vcxproj b/bin/GCStress/GCStress.vcxproj
index 6e8bccbd712..14e91792ec5 100644
--- a/bin/GCStress/GCStress.vcxproj
+++ b/bin/GCStress/GCStress.vcxproj
@@ -32,6 +32,7 @@
$(ChakraRuntimePlatformAgnostic);
$(ChakraCommonLinkDependencies);
Ole32.lib;
+ Rpcrt4.lib;
Advapi32.lib;
%(AdditionalDependencies)
diff --git a/bin/NativeTests/BigUIntTest.cpp b/bin/NativeTests/BigUIntTest.cpp
new file mode 100644
index 00000000000..6042dab90f8
--- /dev/null
+++ b/bin/NativeTests/BigUIntTest.cpp
@@ -0,0 +1,275 @@
+//-------------------------------------------------------------------------------------------------------
+// Copyright (C) Microsoft. All rights reserved.
+// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+//-------------------------------------------------------------------------------------------------------
+
+#include "stdafx.h"
+#pragma warning(disable:26434) // Function definition hides non-virtual function in base class
+#pragma warning(disable:26439) // Implicit noexcept
+#pragma warning(disable:26451) // Arithmetic overflow
+#pragma warning(disable:26495) // Uninitialized member variable
+#include "catch.hpp"
+#include "BigUIntTest.h"
+
+#pragma warning(disable:4100) // unreferenced formal parameter
+#pragma warning(disable:6387) // suppressing preFAST which raises warning for passing null to the JsRT APIs
+#pragma warning(disable:6262) // CATCH is using stack variables to report errors, suppressing the preFAST warning.
+
+namespace BigUIntTest
+{
+ TEST_CASE("Init_Compare", "[BigUIntTest]")
+ {
+ uint32 digits[1];
+ int32 length = 1;
+ Js::BigUInt bi1, bi2;
+ BOOL f;
+ int result;
+
+ digits[0] = 0x00001111;
+ f = bi1.FInitFromRglu(digits, length);
+ REQUIRE(f);
+
+ SECTION("Equal number init from the same array and length")
+ {
+ f = bi2.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ result = bi1.Compare(&bi2);
+ CHECK(result == 0);
+ }
+
+ SECTION("Equal number init from other big int number")
+ {
+ f = bi2.FInitFromBigint(&bi1);
+ REQUIRE(f);
+ result = bi1.Compare(&bi2);
+ CHECK(result == 0);
+ }
+
+ SECTION("Greater number")
+ {
+ digits[0] = 0x00000001;
+ f = bi2.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ result = bi1.Compare(&bi2);
+ CHECK(result == 1);
+ }
+
+ SECTION("Smaller number")
+ {
+ digits[0] = 0x00000001;
+ f = bi2.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ result = bi2.Compare(&bi1);
+ CHECK(result == -1);
+ }
+ }
+
+ TEST_CASE("Addition", "[BigUIntTest]")
+ {
+ uint32 digits[1], digit1s[2];
+ int32 length = 1;
+ Js::BigUInt bi1, bi2, bi3;
+ BOOL f;
+ int result;
+
+ SECTION("Check 0x33331111 + 0x33331111 = 0x66662222")
+ {
+ digits[0] = 0x33331111;
+ f = bi1.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ f = bi2.FInitFromBigint(&bi1);
+ REQUIRE(f);
+ f = bi1.FAdd(&bi2);
+ REQUIRE(f);
+ digits[0] = 0x66662222;
+ f = bi3.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ result = bi1.Compare(&bi3);
+ CHECK(result == 0);
+ }
+
+ SECTION("Check 0xffffffff + 0x1 = 0x100000000")
+ {
+ digits[0] = 0xffffffff;
+ f = bi1.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ digits[0] = 0x00000001;
+ f = bi2.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ f = bi1.FAdd(&bi2);
+ digit1s[0] = 0x0;
+ digit1s[1] = 0x1;
+ f = bi3.FInitFromRglu(digit1s, 2);
+ REQUIRE(f);
+ result = bi1.Compare(&bi3);
+ CHECK(result == 0);
+ }
+
+ SECTION("Check 0xffffffffffffffff + 0x1 = 0x10000000000000000")
+ {
+ digit1s[0] = 0xffffffff;
+ digit1s[1] = 0xffffffff;
+ f = bi1.FInitFromRglu(digit1s, 2);
+ REQUIRE(f);
+ digits[0] = 0x00000001;
+ f = bi2.FInitFromRglu(digits, 1);
+ REQUIRE(f);
+ f = bi1.FAdd(&bi2);
+ uint32 digit2s[3];
+ digit2s[0] = 0x0;
+ digit2s[1] = 0x0;
+ digit2s[2] = 0x1;
+ f = bi3.FInitFromRglu(digit2s, 3);
+ REQUIRE(f);
+ result = bi1.Compare(&bi3);
+ CHECK(result == 0);
+ }
+ }
+
+ TEST_CASE("Addition_Subtraction_Large_Number", "[BigUIntTest]")
+ {
+ const int l1 = 50, l2 = 1;
+ uint32 digit1s[l1], digit2s[l2];
+ Js::BigUInt bi1, bi2;
+ BOOL f;
+
+ SECTION("Check 0xf...0xf + 0x1 = 0x1_0x0...0x0")
+ {
+ for (int i = 0; i < l1; i++)
+ {
+ digit1s[i] = 0xffffffff;
+ }
+ f = bi1.FInitFromRglu(digit1s, l1);
+ REQUIRE(f);
+ digit2s[0] = 0x1;
+ f = bi2.FInitFromRglu(digit2s, l2);
+ REQUIRE(f);
+ f = bi1.FAdd(&bi2);
+ REQUIRE(f);
+ int32 length = bi1.Clu();
+ CHECK(length == l1 + 1);
+ uint32 digit = bi1.Lu(length - 1);
+ CHECK(digit == 1);
+ for (int i = 0; i < length - 1; i++)
+ {
+ digit = bi1.Lu(i);
+ CHECK(digit == 0);
+ }
+ }
+ }
+
+ TEST_CASE("Subtraction", "[BigUIntTest]")
+ {
+ uint32 digits[1], digit1s[2];
+ int32 length = 1;
+ Js::BigUInt bi1, bi2, bi3;
+ BOOL f;
+ int result;
+
+ SECTION("Check 0x66662222 - 0x33331111 = 0x33331111")
+ {
+ digits[0] = 0x33331111;
+ f = bi1.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ f = bi2.FInitFromBigint(&bi1);
+ REQUIRE(f);
+ digits[0] = 0x66662222;
+ f = bi3.FInitFromRglu(digits, length);
+ REQUIRE(f);
+ bi3.Subtract(&bi2);
+ result = bi1.Compare(&bi3);
+ CHECK(result == 0);
+ }
+
+ SECTION("Check 0x3_0x1 - 0x1_0x0 = 0x2_0x1")
+ {
+ digit1s[0] = 0x1;
+ digit1s[1] = 0x3;
+ f = bi3.FInitFromRglu(digit1s, 2);
+ REQUIRE(f);
+ digit1s[0] = 0x0;
+ digit1s[1] = 0x1;
+ f = bi2.FInitFromRglu(digit1s, 2);
+ REQUIRE(f);
+ bi3.Subtract(&bi2);
+ int l = bi3.Clu();
+ CHECK(l == 2);
+ int digit = bi3.Lu(1);
+ CHECK(digit == 2);
+ digit = bi3.Lu(0);
+ CHECK(digit == 1);
+ }
+
+ SECTION("Check 0x2_0x0 - 0x1 = 0x1_0xfffffff")
+ {
+ digit1s[0] = 0x0;
+ digit1s[1] = 0x2;
+ f = bi3.FInitFromRglu(digit1s, 2);
+ REQUIRE(f);
+ digits[0] = 0x1;
+ f = bi2.FInitFromRglu(digits, 1);
+ REQUIRE(f);
+ bi3.Subtract(&bi2);
+ int l = bi3.Clu();
+ CHECK(l == 2);
+ int digit = bi3.Lu(1);
+ CHECK(digit == 1);
+ digit = bi3.Lu(0);
+ CHECK(digit == 0xffffffff);
+ }
+
+ SECTION("Currently 0x1_0x0 - 0x1 is overflow")
+ {
+ }
+ }
+
+ TEST_CASE("Init_From_Char_Of_Digits", "[BigUIntTest]")
+ {
+ BigUInt biDec;
+ const char *charDigit;
+ bool result;
+ int charDigitLength;
+
+ SECTION("2**32-1 should have length = 1")
+ {
+ charDigit = "4294967295";
+ charDigitLength = 10;
+ result = biDec.FInitFromDigits(charDigit, charDigitLength, &charDigitLength);
+ REQUIRE(result);
+ int length = biDec.Clu();
+ CHECK(length == 1);
+ uint32 digit = biDec.Lu(0);
+ CHECK(digit == 4294967295);
+ }
+
+ SECTION("2**32+2 should have length = 2")
+ {
+ charDigit = "4294967298";
+ charDigitLength = 10;
+ result = biDec.FInitFromDigits(charDigit, charDigitLength, &charDigitLength);
+ REQUIRE(result);
+ int length = biDec.Clu();
+ CHECK(length == 2);
+ uint32 digit = biDec.Lu(0);
+ CHECK(digit == 2);
+ digit = biDec.Lu(1);
+ CHECK(digit == 1);
+ }
+
+ SECTION("2**64 should have length = 3")
+ {
+ charDigit = "18446744073709551616";
+ charDigitLength = 20;
+ result = biDec.FInitFromDigits(charDigit, charDigitLength, &charDigitLength);
+ REQUIRE(result);
+ int length = biDec.Clu();
+ CHECK(length == 3);
+ uint32 digit = biDec.Lu(0);
+ CHECK(digit == 0);
+ digit = biDec.Lu(1);
+ CHECK(digit == 0);
+ digit = biDec.Lu(2);
+ CHECK(digit == 1);
+ }
+ }
+}
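The "Init_From_Char_Of_Digits" cases boil down to a decimal-to-base-2^32 conversion. Below is a standalone, hedged sketch of that conversion (not the BigUInt implementation itself); the helper name is hypothetical and only fixed-width integer arithmetic is used.

// Hedged sketch, not part of this patch: the base conversion exercised by the
// FInitFromDigits cases above -- repeated multiply-by-10 and add over
// little-endian base-2^32 limbs.
#include <cstdint>
#include <string>
#include <vector>

static std::vector<uint32_t> DecimalToLimbs(const std::string& dec)
{
    std::vector<uint32_t> limbs; // least significant uint32 first
    for (char ch : dec)
    {
        uint64_t carry = static_cast<uint64_t>(ch - '0');
        for (uint32_t& limb : limbs)
        {
            uint64_t v = static_cast<uint64_t>(limb) * 10 + carry;
            limb = static_cast<uint32_t>(v); // low 32 bits stay in this limb
            carry = v >> 32;                 // overflow ripples to higher limbs
        }
        if (carry != 0)
        {
            limbs.push_back(static_cast<uint32_t>(carry));
        }
    }
    return limbs;
}

// "4294967295" (2^32-1)         -> {0xffffffff}   : length 1, as checked above
// "18446744073709551616" (2^64) -> {0, 0, 1}      : length 3, as checked above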
diff --git a/bin/NativeTests/BigUIntTest.h b/bin/NativeTests/BigUIntTest.h
new file mode 100644
index 00000000000..d8401f85e67
--- /dev/null
+++ b/bin/NativeTests/BigUIntTest.h
@@ -0,0 +1,49 @@
+//-------------------------------------------------------------------------------------------------------
+// Copyright (C) Microsoft. All rights reserved.
+// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+//-------------------------------------------------------------------------------------------------------
+
+// This file contains stubs needed to make BigUIntTest successfully compile and link, as well
+// as a means to emulate the behavior of objects that interact with the BigUInt class
+
+#include "..\..\lib\Common\Warnings.h"
+#include "..\..\lib\Common\Core\Api.cpp"
+#include "..\..\lib\Common\Common\NumberUtilities.cpp"
+
+namespace Js
+{
+ void Throw::FatalInternalError(long)
+ {
+ Assert(false);
+ }
+
+ bool Throw::ReportAssert(__in char const *, unsigned int, __in char const *, __in char const *)
+ {
+ return false;
+ }
+
+ void Throw::LogAssert(void) {}
+}
+
+template <typename EncodedChar>
+double Js::NumberUtilities::StrToDbl(const EncodedChar *, const EncodedChar **, LikelyNumberType& , bool, bool)
+{
+ Assert(false);
+ return 0.0;// don't care
+}
+
+#if defined(_M_IX86) || defined(_M_X64)
+BOOL
+AutoSystemInfo::SSE3Available() const
+{
+ Assert(false);
+ return TRUE;
+}
+
+AutoSystemInfo AutoSystemInfo::Data;
+
+void AutoSystemInfo::Initialize(void){}
+#endif
+
+#include "..\..\lib\Common\DataStructures\BigUInt.h"
+#include "..\..\lib\Common\DataStructures\BigUInt.cpp"
diff --git a/test/Math/constants.js b/bin/NativeTests/ConfigFlagsList.h
similarity index 57%
rename from test/Math/constants.js
rename to bin/NativeTests/ConfigFlagsList.h
index e6d41d203a5..185addb2493 100644
--- a/test/Math/constants.js
+++ b/bin/NativeTests/ConfigFlagsList.h
@@ -1,15 +1,7 @@
-//-------------------------------------------------------------------------------------------------------
-// Copyright (C) Microsoft. All rights reserved.
-// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
-//-------------------------------------------------------------------------------------------------------
-
-// check constants, just echo
-WScript.Echo("Math constants:")
-WScript.Echo(Math.E);
-WScript.Echo(Math.PI);
-WScript.Echo(Math.LN10);
-WScript.Echo(Math.LN2);
-WScript.Echo(Math.LOG2E);
-WScript.Echo(Math.LOG10E);
-WScript.Echo(Math.SQRT1_2);
-WScript.Echo(Math.SQRT2);
\ No newline at end of file
+//-------------------------------------------------------------------------------------------------------
+// Copyright (C) Microsoft. All rights reserved.
+// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+//-------------------------------------------------------------------------------------------------------
+#pragma once
+
+// stub file for ConfigFlagsList.h
diff --git a/bin/NativeTests/FunctionExecutionTest.h b/bin/NativeTests/FunctionExecutionTest.h
index 94c20cab1c2..0d792cd7981 100644
--- a/bin/NativeTests/FunctionExecutionTest.h
+++ b/bin/NativeTests/FunctionExecutionTest.h
@@ -127,6 +127,8 @@ namespace Js
FunctionEntryPointInfo* GetDefaultFunctionEntryPointInfo() { return &defaultInfo; }
FunctionEntryPointInfo *GetSimpleJitEntryPointInfo() { return &simpleInfo; }
void TraceExecutionMode(const char *const eventDescription = nullptr) const { UNREFERENCED_PARAMETER(eventDescription); }
+ // Dummy implementation to match the real FunctionBody's method
+ bool SkipAutoProfileForCoroutine() const { return false; }
FunctionBody(bool interpreterProfile, bool interpreterAutoProfile, bool simpleJit):
doInterpreterProfile(interpreterProfile),
diff --git a/bin/NativeTests/JavascriptBigIntTests.cpp b/bin/NativeTests/JavascriptBigIntTests.cpp
new file mode 100644
index 00000000000..9c9f35c3b15
--- /dev/null
+++ b/bin/NativeTests/JavascriptBigIntTests.cpp
@@ -0,0 +1,84 @@
+//-------------------------------------------------------------------------------------------------------
+// Copyright (C) Microsoft. All rights reserved.
+// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
+//-------------------------------------------------------------------------------------------------------
+
+#include "stdafx.h"
+#pragma warning(disable:26434) // Function definition hides non-virtual function in base class
+#pragma warning(disable:26439) // Implicit noexcept
+#pragma warning(disable:26451) // Arithmetic overflow
+#pragma warning(disable:26495) // Uninitialized member variable
+#include "catch.hpp"
+
+#pragma warning(disable:4100) // unreferenced formal parameter
+#pragma warning(disable:6387) // suppressing preFAST which raises warning for passing null to the JsRT APIs
+#pragma warning(disable:6262) // CATCH is using stack variables to report errors, suppressing the preFAST warning.
+
+namespace JavascriptBigIntTests
+{
+ void Test_AddDigit(digit_t digit1, digit_t digit2, digit_t * carry, digit_t expectedResult, digit_t expectedCarry)
+ {
+ REQUIRE(g_testHooksLoaded);
+
+ digit_t res = g_testHooks.pfAddDigit(digit1, digit2, carry);
+
+ //test to check that the result from call to AddDigit is the expected value
+ REQUIRE(res == expectedResult);
+ REQUIRE(expectedCarry == *carry);
+ }
+
+ void Test_SubDigit(digit_t digit1, digit_t digit2, digit_t * borrow, digit_t expectedResult, digit_t expectedBorrow)
+ {
+ REQUIRE(g_testHooksLoaded);
+
+ digit_t res = g_testHooks.pfSubDigit(digit1, digit2, borrow);
+
+ //test to check that the result from call to SubtractDigit is the expected value
+ REQUIRE(res == expectedResult);
+ REQUIRE(*borrow == expectedBorrow);
+ }
+
+ void Test_MulDigit(digit_t digit1, digit_t digit2, digit_t * high, digit_t expectedResult, digit_t expectedHigh)
+ {
+ REQUIRE(g_testHooksLoaded);
+
+ digit_t res = g_testHooks.pfMulDigit(digit1, digit2, high);
+
+ // test to check that the result from call to MulDigit is the expected value
+ REQUIRE(res == expectedResult);
+ REQUIRE(*high == expectedHigh);
+ }
+
+ TEST_CASE("AddDigit", "[JavascriptBigIntTests]")
+ {
+ digit_t carry = 0;
+ Test_AddDigit(1, 2, &carry, 3, 0);
+
+ digit_t d1 = UINTPTR_MAX;
+ digit_t d2 = UINTPTR_MAX;
+ carry = 0;
+ Test_AddDigit(d1, d2, &carry, UINTPTR_MAX-1, 1);
+ }
+
+ TEST_CASE("SubDigit", "[JavascriptBigIntTests]")
+ {
+ digit_t borrow = 0;
+ Test_SubDigit(3, 2, &borrow, 1, 0);
+
+ digit_t d1 = 0;
+ digit_t d2 = 1;
+ borrow = 0;
+ Test_SubDigit(d1, d2, &borrow, UINTPTR_MAX, 1);
+ }
+
+ TEST_CASE("MulDigit", "[JavascriptBigIntTests]")
+ {
+ digit_t high = 0;
+ Test_MulDigit(3, 2, &high, 6, 0);
+
+ digit_t d1 = UINTPTR_MAX;
+ digit_t d2 = 2;
+ high = 0;
+ Test_MulDigit(d1, d2, &high, UINTPTR_MAX-1, 1);
+ }
+}
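These cases pin down the digit-level contract: the low word is returned and the carry/borrow/high word comes back through the out-parameter. The sketch below shows how such an add-with-carry primitive chains across limbs; it uses a local stand-in rather than pfAddDigit so the carry-in/carry-out convention is explicit, since the hook's handling of a non-zero incoming carry is not shown in this patch.

// Hedged sketch, not part of this patch: chaining a digit-level add to sum two
// multi-limb magnitudes, the building block the AddDigit tests above exercise.
#include <cstdint>
#include <vector>

using digit_t = uintptr_t; // assumed to be an unsigned machine word, matching
                           // the UINTPTR_MAX expectations in the tests above

// Stand-in with the same shape as pfAddDigit: returns the low word of a + b
// and reports overflow through *carry.
static digit_t AddDigitSketch(digit_t a, digit_t b, digit_t* carry)
{
    digit_t sum = a + b;          // unsigned add wraps modulo 2^N
    *carry = (sum < a) ? 1 : 0;   // wraparound means a carry was produced
    return sum;
}

// Adds b into a (least significant limb first), growing a when a final carry
// remains -- the same growth BigUIntTest checks for 0xff..ff + 1.
static void AddInto(std::vector<digit_t>& a, const std::vector<digit_t>& b)
{
    digit_t carry = 0;
    for (size_t i = 0; i < b.size() || carry != 0; ++i)
    {
        if (i == a.size())
        {
            a.push_back(0);
        }
        digit_t c1 = 0, c2 = 0;
        digit_t partial = AddDigitSketch(a[i], i < b.size() ? b[i] : 0, &c1);
        a[i] = AddDigitSketch(partial, carry, &c2);
        carry = c1 + c2;          // at most one of c1/c2 can be set per limb
    }
}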
diff --git a/bin/NativeTests/JsRTApiTest.cpp b/bin/NativeTests/JsRTApiTest.cpp
index 8274a98f616..4d0e08dd166 100644
--- a/bin/NativeTests/JsRTApiTest.cpp
+++ b/bin/NativeTests/JsRTApiTest.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "stdafx.h"
@@ -252,11 +253,54 @@ namespace JsRTApiTest
JsRTApiTest::RunWithAttributes(JsRTApiTest::DeleteObjectIndexedPropertyBug);
}
+ void HasOwnItemTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
+ {
+ JsValueRef object;
+ REQUIRE(JsRunScript(_u("var obj = {a: [1,2], \"1\": 111}; obj.__proto__[3] = 333; obj;"), JS_SOURCE_CONTEXT_NONE, _u(""), &object) == JsNoError);
+
+ JsPropertyIdRef idRef = JS_INVALID_REFERENCE;
+ JsValueRef result = JS_INVALID_REFERENCE;
+ // delete property "a" triggers PathTypeHandler -> SimpleDictionaryTypeHandler
+ REQUIRE(JsGetPropertyIdFromName(_u("a"), &idRef) == JsNoError);
+ REQUIRE(JsGetProperty(object, idRef, &result) == JsNoError);
+ bool hasOwnItem = false;
+ REQUIRE(JsHasOwnItem(result, 0, &hasOwnItem) == JsNoError);
+ CHECK(hasOwnItem);
+
+ REQUIRE(JsHasOwnItem(result, 1, &hasOwnItem) == JsNoError);
+ CHECK(hasOwnItem);
+
+ REQUIRE(JsHasOwnItem(result, 2, &hasOwnItem) == JsNoError);
+ CHECK(!hasOwnItem); // It does not have item on index 2 - so we should not be able to find that.
+
+ REQUIRE(JsHasOwnItem(object, 1, &hasOwnItem) == JsNoError);
+ CHECK(hasOwnItem);
+
+ REQUIRE(JsHasOwnItem(object, 3, &hasOwnItem) == JsNoError);
+ CHECK(!hasOwnItem); // index 3 is on prototype.
+
+ bool has = false;
+ JsValueRef indexRef = JS_INVALID_REFERENCE;
+ REQUIRE(JsIntToNumber(3, &indexRef) == JsNoError);
+ REQUIRE(JsHasIndexedProperty(object, indexRef, &has) == JsNoError);
+ CHECK(has); // index 3 is on the prototype - so the indexed lookup should still find it.
+ }
+
+ TEST_CASE("ApiTest_HasOwnItemTest", "[ApiTest]")
+ {
+ JsRTApiTest::RunWithAttributes(JsRTApiTest::HasOwnItemTest);
+ }
+
void CALLBACK ExternalObjectFinalizeCallback(void *data)
{
CHECK(data == (void *)0xdeadbeef);
}
+ void CALLBACK ExternalObjectTraceCallback(void *data)
+ {
+ CHECK(data == (void *)0xdeadbeef);
+ }
+
void CrossContextSetPropertyTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
{
bool hasExternalData;
@@ -292,6 +336,11 @@ namespace JsRTApiTest
REQUIRE(JsSetPrototype(jsrtExternalObjectRef, mainObjectRef) == JsNoError);
REQUIRE(JsHasExternalData(jsrtExternalObjectRef, &hasExternalData) == JsNoError);
REQUIRE(hasExternalData);
+
+ JsValueRef object3 = JS_INVALID_REFERENCE;
+ JsGetterSetterInterceptor * interceptor3 = nullptr;
+ JsValueRef prototype2 = JS_INVALID_REFERENCE;
+ REQUIRE(JsCreateCustomExternalObject((void *)0xdeadbeef, 0, ExternalObjectTraceCallback, ExternalObjectFinalizeCallback, &interceptor3, prototype2, &object3) == JsNoError);
}
TEST_CASE("ApiTest_CrossContextSetPropertyTest", "[ApiTest]")
@@ -995,6 +1044,41 @@ namespace JsRTApiTest
JsRTApiTest::RunWithAttributes(JsRTApiTest::EngineFlagTest);
}
+ void CheckExceptionMetadata(JsValueRef exceptionMetadata)
+ {
+ JsPropertyIdRef property = JS_INVALID_REFERENCE;
+ JsValueRef metadataValue = JS_INVALID_REFERENCE;
+ JsValueType type;
+ REQUIRE(JsGetPropertyIdFromName(_u("exception"), &property) == JsNoError);
+ REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
+ REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
+ CHECK(type == JsError);
+
+ REQUIRE(JsGetPropertyIdFromName(_u("line"), &property) == JsNoError);
+ REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
+ REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
+ CHECK(type == JsNumber);
+
+ REQUIRE(JsGetPropertyIdFromName(_u("column"), &property) == JsNoError);
+ REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
+ REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
+ CHECK(type == JsNumber);
+
+ REQUIRE(JsGetPropertyIdFromName(_u("length"), &property) == JsNoError);
+ REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
+ REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
+ CHECK(type == JsNumber);
+
+ REQUIRE(JsGetPropertyIdFromName(_u("url"), &property) == JsNoError);
+ REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
+ REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
+ CHECK(type == JsString);
+
+ REQUIRE(JsGetPropertyIdFromName(_u("source"), &property) == JsNoError);
+ REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
+ REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
+ CHECK(type == JsString);
+ }
void ExceptionHandlingTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
{
bool value;
@@ -1029,31 +1113,7 @@ namespace JsRTApiTest
REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
CHECK(metadataValue == exception);
- REQUIRE(JsGetPropertyIdFromName(_u("line"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("column"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("length"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("url"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsString);
-
- REQUIRE(JsGetPropertyIdFromName(_u("source"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsString);
-
+ CheckExceptionMetadata(exceptionMetadata);
REQUIRE(JsHasException(&value) == JsNoError);
CHECK(value == false);
@@ -1069,35 +1129,18 @@ namespace JsRTApiTest
REQUIRE(JsHasException(&value) == JsNoError);
CHECK(value == false);
- REQUIRE(JsGetPropertyIdFromName(_u("exception"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsError);
-
- REQUIRE(JsGetPropertyIdFromName(_u("line"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("column"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
+ CheckExceptionMetadata(exceptionMetadata);
- REQUIRE(JsGetPropertyIdFromName(_u("length"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
+ // Test unicode characters
+ REQUIRE(JsRunScript(_u("function main() {\n var x = '\u20ac' + test();\n}\nmain();"), JS_SOURCE_CONTEXT_NONE, _u(""), nullptr) == JsErrorScriptException);
+ REQUIRE(JsHasException(&value) == JsNoError);
+ CHECK(value == true);
- REQUIRE(JsGetPropertyIdFromName(_u("url"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsString);
+ REQUIRE(JsGetAndClearExceptionWithMetadata(&exceptionMetadata) == JsNoError);
+ REQUIRE(JsHasException(&value) == JsNoError);
+ CHECK(value == false);
- REQUIRE(JsGetPropertyIdFromName(_u("source"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsString);
+ CheckExceptionMetadata(exceptionMetadata);
// Following requires eval to be enabled - no point in testing it if we've disabled eval
if (!(attributes & JsRuntimeAttributeDisableEval))
@@ -1110,35 +1153,7 @@ namespace JsRTApiTest
REQUIRE(JsHasException(&value) == JsNoError);
CHECK(value == false);
- REQUIRE(JsGetPropertyIdFromName(_u("exception"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsError);
-
- REQUIRE(JsGetPropertyIdFromName(_u("line"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("column"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("length"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsNumber);
-
- REQUIRE(JsGetPropertyIdFromName(_u("url"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsString);
-
- REQUIRE(JsGetPropertyIdFromName(_u("source"), &property) == JsNoError);
- REQUIRE(JsGetProperty(exceptionMetadata, property, &metadataValue) == JsNoError);
- REQUIRE(JsGetValueType(metadataValue, &type) == JsNoError);
- CHECK(type == JsString);
+ CheckExceptionMetadata(exceptionMetadata);
}
}
@@ -2177,7 +2192,7 @@ namespace JsRTApiTest
return JsNoError;
}
- static JsErrorCode CALLBACK Succes_NMRC(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef exceptionVar)
+ static JsErrorCode CALLBACK Success_NMRC(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef exceptionVar)
{
if (successTest.mainModule == referencingModule)
{
@@ -2187,6 +2202,11 @@ namespace JsRTApiTest
return JsNoError;
}
+ static JsErrorCode CALLBACK Success_IIMC(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef importMetaVar)
+ {
+ return JsNoError;
+ }
+
void ModuleSuccessTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
{
JsModuleRecord requestModule = JS_INVALID_REFERENCE;
@@ -2197,7 +2217,8 @@ namespace JsRTApiTest
successTest.mainModule = requestModule;
REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_FetchImportedModuleCallback, Success_FIMC) == JsNoError);
REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_FetchImportedModuleFromScriptCallback, Success_FIMC) == JsNoError);
- REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_NotifyModuleReadyCallback, Succes_NMRC) == JsNoError);
+ REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_NotifyModuleReadyCallback, Success_NMRC) == JsNoError);
+ REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_InitializeImportMetaCallback, Success_IIMC) == JsNoError);
JsValueRef errorObject = JS_INVALID_REFERENCE;
const char* fileContent = "import {x} from 'foo.js'";
@@ -2239,7 +2260,70 @@ namespace JsRTApiTest
TEST_CASE("ApiTest_ModuleSuccessTest", "[ApiTest]")
{
JsRTApiTest::WithSetup(JsRuntimeAttributeEnableExperimentalFeatures, ModuleSuccessTest);
+ }
+
+ void JsIsCallableTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
+ {
+ JsValueRef callables, callable, index, nonCallables, nonCallable;
+ bool check;
+
+ REQUIRE(JsRunScript(_u("[function(){},function*(){},async function(){},async function*(){},_=>_,async _=>_]"),
+ JS_SOURCE_CONTEXT_NONE, _u(""), &callables) == JsNoError);
+
+ for (int i = 0; i < 6; i++)
+ {
+ REQUIRE(JsIntToNumber(i, &index) == JsNoError);
+ REQUIRE(JsGetIndexedProperty(callables, index, &callable) == JsNoError);
+ REQUIRE(JsIsCallable(callable, &check) == JsNoError);
+ CHECK(check);
+ }
+
+
+ REQUIRE(JsRunScript(_u("[class{},Math,Reflect,{}]"), JS_SOURCE_CONTEXT_NONE, _u(""), &nonCallables) == JsNoError);
+
+ for (int i = 0; i < 4; i++)
+ {
+ REQUIRE(JsIntToNumber(i, &index) == JsNoError);
+ REQUIRE(JsGetIndexedProperty(nonCallables, index, &nonCallable) == JsNoError);
+ REQUIRE(JsIsCallable(nonCallable, &check) == JsNoError);
+ CHECK(!check);
+ }
+ }
+
+ TEST_CASE("ApiTest_JsIsCallableTest", "[ApiTest]") {
+ JsRTApiTest::RunWithAttributes(JsIsCallableTest);
+ }
+
+ void JsIsConstructorTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
+ {
+ JsValueRef constructables, constructable, index, nonConstructables, nonConstructable;
+ bool check;
+
+ REQUIRE(JsRunScript(_u("[class{},function(){}]"), JS_SOURCE_CONTEXT_NONE, _u(""), &constructables) == JsNoError);
+
+ for (int i = 0; i < 2; i++)
+ {
+ REQUIRE(JsIntToNumber(i, &index) == JsNoError);
+ REQUIRE(JsGetIndexedProperty(constructables, index, &constructable) == JsNoError);
+ REQUIRE(JsIsConstructor(constructable, &check) == JsNoError);
+ CHECK(check);
+ }
+
+
+ REQUIRE(JsRunScript(_u("[Math,Reflect,{},function*(){},async function(){},async function*(){},_=>_,async _=>_]"),
+ JS_SOURCE_CONTEXT_NONE, _u(""), &nonConstructables) == JsNoError);
+
+ for (int i = 0; i < 8; i++)
+ {
+ REQUIRE(JsIntToNumber(i, &index) == JsNoError);
+ REQUIRE(JsGetIndexedProperty(nonConstructables, index, &nonConstructable) == JsNoError);
+ REQUIRE(JsIsConstructor(nonConstructable, &check) == JsNoError);
+ CHECK(!check);
+ }
+ }
+ TEST_CASE("ApiTest_JsIsConstructorTest", "[ApiTest]") {
+ JsRTApiTest::RunWithAttributes(JsIsConstructorTest);
}
void SetModuleHostInfoTest(JsRuntimeAttributes attributes, JsRuntimeHandle runtime)
@@ -2265,7 +2349,7 @@ namespace JsRTApiTest
REQUIRE(JsInitializeModuleRecord(nullptr, specifier, &requestModule) == JsNoError);
successTest.mainModule = requestModule;
- REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_NotifyModuleReadyCallback, Succes_NMRC) == JsNoError);
+ REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_NotifyModuleReadyCallback, Success_NMRC) == JsNoError);
// Parsing
JsValueRef errorObject1 = JS_INVALID_REFERENCE;
@@ -2313,7 +2397,7 @@ namespace JsRTApiTest
successTest.mainModule = requestModule;
REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_FetchImportedModuleCallback, Success_FIMC1) == JsNoError);
REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_FetchImportedModuleFromScriptCallback, Success_FIMC1) == JsNoError);
- REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_NotifyModuleReadyCallback, Succes_NMRC) == JsNoError);
+ REQUIRE(JsSetModuleHostInfo(requestModule, JsModuleHostInfo_NotifyModuleReadyCallback, Success_NMRC) == JsNoError);
JsValueRef errorObject = JS_INVALID_REFERENCE;
const char* fileContent = "import {x} from 'foo.js'";
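The ExceptionHandlingTest refactor folds the repeated property checks into CheckExceptionMetadata; the fields it validates (exception, line, column, length, url, source) are the ones a host would read after JsGetAndClearExceptionWithMetadata. A hedged host-side sketch follows, with the trigger script and most error handling elided or simplified for brevity.

// Hedged sketch, not part of this patch: reading the metadata object whose
// shape CheckExceptionMetadata validates. Assumes an active JSRT context and
// the test harness headers; return codes not branched on are ignored here.
void ReportLastScriptError()
{
    JsValueRef result = JS_INVALID_REFERENCE;
    if (JsRunScript(_u("null.x;"), JS_SOURCE_CONTEXT_NONE, _u(""), &result) != JsErrorScriptException)
    {
        return; // nothing was thrown
    }

    JsValueRef metadata = JS_INVALID_REFERENCE;
    if (JsGetAndClearExceptionWithMetadata(&metadata) != JsNoError)
    {
        return;
    }

    // "line" (like "column") is a JsNumber value, per the checks above.
    JsPropertyIdRef lineId = JS_INVALID_REFERENCE;
    JsValueRef lineValue = JS_INVALID_REFERENCE;
    int line = -1;
    JsGetPropertyIdFromName(_u("line"), &lineId);
    JsGetProperty(metadata, lineId, &lineValue);
    JsNumberToInt(lineValue, &line);
    fprintf(stderr, "script exception at line %d\n", line);
}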
diff --git a/bin/NativeTests/NativeTests.vcxproj b/bin/NativeTests/NativeTests.vcxproj
index 12b0412c73b..cc28f224988 100644
--- a/bin/NativeTests/NativeTests.vcxproj
+++ b/bin/NativeTests/NativeTests.vcxproj
@@ -48,6 +48,8 @@
+
+
diff --git a/bin/NativeTests/stdafx.h b/bin/NativeTests/stdafx.h
index cd22fe95ada..c0d17b59a2b 100644
--- a/bin/NativeTests/stdafx.h
+++ b/bin/NativeTests/stdafx.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
@@ -25,11 +26,16 @@
#define DebugOnly(x) x
+#if !defined(CHAKRACORE_STRINGIZE)
+#define CHAKRACORE_STRINGIZE_IMPL(x) #x
+#define CHAKRACORE_STRINGIZE(x) CHAKRACORE_STRINGIZE_IMPL(x)
+#endif
+
#define AssertMsg(exp, comment) \
do { \
if (!(exp)) \
{ \
- fprintf(stderr, "ASSERTION (%s, line %d) %s %s\n", __FILE__, __LINE__, _STRINGIZE(exp), comment); \
+ fprintf(stderr, "ASSERTION (%s, line %d) %s %s\n", __FILE__, __LINE__, CHAKRACORE_STRINGIZE(exp), comment); \
fflush(stderr); \
DebugBreak(); \
} \
@@ -40,7 +46,7 @@ if (!(exp)) \
#define Assert(exp) AssertMsg(exp, #exp)
#define _JSRT_
-#include "chakracore.h"
+#include "ChakraCore.h"
#include "Core/CommonTypedefs.h"
#include
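The CHAKRACORE_STRINGIZE pair replaces the MSVC-specific _STRINGIZE; the two-level expansion is what lets a macro argument expand before '#' stringizes it. A standalone illustration of the design choice, not taken from the repo:

// Standalone illustration: why stringization needs the extra level of
// indirection used by CHAKRACORE_STRINGIZE above.
#include <cstdio>

#define STRINGIZE_IMPL(x) #x
#define STRINGIZE(x) STRINGIZE_IMPL(x)
#define ANSWER 42

int main()
{
    std::puts(STRINGIZE_IMPL(ANSWER)); // prints "ANSWER" -- argument not expanded
    std::puts(STRINGIZE(ANSWER));      // prints "42"     -- expanded, then stringized
    return 0;
}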
diff --git a/bin/ch/262.js b/bin/ch/262.js
index 6ff3bd31aca..48483f0c5ed 100644
--- a/bin/ch/262.js
+++ b/bin/ch/262.js
@@ -1,31 +1,31 @@
//-------------------------------------------------------------------------------------------------------
-// Copyright (C) Microsoft. All rights reserved.
+// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
R"====(
var $262 = {
- createRealm: function () {
- return WScript.LoadScript('', 'samethread').$262;
- },
- global: this,
- agent: {
- start: function (src) {
- WScript.LoadScript(
- `
- $262 = {
- agent:{
- receiveBroadcast: function(callback){ WScript.ReceiveBroadcast(callback); },
- report: function(value){ WScript.Report(value); },
- leaving: function(){ WScript.Leaving(); }
- }
- };
- ${src}
- `, 'crossthread');
- },
- broadcast: function (sab) { WScript.Broadcast(sab); },
- sleep: function (timeout) { WScript.Sleep(timeout); },
- getReport: function () { return WScript.GetReport(); },
- },
+ createRealm: () => WScript.LoadScript('', 'samethread').$262,
+ global: this,
+ agent: {
+ start(src) {
+ WScript.LoadScript(`
+ $262 = {
+ agent: {
+ receiveBroadcast: WScript.ReceiveBroadcast,
+ report: WScript.Report,
+ leaving: WScript.Leaving,
+ monotonicNow: WScript.monotonicNow
+ }
+ };
+ ${ src }
+ `, 'crossthread');
+ },
+ broadcast: WScript.Broadcast,
+ sleep: WScript.Sleep,
+ getReport: WScript.GetReport,
+ monotonicNow: WScript.monotonicNow
+ }
};
-)===="
\ No newline at end of file
+)===="
diff --git a/bin/ch/CMakeLists.txt b/bin/ch/CMakeLists.txt
index 7e495f21041..012978877ff 100644
--- a/bin/ch/CMakeLists.txt
+++ b/bin/ch/CMakeLists.txt
@@ -1,3 +1,10 @@
+find_package (Python COMPONENTS Interpreter)
+add_custom_target(dbg_controller_h
+ COMMAND ${Python_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/jstoc.py ${CMAKE_CURRENT_SOURCE_DIR}/DbgController.js controllerScript
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/jstoc.py ${CMAKE_CURRENT_SOURCE_DIR}/DbgController.js
+ )
+
set(ch_source_files
ch.cpp
ChakraRtInterface.cpp
@@ -17,6 +24,8 @@ endif()
add_executable (ch ${ch_source_files})
+add_dependencies(ch dbg_controller_h)
+
set_target_properties(ch
PROPERTIES
POSITION_INDEPENDENT_CODE True
@@ -30,6 +39,7 @@ endif()
target_include_directories (ch
PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}
+ ${CMAKE_CURRENT_BINARY_DIR}
../ChakraCore
../../lib/Common
../../lib/Jsrt
@@ -109,7 +119,6 @@ elseif(CC_TARGET_OS_OSX)
endif()
endif()
-
target_link_libraries (ch
${lib_target}
${CC_LTO_ENABLED}
diff --git a/bin/ch/ChakraRtInterface.cpp b/bin/ch/ChakraRtInterface.cpp
index 5ce6f60a714..bab1e7bc18e 100644
--- a/bin/ch/ChakraRtInterface.cpp
+++ b/bin/ch/ChakraRtInterface.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "stdafx.h"
@@ -78,12 +79,20 @@ bool ChakraRTInterface::LoadChakraDll(ArgInfo* argInfo, HINSTANCE *outLibrary)
m_jsApiHooks.pfJsrtCreateRuntime = (JsAPIHooks::JsrtCreateRuntimePtr)GetChakraCoreSymbol(library, "JsCreateRuntime");
m_jsApiHooks.pfJsrtCreateContext = (JsAPIHooks::JsrtCreateContextPtr)GetChakraCoreSymbol(library, "JsCreateContext");
m_jsApiHooks.pfJsrtSetObjectBeforeCollectCallback = (JsAPIHooks::JsrtSetObjectBeforeCollectCallbackPtr)GetChakraCoreSymbol(library, "JsSetObjectBeforeCollectCallback");
+ m_jsApiHooks.pfJsrtSetRuntimeDomWrapperTracingCallbacks = (JsAPIHooks::JsrtSetRuntimeDomWrapperTracingCallbacksPtr)GetChakraCoreSymbol(library, "JsSetRuntimeDomWrapperTracingCallbacks");
m_jsApiHooks.pfJsrtSetRuntimeMemoryLimit = (JsAPIHooks::JsrtSetRuntimeMemoryLimitPtr)GetChakraCoreSymbol(library, "JsSetRuntimeMemoryLimit");
m_jsApiHooks.pfJsrtSetCurrentContext = (JsAPIHooks::JsrtSetCurrentContextPtr)GetChakraCoreSymbol(library, "JsSetCurrentContext");
m_jsApiHooks.pfJsrtGetCurrentContext = (JsAPIHooks::JsrtGetCurrentContextPtr)GetChakraCoreSymbol(library, "JsGetCurrentContext");
m_jsApiHooks.pfJsrtDisposeRuntime = (JsAPIHooks::JsrtDisposeRuntimePtr)GetChakraCoreSymbol(library, "JsDisposeRuntime");
m_jsApiHooks.pfJsrtCreateObject = (JsAPIHooks::JsrtCreateObjectPtr)GetChakraCoreSymbol(library, "JsCreateObject");
m_jsApiHooks.pfJsrtCreateExternalObject = (JsAPIHooks::JsrtCreateExternalObjectPtr)GetChakraCoreSymbol(library, "JsCreateExternalObject");
+ m_jsApiHooks.pfJsrtGetArrayForEachFunction = (JsAPIHooks::JsrtGetArrayForEachFunctionPtr)GetChakraCoreSymbol(library, "JsGetArrayForEachFunction");
+ m_jsApiHooks.pfJsrtGetArrayKeysFunction = (JsAPIHooks::JsrtGetArrayKeysFunctionPtr)GetChakraCoreSymbol(library, "JsGetArrayKeysFunction");
+ m_jsApiHooks.pfJsrtGetArrayValuesFunction = (JsAPIHooks::JsrtGetArrayValuesFunctionPtr)GetChakraCoreSymbol(library, "JsGetArrayValuesFunction");
+ m_jsApiHooks.pfJsrtGetArrayEntriesFunction = (JsAPIHooks::JsrtGetArrayEntriesFunctionPtr)GetChakraCoreSymbol(library, "JsGetArrayEntriesFunction");
+ m_jsApiHooks.pfJsrtGetPropertyIdSymbolIterator = (JsAPIHooks::JsrtGetPropertyIdSymbolIteratorPtr)GetChakraCoreSymbol(library, "JsGetPropertyIdSymbolIterator");
+ m_jsApiHooks.pfJsrtGetErrorPrototype = (JsAPIHooks::JsrtGetErrorPrototypePtr)GetChakraCoreSymbol(library, "JsGetErrorPrototype");
+ m_jsApiHooks.pfJsrtGetIteratorPrototype = (JsAPIHooks::JsrtGetIteratorPrototypePtr)GetChakraCoreSymbol(library, "JsGetIteratorPrototype");
m_jsApiHooks.pfJsrtCreateFunction = (JsAPIHooks::JsrtCreateFunctionPtr)GetChakraCoreSymbol(library, "JsCreateFunction");
m_jsApiHooks.pfJsrtCreateNamedFunction = (JsAPIHooks::JsCreateNamedFunctionPtr)GetChakraCoreSymbol(library, "JsCreateNamedFunction");
m_jsApiHooks.pfJsrtSetProperty = (JsAPIHooks::JsrtSetPropertyPtr)GetChakraCoreSymbol(library, "JsSetProperty");
@@ -105,6 +114,7 @@ bool ChakraRTInterface::LoadChakraDll(ArgInfo* argInfo, HINSTANCE *outLibrary)
m_jsApiHooks.pfJsrtDoubleToNumber = (JsAPIHooks::JsrtDoubleToNumberPtr)GetChakraCoreSymbol(library, "JsDoubleToNumber");
m_jsApiHooks.pfJsrtGetExternalData = (JsAPIHooks::JsrtGetExternalDataPtr)GetChakraCoreSymbol(library, "JsGetExternalData");
m_jsApiHooks.pfJsrtSetExternalData = (JsAPIHooks::JsrtSetExternalDataPtr)GetChakraCoreSymbol(library, "JsSetExternalData");
+ m_jsApiHooks.pfJsrtCloneObject = (JsAPIHooks::JsrtCloneObjectPtr)GetChakraCoreSymbol(library, "JsCloneObject");
m_jsApiHooks.pfJsrtCreateArray = (JsAPIHooks::JsrtCreateArrayPtr)GetChakraCoreSymbol(library, "JsCreateArray");
m_jsApiHooks.pfJsrtCreateArrayBuffer = (JsAPIHooks::JsrtCreateArrayBufferPtr)GetChakraCoreSymbol(library, "JsCreateArrayBuffer");
m_jsApiHooks.pfJsrtCreateSharedArrayBufferWithSharedContent = (JsAPIHooks::JsrtCreateSharedArrayBufferWithSharedContentPtr)GetChakraCoreSymbol(library, "JsCreateSharedArrayBufferWithSharedContent");
@@ -113,12 +123,14 @@ bool ChakraRTInterface::LoadChakraDll(ArgInfo* argInfo, HINSTANCE *outLibrary)
m_jsApiHooks.pfJsrtGetArrayBufferStorage = (JsAPIHooks::JsrtGetArrayBufferStoragePtr)GetChakraCoreSymbol(library, "JsGetArrayBufferStorage");
m_jsApiHooks.pfJsrtHasException = (JsAPIHooks::JsrtHasExceptionPtr)GetChakraCoreSymbol(library, "JsHasException");
m_jsApiHooks.pfJsrtSetException = (JsAPIHooks::JsrtSetExceptionPtr)GetChakraCoreSymbol(library, "JsSetException");
+ m_jsApiHooks.pfJsrtGetAndClearExceptionWithMetadata = (JsAPIHooks::JsrtGetAndClearExceptiopnWithMetadataPtr)GetChakraCoreSymbol(library, "JsGetAndClearExceptionWithMetadata");
m_jsApiHooks.pfJsrtGetAndClearException = (JsAPIHooks::JsrtGetAndClearExceptiopnPtr)GetChakraCoreSymbol(library, "JsGetAndClearException");
m_jsApiHooks.pfJsrtCreateError = (JsAPIHooks::JsrtCreateErrorPtr)GetChakraCoreSymbol(library, "JsCreateError");
m_jsApiHooks.pfJsrtGetRuntime = (JsAPIHooks::JsrtGetRuntimePtr)GetChakraCoreSymbol(library, "JsGetRuntime");
m_jsApiHooks.pfJsrtRelease = (JsAPIHooks::JsrtReleasePtr)GetChakraCoreSymbol(library, "JsRelease");
m_jsApiHooks.pfJsrtAddRef = (JsAPIHooks::JsrtAddRefPtr)GetChakraCoreSymbol(library, "JsAddRef");
m_jsApiHooks.pfJsrtGetValueType = (JsAPIHooks::JsrtGetValueType)GetChakraCoreSymbol(library, "JsGetValueType");
+ m_jsApiHooks.pfJsrtGetIndexedProperty = (JsAPIHooks::JsrtGetIndexedPropertyPtr)GetChakraCoreSymbol(library, "JsGetIndexedProperty");
m_jsApiHooks.pfJsrtSetIndexedProperty = (JsAPIHooks::JsrtSetIndexedPropertyPtr)GetChakraCoreSymbol(library, "JsSetIndexedProperty");
m_jsApiHooks.pfJsrtSetPromiseContinuationCallback = (JsAPIHooks::JsrtSetPromiseContinuationCallbackPtr)GetChakraCoreSymbol(library, "JsSetPromiseContinuationCallback");
m_jsApiHooks.pfJsrtSetHostPromiseRejectionTracker = (JsAPIHooks::JsrtSetHostPromiseRejectionTrackerPtr)GetChakraCoreSymbol(library, "JsSetHostPromiseRejectionTracker");
@@ -160,6 +172,10 @@ bool ChakraRTInterface::LoadChakraDll(ArgInfo* argInfo, HINSTANCE *outLibrary)
m_jsApiHooks.pfJsrtSerializeParserState = (JsAPIHooks::JsrtSerializeParserState)GetChakraCoreSymbol(library, "JsSerializeParserState");
m_jsApiHooks.pfJsrtRunScriptWithParserState = (JsAPIHooks::JsrtRunScriptWithParserState)GetChakraCoreSymbol(library, "JsRunScriptWithParserState");
+ m_jsApiHooks.pfJsrtQueueBackgroundParse_Experimental = (JsAPIHooks::JsrtQueueBackgroundParse_Experimental)GetChakraCoreSymbol(library, "JsQueueBackgroundParse_Experimental");
+ m_jsApiHooks.pfJsrtDiscardBackgroundParse_Experimental = (JsAPIHooks::JsrtDiscardBackgroundParse_Experimental)GetChakraCoreSymbol(library, "JsDiscardBackgroundParse_Experimental");
+ m_jsApiHooks.pfJsrtExecuteBackgroundParse_Experimental = (JsAPIHooks::JsrtExecuteBackgroundParse_Experimental)GetChakraCoreSymbol(library, "JsExecuteBackgroundParse_Experimental");
+
m_jsApiHooks.pfJsrtTTDCreateRecordRuntime = (JsAPIHooks::JsrtTTDCreateRecordRuntimePtr)GetChakraCoreSymbol(library, "JsTTDCreateRecordRuntime");
m_jsApiHooks.pfJsrtTTDCreateReplayRuntime = (JsAPIHooks::JsrtTTDCreateReplayRuntimePtr)GetChakraCoreSymbol(library, "JsTTDCreateReplayRuntime");
m_jsApiHooks.pfJsrtTTDCreateContext = (JsAPIHooks::JsrtTTDCreateContextPtr)GetChakraCoreSymbol(library, "JsTTDCreateContext");
@@ -174,6 +190,23 @@ bool ChakraRTInterface::LoadChakraDll(ArgInfo* argInfo, HINSTANCE *outLibrary)
m_jsApiHooks.pfJsrtTTDGetSnapTimeTopLevelEventMove = (JsAPIHooks::JsrtTTDGetSnapTimeTopLevelEventMovePtr)GetChakraCoreSymbol(library, "JsTTDGetSnapTimeTopLevelEventMove");
m_jsApiHooks.pfJsrtTTDMoveToTopLevelEvent = (JsAPIHooks::JsrtTTDMoveToTopLevelEventPtr)GetChakraCoreSymbol(library, "JsTTDMoveToTopLevelEvent");
m_jsApiHooks.pfJsrtTTDReplayExecution = (JsAPIHooks::JsrtTTDReplayExecutionPtr)GetChakraCoreSymbol(library, "JsTTDReplayExecution");
+ m_jsApiHooks.pfJsrtVarSerializer = (JsAPIHooks::JsrtVarSerializerPtr)GetChakraCoreSymbol(library, "JsVarSerializer");
+ m_jsApiHooks.pfJsrtVarSerializerSetTransferableVars = (JsAPIHooks::JsrtVarSerializerSetTransferableVarsPtr)GetChakraCoreSymbol(library, "JsVarSerializerSetTransferableVars");
+ m_jsApiHooks.pfJsrtVarSerializerWriteValue = (JsAPIHooks::JsrtVarSerializerWriteValuePtr)GetChakraCoreSymbol(library, "JsVarSerializerWriteValue");
+ m_jsApiHooks.pfJsrtVarSerializerReleaseData = (JsAPIHooks::JsrtVarSerializerReleaseDataPtr)GetChakraCoreSymbol(library, "JsVarSerializerReleaseData");
+ m_jsApiHooks.pfJsrtVarSerializerFree = (JsAPIHooks::JsrtVarSerializerFreePtr)GetChakraCoreSymbol(library, "JsVarSerializerFree");
+ m_jsApiHooks.pfJsrtVarDeserializer = (JsAPIHooks::JsrtVarDeserializerPtr)GetChakraCoreSymbol(library, "JsVarDeserializer");
+ m_jsApiHooks.pfJsrtVarDeserializerSetTransferableVars = (JsAPIHooks::JsrtVarDeserializerSetTransferableVarsPtr)GetChakraCoreSymbol(library, "JsVarDeserializerSetTransferableVars");
+ m_jsApiHooks.pfJsrtVarDeserializerReadValue = (JsAPIHooks::JsrtVarDeserializerReadValuePtr)GetChakraCoreSymbol(library, "JsVarDeserializerReadValue");
+ m_jsApiHooks.pfJsrtVarDeserializerFree = (JsAPIHooks::JsrtVarDeserializerFreePtr)GetChakraCoreSymbol(library, "JsVarDeserializerFree");
+
+ m_jsApiHooks.pfJsrtDetachArrayBuffer = (JsAPIHooks::JsrtDetachArrayBufferPtr)GetChakraCoreSymbol(library, "JsDetachArrayBuffer");
+ m_jsApiHooks.pfJsrtGetArrayBufferFreeFunction = (JsAPIHooks::JsrtGetArrayBufferFreeFunction)GetChakraCoreSymbol(library, "JsGetArrayBufferFreeFunction");
+ m_jsApiHooks.pfJsrtExternalizeArrayBuffer = (JsAPIHooks::JsrtExternalizeArrayBufferPtr)GetChakraCoreSymbol(library, "JsExternalizeArrayBuffer");
+
+#ifdef _WIN32
+ m_jsApiHooks.pfJsrtConnectJITProcess = (JsAPIHooks::JsrtConnectJITProcess)GetChakraCoreSymbol(library, "JsConnectJITProcess");
+#endif
#endif
return true;
diff --git a/bin/ch/ChakraRtInterface.h b/bin/ch/ChakraRtInterface.h
index cbd23641e2e..7eb0cef3da6 100644
--- a/bin/ch/ChakraRtInterface.h
+++ b/bin/ch/ChakraRtInterface.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
@@ -9,12 +10,20 @@ struct JsAPIHooks
typedef JsErrorCode (WINAPI *JsrtCreateRuntimePtr)(JsRuntimeAttributes attributes, JsThreadServiceCallback threadService, JsRuntimeHandle *runtime);
typedef JsErrorCode (WINAPI *JsrtCreateContextPtr)(JsRuntimeHandle runtime, JsContextRef *newContext);
typedef JsErrorCode (WINAPI *JsrtSetObjectBeforeCollectCallbackPtr)(JsRef ref, void* callbackState, JsObjectBeforeCollectCallback objectBeforeCollectCallback);
+ typedef JsErrorCode(WINAPI *JsrtSetRuntimeDomWrapperTracingCallbacksPtr)(JsRuntimeHandle runtime, JsRef wrapperTracingState, JsDOMWrapperTracingCallback wrapperTracingCallback, JsDOMWrapperTracingDoneCallback wrapperTracingDoneCallback, JsDOMWrapperTracingEnterFinalPauseCallback enterFinalPauseCallback);
typedef JsErrorCode (WINAPI *JsrtSetRuntimeMemoryLimitPtr)(JsRuntimeHandle runtime, size_t memoryLimit);
typedef JsErrorCode (WINAPI *JsrtSetCurrentContextPtr)(JsContextRef context);
typedef JsErrorCode (WINAPI *JsrtGetCurrentContextPtr)(JsContextRef* context);
typedef JsErrorCode (WINAPI *JsrtDisposeRuntimePtr)(JsRuntimeHandle runtime);
typedef JsErrorCode (WINAPI *JsrtCreateObjectPtr)(JsValueRef *object);
typedef JsErrorCode (WINAPI *JsrtCreateExternalObjectPtr)(void* data, JsFinalizeCallback callback, JsValueRef *object);
+ typedef JsErrorCode (WINAPI *JsrtGetArrayForEachFunctionPtr)(JsValueRef *result);
+ typedef JsErrorCode (WINAPI *JsrtGetArrayKeysFunctionPtr)(JsValueRef *result);
+ typedef JsErrorCode (WINAPI *JsrtGetArrayValuesFunctionPtr)(JsValueRef *result);
+ typedef JsErrorCode (WINAPI *JsrtGetArrayEntriesFunctionPtr)(JsValueRef *result);
+ typedef JsErrorCode (WINAPI *JsrtGetPropertyIdSymbolIteratorPtr)(JsPropertyIdRef *propertyId);
+ typedef JsErrorCode (WINAPI *JsrtGetErrorPrototypePtr)(JsValueRef *result);
+ typedef JsErrorCode (WINAPI *JsrtGetIteratorPrototypePtr)(JsValueRef *result);
typedef JsErrorCode (WINAPI *JsrtCreateFunctionPtr)(JsNativeFunction nativeFunction, void *callbackState, JsValueRef *function);
typedef JsErrorCode (WINAPI *JsrtCreateEnhancedFunctionPtr)(JsEnhancedNativeFunction nativeFunction, JsValueRef metadata, void *callbackState, JsValueRef *function);
typedef JsErrorCode (WINAPI *JsCreateNamedFunctionPtr)(JsValueRef name, JsNativeFunction nativeFunction, void *callbackState, JsValueRef *function);
@@ -40,6 +49,7 @@ struct JsAPIHooks
typedef JsErrorCode (WINAPI *JsrtDoubleToNumberPtr)(double doubleValue, JsValueRef* value);
typedef JsErrorCode (WINAPI *JsrtGetExternalDataPtr)(JsValueRef object, void **data);
typedef JsErrorCode (WINAPI *JsrtSetExternalDataPtr)(JsValueRef object, void *data);
+ typedef JsErrorCode(WINAPI *JsrtCloneObjectPtr)(JsValueRef object, JsValueRef *newObject);
typedef JsErrorCode (WINAPI *JsrtCreateArrayPtr)(unsigned int length, JsValueRef *result);
typedef JsErrorCode (WINAPI *JsrtCreateArrayBufferPtr)(unsigned int byteLength, JsValueRef *result);
typedef JsErrorCode (WINAPI *JsrtCreateSharedArrayBufferWithSharedContentPtr)(JsSharedArrayBufferContentHandle sharedContent, JsValueRef *result);
@@ -49,11 +59,13 @@ struct JsAPIHooks
typedef JsErrorCode (WINAPI *JsrtCreateErrorPtr)(JsValueRef message, JsValueRef *error);
typedef JsErrorCode (WINAPI *JsrtHasExceptionPtr)(bool *hasException);
typedef JsErrorCode (WINAPI *JsrtSetExceptionPtr)(JsValueRef exception);
+ typedef JsErrorCode (WINAPI *JsrtGetAndClearExceptiopnWithMetadataPtr)(JsValueRef* metadata);
typedef JsErrorCode (WINAPI *JsrtGetAndClearExceptiopnPtr)(JsValueRef* exception);
typedef JsErrorCode (WINAPI *JsrtGetRuntimePtr)(JsContextRef context, JsRuntimeHandle *runtime);
typedef JsErrorCode (WINAPI *JsrtReleasePtr)(JsRef ref, unsigned int* count);
typedef JsErrorCode (WINAPI *JsrtAddRefPtr)(JsRef ref, unsigned int* count);
typedef JsErrorCode (WINAPI *JsrtGetValueType)(JsValueRef value, JsValueType *type);
+ typedef JsErrorCode(WINAPI *JsrtGetIndexedPropertyPtr)(JsValueRef object, JsValueRef index, JsValueRef *value);
typedef JsErrorCode (WINAPI *JsrtSetIndexedPropertyPtr)(JsValueRef object, JsValueRef index, JsValueRef value);
typedef JsErrorCode (WINAPI *JsrtSetPromiseContinuationCallbackPtr)(JsPromiseContinuationCallback callback, void *callbackState);
typedef JsErrorCode (WINAPI *JsrtSetHostPromiseRejectionTrackerPtr)(JsHostPromiseRejectionTrackerCallback callback, void *callbackState);
@@ -92,6 +104,10 @@ struct JsAPIHooks
typedef JsErrorCode(WINAPI *JsrtSerializeParserState)(JsValueRef script, JsValueRef *buffer, JsParseScriptAttributes parseAttributes);
typedef JsErrorCode(WINAPI *JsrtRunScriptWithParserState)(JsValueRef script, JsSourceContext sourceContext, JsValueRef sourceUrl, JsParseScriptAttributes parseAttributes, JsValueRef parserState, JsValueRef *result);
+
+ typedef JsErrorCode(WINAPI *JsrtQueueBackgroundParse_Experimental)(JsScriptContents* contents, DWORD* dwBgParseCookie);
+ typedef JsErrorCode(WINAPI *JsrtDiscardBackgroundParse_Experimental)(DWORD dwBgParseCookie, void* buffer, bool* callerOwnsBuffer);
+ typedef JsErrorCode(WINAPI *JsrtExecuteBackgroundParse_Experimental)(DWORD dwBgParseCookie, JsValueRef script, JsSourceContext sourceContext, WCHAR *url, JsParseScriptAttributes parseAttributes, JsValueRef parserState, JsValueRef *result);
typedef JsErrorCode(WINAPI *JsrtTTDCreateRecordRuntimePtr)(JsRuntimeAttributes attributes, bool enableDebugging, size_t snapInterval, size_t snapHistoryLength, TTDOpenResourceStreamCallback openResourceStream, JsTTDWriteBytesToStreamCallback writeBytesToStream, JsTTDFlushAndCloseStreamCallback flushAndCloseStream, JsThreadServiceCallback threadService, JsRuntimeHandle *runtime);
typedef JsErrorCode(WINAPI *JsrtTTDCreateReplayRuntimePtr)(JsRuntimeAttributes attributes, const char* infoUri, size_t infoUriCount, bool enableDebugging, TTDOpenResourceStreamCallback openResourceStream, JsTTDReadBytesFromStreamCallback readBytesFromStream, JsTTDFlushAndCloseStreamCallback flushAndCloseStream, JsThreadServiceCallback threadService, JsRuntimeHandle *runtime);
@@ -108,15 +124,43 @@ struct JsAPIHooks
typedef JsErrorCode(WINAPI *JsrtTTDMoveToTopLevelEventPtr)(JsRuntimeHandle runtimeHandle, JsTTDMoveMode moveMode, int64_t snapshotStartTime, int64_t eventTime);
typedef JsErrorCode(WINAPI *JsrtTTDReplayExecutionPtr)(JsTTDMoveMode* moveMode, int64_t* rootEventTime);
+#ifdef _WIN32
+ typedef JsErrorCode(WINAPI *JsrtConnectJITProcess)(HANDLE processHandle, void* serverSecurityDescriptor, UUID connectionId);
+#endif
+
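+ // Hooks for the JsVarSerializer / JsVarDeserializer APIs, used by WScript.SerializeObject and WScript.Deserialize in ch.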
+ typedef JsErrorCode(WINAPI *JsrtVarSerializerPtr)(ReallocateBufferMemoryFunc reallocateBufferMemory, WriteHostObjectFunc writeHostObject, void * callbackState, JsVarSerializerHandle *serializerHandle);
+ typedef JsErrorCode(WINAPI *JsrtVarSerializerSetTransferableVarsPtr)(JsVarSerializerHandle serializerHandle, JsValueRef *transferableVars, size_t transferableVarsCount);
+ typedef JsErrorCode(WINAPI *JsrtVarSerializerWriteValuePtr)(JsVarSerializerHandle serializerHandle, JsValueRef rootObject);
+ typedef JsErrorCode(WINAPI *JsrtVarSerializerReleaseDataPtr)(JsVarSerializerHandle serializerHandle, byte** data, size_t *dataLength);
+ typedef JsErrorCode(WINAPI *JsrtVarSerializerFreePtr)(JsVarSerializerHandle serializerHandle);
+
+ typedef JsErrorCode(WINAPI *JsrtVarDeserializerPtr)(void *data, size_t dataLength, ReadHostObjectFunc readHostObject, GetSharedArrayBufferFromIdFunc getSharedArrayBufferFromId, void* callbackState, JsVarDeserializerHandle *deserializerHandle);
+ typedef JsErrorCode(WINAPI *JsrtVarDeserializerSetTransferableVarsPtr)(JsVarDeserializerHandle deserializerHandle, JsValueRef *transferableVars, size_t transferableVarsCount);
+ typedef JsErrorCode(WINAPI *JsrtVarDeserializerReadValuePtr)(JsVarDeserializerHandle deserializerHandle, JsValueRef* value);
+ typedef JsErrorCode(WINAPI *JsrtVarDeserializerFreePtr)(JsVarDeserializerHandle deserializerHandle);
+
+ typedef JsErrorCode(WINAPI *JsrtDetachArrayBufferPtr)(JsValueRef buffer);
+ typedef JsErrorCode(WINAPI* JsrtGetArrayBufferFreeFunction)(JsValueRef buffer, ArrayBufferFreeFn* freeFn);
+ typedef JsErrorCode(WINAPI* JsrtExternalizeArrayBufferPtr)(JsValueRef buffer);
+
JsrtCreateRuntimePtr pfJsrtCreateRuntime;
JsrtCreateContextPtr pfJsrtCreateContext;
JsrtSetObjectBeforeCollectCallbackPtr pfJsrtSetObjectBeforeCollectCallback;
+ JsrtSetRuntimeDomWrapperTracingCallbacksPtr pfJsrtSetRuntimeDomWrapperTracingCallbacks;
JsrtSetRuntimeMemoryLimitPtr pfJsrtSetRuntimeMemoryLimit;
JsrtSetCurrentContextPtr pfJsrtSetCurrentContext;
JsrtGetCurrentContextPtr pfJsrtGetCurrentContext;
JsrtDisposeRuntimePtr pfJsrtDisposeRuntime;
JsrtCreateObjectPtr pfJsrtCreateObject;
JsrtCreateExternalObjectPtr pfJsrtCreateExternalObject;
+ JsrtGetArrayForEachFunctionPtr pfJsrtGetArrayForEachFunction;
+ JsrtGetArrayKeysFunctionPtr pfJsrtGetArrayKeysFunction;
+ JsrtGetArrayValuesFunctionPtr pfJsrtGetArrayValuesFunction;
+ JsrtGetArrayEntriesFunctionPtr pfJsrtGetArrayEntriesFunction;
+ JsrtGetPropertyIdSymbolIteratorPtr pfJsrtGetPropertyIdSymbolIterator;
+ JsrtGetErrorPrototypePtr pfJsrtGetErrorPrototype;
+ JsrtGetIteratorPrototypePtr pfJsrtGetIteratorPrototype;
+
JsrtCreateFunctionPtr pfJsrtCreateFunction;
JsrtCreateEnhancedFunctionPtr pfJsrtCreateEnhancedFunction;
JsCreateNamedFunctionPtr pfJsrtCreateNamedFunction;
@@ -145,6 +189,7 @@ struct JsAPIHooks
JsrtDoubleToNumberPtr pfJsrtDoubleToNumber;
JsrtGetExternalDataPtr pfJsrtGetExternalData;
JsrtSetExternalDataPtr pfJsrtSetExternalData;
+ JsrtCloneObjectPtr pfJsrtCloneObject;
JsrtCreateArrayPtr pfJsrtCreateArray;
JsrtCreateArrayBufferPtr pfJsrtCreateArrayBuffer;
JsrtCreateSharedArrayBufferWithSharedContentPtr pfJsrtCreateSharedArrayBufferWithSharedContent;
@@ -154,11 +199,13 @@ struct JsAPIHooks
JsrtCreateErrorPtr pfJsrtCreateError;
JsrtHasExceptionPtr pfJsrtHasException;
JsrtSetExceptionPtr pfJsrtSetException;
+ JsrtGetAndClearExceptiopnWithMetadataPtr pfJsrtGetAndClearExceptionWithMetadata;
JsrtGetAndClearExceptiopnPtr pfJsrtGetAndClearException;
JsrtGetRuntimePtr pfJsrtGetRuntime;
JsrtReleasePtr pfJsrtRelease;
JsrtAddRefPtr pfJsrtAddRef;
JsrtGetValueType pfJsrtGetValueType;
+ JsrtGetIndexedPropertyPtr pfJsrtGetIndexedProperty;
JsrtSetIndexedPropertyPtr pfJsrtSetIndexedProperty;
JsrtSetPromiseContinuationCallbackPtr pfJsrtSetPromiseContinuationCallback;
JsrtSetHostPromiseRejectionTrackerPtr pfJsrtSetHostPromiseRejectionTracker;
@@ -195,6 +242,10 @@ struct JsAPIHooks
JsrtSerializeParserState pfJsrtSerializeParserState;
JsrtRunScriptWithParserState pfJsrtRunScriptWithParserState;
+ JsrtQueueBackgroundParse_Experimental pfJsrtQueueBackgroundParse_Experimental;
+ JsrtDiscardBackgroundParse_Experimental pfJsrtDiscardBackgroundParse_Experimental;
+ JsrtExecuteBackgroundParse_Experimental pfJsrtExecuteBackgroundParse_Experimental;
+
JsrtTTDCreateRecordRuntimePtr pfJsrtTTDCreateRecordRuntime;
JsrtTTDCreateReplayRuntimePtr pfJsrtTTDCreateReplayRuntime;
JsrtTTDCreateContextPtr pfJsrtTTDCreateContext;
@@ -209,6 +260,24 @@ struct JsAPIHooks
JsrtTTDGetSnapTimeTopLevelEventMovePtr pfJsrtTTDGetSnapTimeTopLevelEventMove;
JsrtTTDMoveToTopLevelEventPtr pfJsrtTTDMoveToTopLevelEvent;
JsrtTTDReplayExecutionPtr pfJsrtTTDReplayExecution;
+
+ JsrtVarSerializerPtr pfJsrtVarSerializer;
+ JsrtVarSerializerSetTransferableVarsPtr pfJsrtVarSerializerSetTransferableVars;
+ JsrtVarSerializerWriteValuePtr pfJsrtVarSerializerWriteValue;
+ JsrtVarSerializerReleaseDataPtr pfJsrtVarSerializerReleaseData;
+ JsrtVarSerializerFreePtr pfJsrtVarSerializerFree;
+
+ JsrtVarDeserializerPtr pfJsrtVarDeserializer;
+ JsrtVarDeserializerSetTransferableVarsPtr pfJsrtVarDeserializerSetTransferableVars;
+ JsrtVarDeserializerReadValuePtr pfJsrtVarDeserializerReadValue;
+ JsrtVarDeserializerFreePtr pfJsrtVarDeserializerFree;
+
+ JsrtDetachArrayBufferPtr pfJsrtDetachArrayBuffer;
+ JsrtGetArrayBufferFreeFunction pfJsrtGetArrayBufferFreeFunction;
+ JsrtExternalizeArrayBufferPtr pfJsrtExternalizeArrayBuffer;
+#ifdef _WIN32
+ JsrtConnectJITProcess pfJsrtConnectJITProcess;
+#endif
};
#ifdef _WIN32
@@ -299,18 +368,6 @@ class ChakraRTInterface
#endif
}
-#ifdef _WIN32
-#if ENABLE_NATIVE_CODEGEN
- static void ConnectJITServer(HANDLE processHandle, void* serverSecurityDescriptor, UUID connectionId)
- {
- if (m_testHooksSetup && m_testHooks.pfnConnectJITServer != NULL)
- {
- m_testHooks.pfnConnectJITServer(processHandle, serverSecurityDescriptor, connectionId);
- }
- }
-#endif
-#endif
-
static void NotifyUnhandledException(PEXCEPTION_POINTERS exceptionInfo)
{
if (m_testHooksSetup && m_testHooks.pfnNotifyUnhandledException != NULL)
@@ -327,12 +384,20 @@ class ChakraRTInterface
static JsErrorCode WINAPI JsCreateRuntime(JsRuntimeAttributes attributes, JsThreadServiceCallback threadService, JsRuntimeHandle *runtime) { return HOOK_JS_API(CreateRuntime(attributes, threadService, runtime)); }
static JsErrorCode WINAPI JsCreateContext(JsRuntimeHandle runtime, JsContextRef *newContext) { return HOOK_JS_API(CreateContext(runtime, newContext)); }
static JsErrorCode WINAPI JsSetObjectBeforeCollectCallback(JsRef ref, void* callbackState, JsObjectBeforeCollectCallback objectBeforeCollectCallback) { return HOOK_JS_API(SetObjectBeforeCollectCallback(ref, callbackState, objectBeforeCollectCallback)); }
+ static JsErrorCode WINAPI JsSetRuntimeDomWrapperTracingCallbacks(JsRuntimeHandle runtime, JsRef wrapperTracingState, JsDOMWrapperTracingCallback wrapperTracingCallback, JsDOMWrapperTracingDoneCallback wrapperTracingDoneCallback, JsDOMWrapperTracingEnterFinalPauseCallback enterFinalPauseCallback) { return HOOK_JS_API(SetRuntimeDomWrapperTracingCallbacks(runtime, wrapperTracingState, wrapperTracingCallback, wrapperTracingDoneCallback, enterFinalPauseCallback)); }
static JsErrorCode WINAPI JsSetRuntimeMemoryLimit(JsRuntimeHandle runtime, size_t memory) { return HOOK_JS_API(SetRuntimeMemoryLimit(runtime, memory)); }
static JsErrorCode WINAPI JsSetCurrentContext(JsContextRef context) { return HOOK_JS_API(SetCurrentContext(context)); }
static JsErrorCode WINAPI JsGetCurrentContext(JsContextRef* context) { return HOOK_JS_API(GetCurrentContext(context)); }
static JsErrorCode WINAPI JsDisposeRuntime(JsRuntimeHandle runtime) { return HOOK_JS_API(DisposeRuntime(runtime)); }
static JsErrorCode WINAPI JsCreateObject(JsValueRef *object) { return HOOK_JS_API(CreateObject(object)); }
static JsErrorCode WINAPI JsCreateExternalObject(void *data, JsFinalizeCallback callback, JsValueRef *object) { return HOOK_JS_API(CreateExternalObject(data, callback, object)); }
+ static JsErrorCode WINAPI JsGetArrayForEachFunction(JsValueRef * result) { return HOOK_JS_API(GetArrayForEachFunction(result)); }
+ static JsErrorCode WINAPI JsGetArrayKeysFunction(JsValueRef * result) { return HOOK_JS_API(GetArrayKeysFunction(result)); }
+ static JsErrorCode WINAPI JsGetArrayValuesFunction(JsValueRef * result) { return HOOK_JS_API(GetArrayValuesFunction(result)); }
+ static JsErrorCode WINAPI JsGetArrayEntriesFunction(JsValueRef * result) { return HOOK_JS_API(GetArrayEntriesFunction(result)); }
+ static JsErrorCode WINAPI JsGetPropertyIdSymbolIterator(JsPropertyIdRef * propertyId) { return HOOK_JS_API(GetPropertyIdSymbolIterator(propertyId)); }
+ static JsErrorCode WINAPI JsGetErrorPrototype(JsValueRef * result) { return HOOK_JS_API(GetErrorPrototype(result)); }
+ static JsErrorCode WINAPI JsGetIteratorPrototype(JsValueRef * result) { return HOOK_JS_API(GetIteratorPrototype(result)); }
static JsErrorCode WINAPI JsCreateFunction(JsNativeFunction nativeFunction, void *callbackState, JsValueRef *function) { return HOOK_JS_API(CreateFunction(nativeFunction, callbackState, function)); }
static JsErrorCode WINAPI JsCreateEnhancedFunction(JsEnhancedNativeFunction nativeFunction, JsValueRef metadata, void *callbackState, JsValueRef *function) { return HOOK_JS_API(CreateEnhancedFunction(nativeFunction, metadata, callbackState, function)); }
static JsErrorCode WINAPI JsCreateNamedFunction(JsValueRef name, JsNativeFunction nativeFunction, void *callbackState, JsValueRef *function) { return HOOK_JS_API(CreateNamedFunction(name, nativeFunction, callbackState, function)); }
@@ -355,6 +420,7 @@ class ChakraRTInterface
static JsErrorCode WINAPI JsDoubleToNumber(double doubleValue, JsValueRef* value) { return HOOK_JS_API(DoubleToNumber(doubleValue, value)); }
static JsErrorCode WINAPI JsGetExternalData(JsValueRef object, void **data) { return HOOK_JS_API(GetExternalData(object, data)); }
static JsErrorCode WINAPI JsSetExternalData(JsValueRef object, void *data) { return HOOK_JS_API(SetExternalData(object, data)); }
+ static JsErrorCode WINAPI JsCloneObject(JsValueRef object, JsValueRef *data) { return HOOK_JS_API(CloneObject(object, data)); }
static JsErrorCode WINAPI JsCreateArray(unsigned int length, JsValueRef *result) { return HOOK_JS_API(CreateArray(length, result)); }
static JsErrorCode WINAPI JsCreateArrayBuffer(unsigned int byteLength, JsValueRef *result) { return HOOK_JS_API(CreateArrayBuffer(byteLength, result)); }
static JsErrorCode WINAPI JsCreateSharedArrayBufferWithSharedContent(JsSharedArrayBufferContentHandle sharedContent, JsValueRef *result) { return HOOK_JS_API(CreateSharedArrayBufferWithSharedContent(sharedContent, result)); }
@@ -365,10 +431,12 @@ class ChakraRTInterface
static JsErrorCode WINAPI JsHasException(bool *hasException) { return HOOK_JS_API(HasException(hasException)); }
static JsErrorCode WINAPI JsSetException(JsValueRef exception) { return HOOK_JS_API(SetException(exception)); }
static JsErrorCode WINAPI JsGetAndClearException(JsValueRef *exception) { return HOOK_JS_API(GetAndClearException(exception)); }
+ static JsErrorCode WINAPI JsGetAndClearExceptionWithMetadata(JsValueRef * metadata) { return HOOK_JS_API(GetAndClearExceptionWithMetadata(metadata)); }
static JsErrorCode WINAPI JsGetRuntime(JsContextRef context, JsRuntimeHandle *runtime) { return HOOK_JS_API(GetRuntime(context, runtime)); }
static JsErrorCode WINAPI JsRelease(JsRef ref, unsigned int* count) { return HOOK_JS_API(Release(ref, count)); }
static JsErrorCode WINAPI JsAddRef(JsRef ref, unsigned int* count) { return HOOK_JS_API(AddRef(ref, count)); }
static JsErrorCode WINAPI JsGetValueType(JsValueRef value, JsValueType *type) { return HOOK_JS_API(GetValueType(value, type)); }
+ static JsErrorCode WINAPI JsGetIndexedProperty(JsValueRef object, JsValueRef index, JsValueRef *value) { return HOOK_JS_API(GetIndexedProperty(object, index, value)); }
static JsErrorCode WINAPI JsSetIndexedProperty(JsValueRef object, JsValueRef index, JsValueRef value) { return HOOK_JS_API(SetIndexedProperty(object, index, value)); }
static JsErrorCode WINAPI JsSetPromiseContinuationCallback(JsPromiseContinuationCallback callback, void *callbackState) { return HOOK_JS_API(SetPromiseContinuationCallback(callback, callbackState)); }
static JsErrorCode WINAPI JsSetHostPromiseRejectionTracker(JsHostPromiseRejectionTrackerCallback callback, void *callbackState) { return HOOK_JS_API(SetHostPromiseRejectionTracker(callback, callbackState)); }
@@ -430,6 +498,28 @@ class ChakraRTInterface
static JsErrorCode WINAPI JsSerializeParserState(JsValueRef script, JsValueRef *buffer, JsParseScriptAttributes parseAttributes) { return HOOK_JS_API(SerializeParserState(script, buffer, parseAttributes)); }
static JsErrorCode WINAPI JsRunScriptWithParserState(JsValueRef script, JsSourceContext sourceContext, JsValueRef sourceUrl, JsParseScriptAttributes parseAttributes, JsValueRef parserState, JsValueRef * result) { return HOOK_JS_API(RunScriptWithParserState(script, sourceContext, sourceUrl, parseAttributes, parserState, result)); }
+
+ static JsErrorCode WINAPI JsVarSerializer(ReallocateBufferMemoryFunc reallocateBufferMemory, WriteHostObjectFunc writeHostObject, void * callbackState, JsVarSerializerHandle *serializerHandle) { return HOOK_JS_API(VarSerializer(reallocateBufferMemory, writeHostObject, callbackState, serializerHandle)); }
+ static JsErrorCode WINAPI JsVarSerializerSetTransferableVars(JsVarSerializerHandle serializerHandle, JsValueRef *transferableVars, size_t transferableVarsCount) { return HOOK_JS_API(VarSerializerSetTransferableVars(serializerHandle, transferableVars, transferableVarsCount)); }
+ static JsErrorCode WINAPI JsVarSerializerWriteValue(JsVarSerializerHandle serializerHandle, JsValueRef rootObject) { return HOOK_JS_API(VarSerializerWriteValue(serializerHandle, rootObject)); }
+ static JsErrorCode WINAPI JsVarSerializerReleaseData(JsVarSerializerHandle serializerHandle, byte** data, size_t *dataLength) { return HOOK_JS_API(VarSerializerReleaseData(serializerHandle, data, dataLength)); }
+ static JsErrorCode WINAPI JsVarSerializerFree(JsVarSerializerHandle serializerHandle) { return HOOK_JS_API(VarSerializerFree(serializerHandle)); }
+
+ static JsErrorCode WINAPI JsVarDeserializer(void *data, size_t dataLength, ReadHostObjectFunc readHostObject, GetSharedArrayBufferFromIdFunc getSharedArrayBufferFromId, void* callbackState, JsVarDeserializerHandle *deserializerHandle) { return HOOK_JS_API(VarDeserializer(data, dataLength, readHostObject, getSharedArrayBufferFromId, callbackState, deserializerHandle)); }
+ static JsErrorCode WINAPI JsVarDeserializerSetTransferableVars(JsVarDeserializerHandle deserializerHandle, JsValueRef* transferableVars, size_t transferableVarsCount) { return HOOK_JS_API(VarDeserializerSetTransferableVars(deserializerHandle, transferableVars, transferableVarsCount)); }
+ static JsErrorCode WINAPI JsVarDeserializerReadValue(JsVarDeserializerHandle deserializerHandle, JsValueRef* value) { return HOOK_JS_API(VarDeserializerReadValue(deserializerHandle, value)); }
+ static JsErrorCode WINAPI JsVarDeserializerFree(JsVarDeserializerHandle deserializerHandle) { return HOOK_JS_API(VarDeserializerFree(deserializerHandle)); }
+
+ static JsErrorCode WINAPI JsDetachArrayBuffer(JsValueRef buffer) { return HOOK_JS_API(DetachArrayBuffer(buffer)); }
+ static JsErrorCode WINAPI JsQueueBackgroundParse_Experimental(JsScriptContents* contents, DWORD* dwBgParseCookie) { return HOOK_JS_API(QueueBackgroundParse_Experimental)(contents, dwBgParseCookie); }
+ static JsErrorCode WINAPI JsDiscardBackgroundParse_Experimental(DWORD dwBgParseCookie, void* buffer, bool* callerOwnsBuffer) { return HOOK_JS_API(DiscardBackgroundParse_Experimental(dwBgParseCookie, buffer, callerOwnsBuffer)); }
+ static JsErrorCode WINAPI JsExecuteBackgroundParse_Experimental(DWORD dwBgParseCookie, JsValueRef script, JsSourceContext sourceContext, WCHAR *url, JsParseScriptAttributes parseAttributes, JsValueRef parserState, JsValueRef *result) { return HOOK_JS_API(ExecuteBackgroundParse_Experimental(dwBgParseCookie, script, sourceContext, url, parseAttributes, parserState, result)); }
+#ifdef _WIN32
+ static JsErrorCode WINAPI JsConnectJITProcess(HANDLE processHandle, void* serverSecurityDescriptor, UUID connectionId) { return HOOK_JS_API(ConnectJITProcess(processHandle, serverSecurityDescriptor, connectionId)); }
+#endif
+
+ static JsErrorCode WINAPI JsGetArrayBufferFreeFunction(JsValueRef buffer, ArrayBufferFreeFn* freeFn) { return HOOK_JS_API(GetArrayBufferFreeFunction(buffer, freeFn)); }
+ static JsErrorCode WINAPI JsExternalizeArrayBuffer(JsValueRef buffer) { return HOOK_JS_API(ExternalizeArrayBuffer(buffer)); }
};
class AutoRestoreContext
diff --git a/bin/ch/DbgController.js b/bin/ch/DbgController.js
index 9f3744bef8c..09ec812e652 100644
--- a/bin/ch/DbgController.js
+++ b/bin/ch/DbgController.js
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
@@ -62,6 +63,8 @@ var controllerObj = (function () {
// Discard all known globals to reduce baseline noise.
[
"#__proto__",
+ "globalThis",
+ "AggregateError",
"Array",
"ArrayBuffer",
"Atomics",
@@ -156,9 +159,10 @@ var controllerObj = (function () {
function filterFileName(fileName) {
try {
var index = fileName.lastIndexOf("\\");
- if (index >= 0) {
- return fileName.substring(index + 1);
+ if (index === -1) {
+ index = fileName.lastIndexOf("/");
}
+ return fileName.substring(index + 1);
} catch (ex) { }
return "";
}
diff --git a/bin/ch/Debugger.cpp b/bin/ch/Debugger.cpp
index 95655b57f09..5e938c7a203 100644
--- a/bin/ch/Debugger.cpp
+++ b/bin/ch/Debugger.cpp
@@ -1,8 +1,11 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "stdafx.h"
+#include "Helpers.h"
+#include "PlatformAgnostic/ChakraICU.h"
#define MAX_BASELINE_SIZE (1024*1024*200)
@@ -303,63 +306,34 @@ bool Debugger::InstallDebugCallbacks(JsValueRef hostDebugObject)
bool Debugger::SetBaseline()
{
-#ifdef _WIN32
- LPSTR script = nullptr;
- FILE *file = nullptr;
- size_t numChars = 0;
- HRESULT hr = S_OK;
-
- if (_wfopen_s(&file, HostConfigFlags::flags.dbgbaseline, _u("rb")) != 0)
- {
- Helpers::LogError(_u("opening baseline file '%s'"), HostConfigFlags::flags.dbgbaseline);
- }
+ const char* script = nullptr;
+ char* fileName = nullptr;
+ JsValueRef scriptRef = JS_INVALID_REFERENCE;
+ HRESULT hr = E_FAIL;
+ UINT lengthBytes = 0;
- if(file != nullptr)
+ if (SUCCEEDED(WideStringToNarrowDynamic(HostConfigFlags::flags.dbgbaseline, &fileName)))
{
- long fileSize = _filelength(_fileno(file));
- if (0 <= fileSize && fileSize <= MAX_BASELINE_SIZE)
- {
- script = new char[(size_t)fileSize + 1];
-
- numChars = fread(script, sizeof(script[0]), fileSize, file);
- if (numChars == (size_t)fileSize)
- {
- script[numChars] = '\0';
-
- JsValueRef wideScriptRef;
- IfJsErrorFailLogAndHR(ChakraRTInterface::JsCreateString(
- script, strlen(script), &wideScriptRef));
-
- this->CallFunctionNoResult("SetBaseline", wideScriptRef);
- }
- else
- {
- Helpers::LogError(_u("failed to read from baseline file"));
- IfFailGo(E_FAIL);
- }
- }
- else
+ Helpers::LoadScriptFromFile(fileName, script, &lengthBytes);
+ if (script && lengthBytes < MAX_BASELINE_SIZE &&
+ ChakraRTInterface::JsCreateString(script, strlen(script), &scriptRef) == JsNoError)
{
- Helpers::LogError(_u("baseline file too large"));
- IfFailGo(E_FAIL);
+ this->CallFunctionNoResult("SetBaseline", scriptRef);
+ hr = S_OK;
}
}
-Error:
+
if (script)
{
delete[] script;
}
- if (file)
+ if (hr != S_OK)
{
- fclose(file);
+ Helpers::LogError(_u("Failed to load & process debug baseline: %s"), HostConfigFlags::flags.dbgbaseline);
}
return hr == S_OK;
-#else
- // xplat-todo: Implement this on Linux
- return false;
-#endif
}
bool Debugger::SetInspectMaxStringLength()
@@ -427,7 +401,14 @@ bool Debugger::DumpFunctionPosition(JsValueRef functionPosition)
bool Debugger::StartDebugging(JsRuntimeHandle runtime)
{
- IfJsrtErrorFailLogAndRetFalse(ChakraRTInterface::JsDiagStartDebugging(runtime, Debugger::DebugEventHandler, this));
+ JsErrorCode errorCode = ChakraRTInterface::JsDiagStartDebugging(runtime, Debugger::DebugEventHandler, this);
+
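+ // Attaching while the runtime is already in debug mode is treated as a plain failure rather than a logged error.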
+ if (errorCode == JsErrorCode::JsErrorDiagAlreadyInDebugMode)
+ {
+ return false;
+ }
+
+ IfJsrtErrorFailLogAndRetFalse(errorCode);
this->m_isDetached = false;
@@ -437,7 +418,15 @@ bool Debugger::StartDebugging(JsRuntimeHandle runtime)
bool Debugger::StopDebugging(JsRuntimeHandle runtime)
{
void* callbackState = nullptr;
- IfJsrtErrorFailLogAndRetFalse(ChakraRTInterface::JsDiagStopDebugging(runtime, &callbackState));
+
+ JsErrorCode errorCode = ChakraRTInterface::JsDiagStopDebugging(runtime, &callbackState);
+
+ if (errorCode == JsErrorCode::JsErrorDiagNotInDebugMode)
+ {
+ return false;
+ }
+
+ IfJsrtErrorFailLogAndRetFalse(errorCode);
Assert(callbackState == this);
diff --git a/bin/ch/Helpers.cpp b/bin/ch/Helpers.cpp
index ec667f49350..f68d5790698 100644
--- a/bin/ch/Helpers.cpp
+++ b/bin/ch/Helpers.cpp
@@ -155,7 +155,7 @@ uint ConcatPath(LPCSTR filenameLeft, uint posPathSep, LPCSTR filenameRight, char
return totalLength;
}
-HRESULT Helpers::LoadScriptFromFile(LPCSTR filenameToLoad, LPCSTR& contents, UINT* lengthBytesOut /*= nullptr*/)
+HRESULT Helpers::LoadScriptFromFile(LPCSTR filenameToLoad, LPCSTR& contents, UINT* lengthBytesOut /*= nullptr*/, std::string* fullPath /*= nullptr*/, bool shouldMute /*=false */)
{
static char sHostApplicationPathBuffer[MAX_URI_LENGTH];
static uint sHostApplicationPathBufferLength = (uint) -1;
@@ -169,7 +169,7 @@ HRESULT Helpers::LoadScriptFromFile(LPCSTR filenameToLoad, LPCSTR& contents, UIN
FILE * file = NULL;
size_t bufferLength = 0;
- LPCSTR filename = filenameToLoad;
+ LPCSTR filename = fullPath == nullptr ? filenameToLoad : LPCSTR(fullPath->c_str());
if (sHostApplicationPathBufferLength == (uint)-1)
{
// consider incoming filename as the host app and base its' path for others
@@ -188,7 +188,7 @@ HRESULT Helpers::LoadScriptFromFile(LPCSTR filenameToLoad, LPCSTR& contents, UIN
}
sHostApplicationPathBuffer[sHostApplicationPathBufferLength] = char(0);
}
- else if (filename[0] != '/' && filename[0] != '\\') // make sure it's not a full path
+ else if (filename[0] != '/' && filename[0] != '\\' && fullPath == nullptr) // make sure it's not a full path
{
// concat host path and filename
uint len = ConcatPath(sHostApplicationPathBuffer, sHostApplicationPathBufferLength,
@@ -216,7 +216,7 @@ HRESULT Helpers::LoadScriptFromFile(LPCSTR filenameToLoad, LPCSTR& contents, UIN
// etc.
if (fopen_s(&file, filename, "rb") != 0)
{
- if (!HostConfigFlags::flags.MuteHostErrorMsgIsEnabled)
+ if (!HostConfigFlags::flags.MuteHostErrorMsgIsEnabled && !shouldMute)
{
#ifdef _WIN32
DWORD lastError = GetLastError();
@@ -388,8 +388,6 @@ LPCWSTR Helpers::JsErrorCodeToString(JsErrorCode jsErrorCode)
case JsErrorAlreadyDebuggingContext: return _u("JsErrorAlreadyDebuggingContext");
case JsErrorAlreadyProfilingContext: return _u("JsErrorAlreadyProfilingContext");
case JsErrorIdleNotEnabled: return _u("JsErrorIdleNotEnabled");
- case JsCannotSetProjectionEnqueueCallback: return _u("JsCannotSetProjectionEnqueueCallback");
- case JsErrorCannotStartProjection: return _u("JsErrorCannotStartProjection");
case JsErrorInObjectBeforeCollectCallback: return _u("JsErrorInObjectBeforeCollectCallback");
case JsErrorObjectNotInspectable: return _u("JsErrorObjectNotInspectable");
case JsErrorPropertyNotSymbol: return _u("JsErrorPropertyNotSymbol");
diff --git a/bin/ch/Helpers.h b/bin/ch/Helpers.h
index 83c8bdff37a..fe01d35ed5f 100644
--- a/bin/ch/Helpers.h
+++ b/bin/ch/Helpers.h
@@ -7,7 +7,7 @@
class Helpers
{
public :
- static HRESULT LoadScriptFromFile(LPCSTR filename, LPCSTR& contents, UINT* lengthBytesOut = nullptr);
+ static HRESULT LoadScriptFromFile(LPCSTR filename, LPCSTR& contents, UINT* lengthBytesOut = nullptr, std::string* fullPath = nullptr, bool shouldMute = false);
static LPCWSTR JsErrorCodeToString(JsErrorCode jsErrorCode);
static void LogError(__in __nullterminated const char16 *msg, ...);
static HRESULT LoadBinaryFile(LPCSTR filename, LPCSTR& contents, UINT& lengthBytes, bool printFileOpenError = true);
diff --git a/bin/ch/HostConfigFlagsList.h b/bin/ch/HostConfigFlagsList.h
index ea4271eb8f6..2e07b6704aa 100644
--- a/bin/ch/HostConfigFlagsList.h
+++ b/bin/ch/HostConfigFlagsList.h
@@ -17,8 +17,9 @@ FLAG(bool, IgnoreScriptErrorCode, "Don't return error code on script e
FLAG(bool, MuteHostErrorMsg, "Mute host error output, e.g. module load failures", false)
FLAG(bool, TraceHostCallback, "Output traces for host callbacks", false)
FLAG(bool, Test262, "load Test262 harness", false)
+FLAG(bool, Module, "load the script as a module", false)
FLAG(bool, TrackRejectedPromises, "Enable tracking of unhandled promise rejections", false)
FLAG(BSTR, CustomConfigFile, "Custom config file to be used to pass in additional flags to Chakra", NULL)
-FLAG(bool, ExecuteWithBgParse, "[No-op] Load script with bgparse (note: requires bgparse to be on as well)", false)
+FLAG(bool, ExecuteWithBgParse, "Load script with bgparse (note: requires bgparse and parserstatecache to be on as well)", false)
#undef FLAG
#endif
diff --git a/bin/ch/JITProcessManager.cpp b/bin/ch/JITProcessManager.cpp
index 3cf50ddd33c..ec49c9aa61d 100644
--- a/bin/ch/JITProcessManager.cpp
+++ b/bin/ch/JITProcessManager.cpp
@@ -68,7 +68,7 @@ HRESULT JITProcessManager::CreateServerProcess(int argc, __in_ecount(argc) LPWST
#pragma warning(suppress: 6386) // buffer overrun
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
- hr = StringCchCopyW(cmdLine, cmdLineSize, _u("ch.exe -OOPCFGRegistration- -CheckOpHelpers -jitserver:"));
+ hr = StringCchCopyW(cmdLine, cmdLineSize, _u("ch.exe -CheckOpHelpers -jitserver:"));
#else
hr = StringCchCopyW(cmdLine, cmdLineSize, _u("ch.exe -jitserver:"));
#endif
diff --git a/bin/ch/MessageQueue.h b/bin/ch/MessageQueue.h
index bc350806ec8..ea2c91943f7 100644
--- a/bin/ch/MessageQueue.h
+++ b/bin/ch/MessageQueue.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
@@ -28,15 +29,15 @@ class MessageBase
template <typename T>
class SortedList
{
- template <typename T>
+ template <typename U>
struct DListNode
{
- T data;
- DListNode<T>* prev;
- DListNode<T>* next;
+ U data;
+ DListNode<U>* prev;
+ DListNode<U>* next;
public:
- DListNode(const T& data) :
+ DListNode(const U& data) :
data(data),
prev(nullptr),
next(nullptr)
diff --git a/bin/ch/WScriptJsrt.cpp b/bin/ch/WScriptJsrt.cpp
index 4b4fbb979d1..eb4db4cd433 100644
--- a/bin/ch/WScriptJsrt.cpp
+++ b/bin/ch/WScriptJsrt.cpp
@@ -1,9 +1,20 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "stdafx.h"
+#include "PlatformAgnostic/ChakraICU.h"
+#if defined(__APPLE__)
+#ifdef ctime
+#undef ctime
+#define CTIME_UNDEFED
+#endif
+#endif
#include <vector>
+#include <map>
+#include <string>
+#include <chrono>
#if defined(_X86_) || defined(_M_IX86)
#define CPU_ARCH_TEXT "x86"
@@ -44,9 +55,22 @@
#define INTL_LIBRARY_TEXT ""
#endif
+struct ArrayBufferTransferInfo {
+ byte* buffer;
+ uint length;
+ ArrayBufferFreeFn freeFn;
+};
+struct SerializerBlob
+{
+ void *data;
+ size_t dataLength;
+ std::vector<ArrayBufferTransferInfo> transferableArrays;
+};
+
MessageQueue* WScriptJsrt::messageQueue = nullptr;
std::map<std::string, JsModuleRecord> WScriptJsrt::moduleRecordMap;
std::map<JsModuleRecord, std::string> WScriptJsrt::moduleDirMap;
+std::map<JsModuleRecord, ModuleState> WScriptJsrt::moduleErrMap;
std::map<DWORD_PTR, std::string> WScriptJsrt::scriptDirMap;
DWORD_PTR WScriptJsrt::sourceContext = 0;
@@ -223,7 +247,6 @@ JsValueRef WScriptJsrt::LoadScriptFileHelper(JsValueRef callee, JsValueRef *argu
hr = Helpers::LoadScriptFromFile(*fileName, fileContent);
if (FAILED(hr))
{
- // check if have it registered
fprintf(stderr, "Couldn't load file '%s'\n", fileName.GetString());
IfJsrtErrorSetGo(ChakraRTInterface::JsGetUndefinedValue(&returnValue));
return returnValue;
@@ -234,19 +257,237 @@ JsValueRef WScriptJsrt::LoadScriptFileHelper(JsValueRef callee, JsValueRef *argu
}
Error:
+
+ SetExceptionIf(errorCode, errorMessage);
+ return returnValue;
+}
+
+void WScriptJsrt::SetExceptionIf(JsErrorCode errorCode, LPCWSTR errorMessage)
+{
if (errorCode != JsNoError)
{
- JsValueRef errorObject;
- JsValueRef errorMessageString;
+ // If an exception is already set, there is no need to create a new one.
+ bool hasException = false;
+ if (!(ChakraRTInterface::JsHasException(&hasException) == JsNoError && hasException))
+ {
+ JsValueRef errorObject;
+ JsValueRef errorMessageString;
+
+ if (wcscmp(errorMessage, _u("")) == 0)
+ {
+ errorMessage = ConvertErrorCodeToMessage(errorCode);
+ }
+
+ ERROR_MESSAGE_TO_STRING(errCode, errorMessage, errorMessageString);
- if (wcscmp(errorMessage, _u("")) == 0) {
- errorMessage = ConvertErrorCodeToMessage(errorCode);
+ ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
+ ChakraRTInterface::JsSetException(errorObject);
}
+ }
+}
+
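+// Host callbacks for JsVarSerializer: ReallocateBufferMemory grows the serialization buffer with realloc; WriteHostObject is a stub that reports success without writing host objects.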
+byte * CHAKRA_CALLBACK ReallocateBufferMemory(void * state, byte *oldBuffer, size_t newSize, size_t *allocatedSize)
+{
+ void* data = realloc((void*)oldBuffer, newSize);
+ if (allocatedSize)
+ {
+ *allocatedSize = newSize;
+ }
+ return (byte*)data;
+}
- ERROR_MESSAGE_TO_STRING(errCode, errorMessage, errorMessageString);
+bool CHAKRA_CALLBACK WriteHostObject(void * state, JsValueRef data)
+{
+ // Not implemented
+ return true;
+}
- ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
- ChakraRTInterface::JsSetException(errorObject);
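+// WScript.SerializeObject(value[, transferList]): serializes value with the JsVarSerializer hooks and returns an external ArrayBuffer backed by a SerializerBlob; ArrayBuffers in transferList are externalized and detached.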
+JsValueRef __stdcall WScriptJsrt::SerializeObject(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
+{
+ JsErrorCode errorCode = JsNoError;
+ LPCWSTR errorMessage = _u("");
+ JsValueRef returnValue = JS_INVALID_REFERENCE;
+ HRESULT hr = S_OK;
+ JsValueRef *transferVarsArray = nullptr;
+ int transferVarsCount = 0;
+ if (argumentCount < 2)
+ {
+ errorCode = JsErrorInvalidArgument;
+ errorMessage = _u("Need an argument for WScript.Serialize");
+ }
+ else
+ {
+ JsValueRef rootObject = arguments[1];
+ JsValueRef transferArray = nullptr;
+ if (argumentCount > 2)
+ {
+ JsValueType argumentType = JsUndefined;
+ transferArray = arguments[2];
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetValueType(transferArray, &argumentType));
+
+ if (argumentType != JsUndefined)
+ {
+ if (argumentType != JsArray)
+ {
+ errorCode = JsErrorInvalidArgument;
+ goto Error;
+ }
+
+ JsPropertyIdRef lengthPropId;
+ JsValueRef arrayLengthObj = JS_INVALID_REFERENCE;
+ int arrayLength = 0;
+ IfJsrtErrorSetGo(CreatePropertyIdFromString("length", &lengthPropId));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetProperty(transferArray, lengthPropId, &arrayLengthObj));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsNumberToInt(arrayLengthObj, &arrayLength));
+ if (arrayLength > 0)
+ {
+ transferVarsArray = new JsValueRef[arrayLength];
+ if (transferVarsArray == nullptr)
+ {
+ errorCode = JsErrorOutOfMemory;
+ goto Error;
+ }
+
+ for (int i = 0; i < arrayLength; i++)
+ {
+ JsValueRef index;
+ JsValueRef value = JS_INVALID_REFERENCE;
+ JsValueType jsType = JsUndefined;
+
+ IfJsrtErrorSetGo(ChakraRTInterface::JsIntToNumber(i, &index));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetIndexedProperty(transferArray, index, &value));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetValueType(value, &jsType));
+ if (jsType == JsArrayBuffer)
+ {
+ *(transferVarsArray + transferVarsCount) = value;
+ transferVarsCount++;
+ }
+ }
+ }
+ }
+ }
+
+ JsVarSerializerHandle serializerHandle = nullptr;
+
+ // This memory is released in WScriptJsrt::Deserialize.
+ SerializerBlob *blob = new SerializerBlob();
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarSerializer(ReallocateBufferMemory, WriteHostObject, nullptr, &serializerHandle));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarSerializerSetTransferableVars(serializerHandle, transferVarsArray, transferVarsCount));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarSerializerWriteValue(serializerHandle, rootObject));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarSerializerReleaseData(serializerHandle, (byte**)&blob->data, &blob->dataLength));
+
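+ // For each transferred ArrayBuffer: record its storage and free function, externalize the buffer so its memory outlives this context, then detach it.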
+ for (int i = 0; i < transferVarsCount; i++)
+ {
+ JsValueRef arrayBuffer = transferVarsArray[i];
+ ArrayBufferTransferInfo bufferInfo;
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetArrayBufferStorage(arrayBuffer, &bufferInfo.buffer, &bufferInfo.length));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsExternalizeArrayBuffer(arrayBuffer));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetArrayBufferFreeFunction(arrayBuffer, &bufferInfo.freeFn));
+ blob->transferableArrays.push_back(bufferInfo);
+ IfJsrtErrorSetGo(ChakraRTInterface::JsDetachArrayBuffer(arrayBuffer));
+ }
+
+ errorCode = ChakraRTInterface::JsCreateExternalArrayBuffer((void*)blob, sizeof(SerializerBlob), nullptr, nullptr, &returnValue);
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarSerializerFree(serializerHandle));
+ }
+Error:
+ SetExceptionIf(errorCode, errorMessage);
+
+ if (transferVarsArray)
+ {
+ delete[] transferVarsArray;
+ }
+
+ return returnValue;
+}
+
+JsValueRef CHAKRA_CALLBACK ReadHostObject(void * state)
+{
+ Assert(false); // TBD
+ return nullptr;
+}
+
+JsValueRef CHAKRA_CALLBACK GetSharedArrayBufferFromId(void * state, uint32_t id)
+{
+ Assert(false); // TBD
+ return nullptr;
+}
+JsValueRef CHAKRA_CALLBACK GetWasmModuleFromId(void * state, uint32_t transfer_id)
+{
+ Assert(false); // TBD
+ return nullptr;
+}
+
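+// Pairs a transferred buffer with the free function captured at serialization time so the external ArrayBuffer's finalizer can release it.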
+struct BufferFreeFunctionState {
+ ArrayBufferFreeFn freeFn;
+ void* buffer;
+};
+
+void CHAKRA_CALLBACK BufferFreeFunction(void * state)
+{
+ BufferFreeFunctionState* bufferState = (BufferFreeFunctionState*)state;
+ if (!bufferState)
+ {
+ return;
+ }
+ if (bufferState->freeFn)
+ {
+ bufferState->freeFn(bufferState->buffer);
+ }
+ delete bufferState;
+}
+
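+// WScript.Deserialize(blob): reads a value back from a SerializerBlob produced by SerializeObject; transferred buffers are recreated as external ArrayBuffers that release their storage through BufferFreeFunction.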
+JsValueRef __stdcall WScriptJsrt::Deserialize(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
+{
+ JsErrorCode errorCode = JsNoError;
+ LPCWSTR errorMessage = _u("");
+ JsValueRef returnValue = JS_INVALID_REFERENCE;
+ JsValueRef * transferables = nullptr;
+ HRESULT hr = S_OK;
+ if (argumentCount < 2)
+ {
+ errorCode = JsErrorInvalidArgument;
+ errorMessage = _u("Need an argument for WScript.Deserialize");
+ }
+ else
+ {
+ JsValueRef dataObject = arguments[1];
+ uint32 dataLength = 0;
+ BYTE *data = nullptr;
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetArrayBufferStorage(dataObject, &data, &dataLength));
+ SerializerBlob *blob = (SerializerBlob*)data;
+ JsVarDeserializerHandle deserializerHandle = nullptr;
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarDeserializer(blob->data, blob->dataLength, ReadHostObject, GetSharedArrayBufferFromId, nullptr, &deserializerHandle));
+
+ size_t arraySize = blob->transferableArrays.size();
+ if (arraySize > 0)
+ {
+ transferables = new JsValueRef[arraySize];
+
+ for (size_t i = 0; i < arraySize; ++i)
+ {
+ JsValueRef result = nullptr;
+ BufferFreeFunctionState* bufferFreeState = new BufferFreeFunctionState();
+ bufferFreeState->buffer = blob->transferableArrays[i].buffer;
+ bufferFreeState->freeFn = blob->transferableArrays[i].freeFn;
+ IfJsrtErrorSetGo(ChakraRTInterface::JsCreateExternalArrayBuffer(blob->transferableArrays[i].buffer, blob->transferableArrays[i].length, BufferFreeFunction, bufferFreeState, &result));
+ transferables[i] = result;
+ }
+
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarDeserializerSetTransferableVars(deserializerHandle, transferables, arraySize));
+ }
+
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarDeserializerReadValue(deserializerHandle, &returnValue));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsVarDeserializerFree(deserializerHandle));
+ delete blob;
+
+ }
+
+Error:
+ SetExceptionIf(errorCode, errorMessage);
+ if (transferables)
+ {
+ delete[] transferables;
}
return returnValue;
@@ -295,21 +536,7 @@ JsValueRef __stdcall WScriptJsrt::GetModuleNamespace(JsValueRef callee, bool isC
}
}
- if (errorCode != JsNoError)
- {
- JsValueRef errorObject;
- JsValueRef errorMessageString;
-
- if (wcscmp(errorMessage, _u("")) == 0)
- {
- errorMessage = ConvertErrorCodeToMessage(errorCode);
- }
-
- ERROR_MESSAGE_TO_STRING(errCode, errorMessage, errorMessageString);
-
- ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
- ChakraRTInterface::JsSetException(errorObject);
- }
+ SetExceptionIf(errorCode, errorMessage);
return returnValue;
}
@@ -379,48 +606,10 @@ JsValueRef WScriptJsrt::LoadScriptHelper(JsValueRef callee, bool isConstructCall
}
Error:
- if (errorCode != JsNoError)
- {
- JsValueRef errorObject;
- JsValueRef errorMessageString;
-
- if (wcscmp(errorMessage, _u("")) == 0) {
- errorMessage = ConvertErrorCodeToMessage(errorCode);
- }
-
- ERROR_MESSAGE_TO_STRING(errCode, errorMessage, errorMessageString);
-
- ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
- ChakraRTInterface::JsSetException(errorObject);
- }
-
+ SetExceptionIf(errorCode, errorMessage);
return returnValue;
}
-JsErrorCode WScriptJsrt::InitializeModuleInfo(JsValueRef specifier, JsModuleRecord moduleRecord)
-{
- JsErrorCode errorCode = JsNoError;
- errorCode = ChakraRTInterface::JsSetModuleHostInfo(moduleRecord, JsModuleHostInfo_FetchImportedModuleCallback, (void*)WScriptJsrt::FetchImportedModule);
-
- if (errorCode == JsNoError)
- {
- errorCode = ChakraRTInterface::JsSetModuleHostInfo(moduleRecord, JsModuleHostInfo_FetchImportedModuleFromScriptCallback, (void*)WScriptJsrt::FetchImportedModuleFromScript);
-
- if (errorCode == JsNoError)
- {
- errorCode = ChakraRTInterface::JsSetModuleHostInfo(moduleRecord, JsModuleHostInfo_NotifyModuleReadyCallback, (void*)WScriptJsrt::NotifyModuleReadyCallback);
-
- if (errorCode == JsNoError)
- {
- errorCode = ChakraRTInterface::JsSetModuleHostInfo(moduleRecord, JsModuleHostInfo_HostDefined, specifier);
- }
- }
- }
-
- IfJsrtErrorFailLogAndRetErrorCode(errorCode);
- return JsNoError;
-}
-
void WScriptJsrt::GetDir(LPCSTR fullPathNarrow, std::string *fullDirNarrow)
{
char fileDrive[_MAX_DRIVE];
@@ -436,6 +625,11 @@ void WScriptJsrt::GetDir(LPCSTR fullPathNarrow, std::string *fullDirNarrow)
*fullDirNarrow = result;
}
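+// Loads fileContent as a root ES module; a thin wrapper over LoadModuleFromString.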
+JsErrorCode WScriptJsrt::ModuleEntryPoint(LPCSTR fileName, LPCSTR fileContent, LPCSTR fullName)
+{
+ return LoadModuleFromString(fileName, fileContent, fullName, true);
+}
+
JsErrorCode WScriptJsrt::LoadModuleFromString(LPCSTR fileName, LPCSTR fileContent, LPCSTR fullName, bool isFile)
{
DWORD_PTR dwSourceCookie = WScriptJsrt::GetNextSourceContext();
@@ -448,17 +642,16 @@ JsErrorCode WScriptJsrt::LoadModuleFromString(LPCSTR fileName, LPCSTR fileConten
// otherwise we'll use the old one.
if (moduleRecordEntry == moduleRecordMap.end())
{
- JsValueRef specifier;
- errorCode = ChakraRTInterface::JsCreateString(
- fileName, strlen(fileName), &specifier);
- if (errorCode == JsNoError)
+ JsValueRef specifier = nullptr;
+ if (isFile && fullName)
{
- errorCode = ChakraRTInterface::JsInitializeModuleRecord(
- nullptr, specifier, &requestModule);
+ errorCode = ChakraRTInterface::JsCreateString(
+ fullName, strlen(fullName), &specifier);
}
if (errorCode == JsNoError)
{
- errorCode = InitializeModuleInfo(specifier, requestModule);
+ errorCode = ChakraRTInterface::JsInitializeModuleRecord(
+ nullptr, specifier, &requestModule);
}
if (errorCode == JsNoError)
{
@@ -468,6 +661,7 @@ JsErrorCode WScriptJsrt::LoadModuleFromString(LPCSTR fileName, LPCSTR fileConten
}
moduleRecordMap[std::string(moduleRecordKey)] = requestModule;
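+ // Track the new record as a root module; parse errors are only surfaced for root modules, and the record is marked errored afterwards.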
+ moduleErrMap[requestModule] = RootModule;
}
}
else
@@ -480,19 +674,12 @@ JsErrorCode WScriptJsrt::LoadModuleFromString(LPCSTR fileName, LPCSTR fileConten
// ParseModuleSource is sync, while additional fetch & evaluation are async.
unsigned int fileContentLength = (fileContent == nullptr) ? 0 : (unsigned int)strlen(fileContent);
- if (isFile && fullName)
- {
- JsValueRef moduleUrl;
- ChakraRTInterface::JsCreateString(fullName, strlen(fullName), &moduleUrl);
- errorCode = ChakraRTInterface::JsSetModuleHostInfo(requestModule, JsModuleHostInfo_Url, moduleUrl);
- IfJsrtErrorFail(errorCode, errorCode);
- }
-
errorCode = ChakraRTInterface::JsParseModuleSource(requestModule, dwSourceCookie, (LPBYTE)fileContent,
fileContentLength, JsParseModuleSourceFlags_DataIsUTF8, &errorObject);
- if ((errorCode != JsNoError) && errorObject != JS_INVALID_REFERENCE && fileContent != nullptr && !HostConfigFlags::flags.IgnoreScriptErrorCode)
+ if ((errorCode != JsNoError) && errorObject != JS_INVALID_REFERENCE && fileContent != nullptr && !HostConfigFlags::flags.IgnoreScriptErrorCode && moduleErrMap[requestModule] == RootModule)
{
ChakraRTInterface::JsSetException(errorObject);
+ moduleErrMap[requestModule] = ErroredModule;
return errorCode;
}
return JsNoError;
@@ -506,7 +693,6 @@ JsValueRef WScriptJsrt::LoadScript(JsValueRef callee, LPCSTR fileName,
JsErrorCode errorCode = JsNoError;
LPCWSTR errorMessage = _u("Internal error.");
JsValueRef returnValue = JS_INVALID_REFERENCE;
- JsErrorCode innerErrorCode = JsNoError;
JsContextRef currentContext = JS_INVALID_REFERENCE;
JsRuntimeHandle runtime = JS_INVALID_RUNTIME_HANDLE;
void *callbackArg = (finalizeCallback != nullptr ? (void*)fileContent : nullptr);
@@ -647,35 +833,7 @@ JsValueRef WScriptJsrt::LoadScript(JsValueRef callee, LPCSTR fileName,
JsValueRef value = returnValue;
if (errorCode != JsNoError)
{
- if (innerErrorCode != JsNoError)
- {
- // Failed to retrieve the inner error message, so set a custom error string
- errorMessage = ConvertErrorCodeToMessage(errorCode);
- }
-
- JsValueRef error = JS_INVALID_REFERENCE;
- JsValueRef messageProperty = JS_INVALID_REFERENCE;
-
- ERROR_MESSAGE_TO_STRING(errCode, errorMessage, messageProperty);
-
- if (errCode == JsNoError)
- {
- errCode = ChakraRTInterface::JsCreateError(messageProperty, &error);
- if (errCode == JsNoError)
- {
- bool hasException = false;
- errorCode = ChakraRTInterface::JsHasException(&hasException);
- if (errorCode == JsNoError && !hasException)
- {
- errCode = ChakraRTInterface::JsSetException(error);
- }
- else if (errCode == JsNoError)
- {
- errCode = JsErrorInExceptionState;
- }
- }
- }
-
+ SetExceptionIf(errorCode, errorMessage);
ChakraRTInterface::JsDoubleToNumber(errorCode, &value);
}
@@ -684,9 +842,31 @@ JsValueRef WScriptJsrt::LoadScript(JsValueRef callee, LPCSTR fileName,
return value;
}
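+// WScript.monotonicNow(): returns a monotonic timestamp in milliseconds based on std::chrono::steady_clock.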
+JsValueRef WScriptJsrt::MonotonicNowCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
+{
+ LPCWSTR errorMessage = _u("invalid call to WScript.monotonicNow");
+ JsErrorCode errorCode = JsNoError;
+ HRESULT hr = S_OK;
+ JsValueRef result;
+
+ IfJsrtErrorSetGo(ChakraRTInterface::JsDoubleToNumber(static_cast<double>(std::chrono::steady_clock::now().time_since_epoch().count()) / 1e6 /* ns in ms */, &result));
+
+#ifdef CTIME_UNDEFED
+#define ctime PAL_ctime
+#undef CTIME_UNDEFED
+#endif
+ return result;
+
+Error:
+ SetExceptionIf(errorCode, errorMessage);
+ return JS_INVALID_REFERENCE;
+}
+
JsValueRef WScriptJsrt::SetTimeoutCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
{
LPCWSTR errorMessage = _u("invalid call to WScript.SetTimeout");
+ JsErrorCode errorCode = JsNoError;
+ HRESULT hr = S_OK;
JsValueRef function;
JsValueRef timerId;
@@ -696,79 +876,53 @@ JsValueRef WScriptJsrt::SetTimeoutCallback(JsValueRef callee, bool isConstructCa
if (argumentCount != 3)
{
+ errorCode = JsErrorInvalidArgument;
goto Error;
}
function = arguments[1];
- IfJsrtError(ChakraRTInterface::JsNumberToDouble(arguments[2], &tmp));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsNumberToDouble(arguments[2], &tmp));
time = static_cast<int>(tmp);
msg = new CallbackMessage(time, function);
messageQueue->InsertSorted(msg);
- IfJsrtError(ChakraRTInterface::JsDoubleToNumber(static_cast(msg->GetId()), &timerId));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsDoubleToNumber(static_cast(msg->GetId()), &timerId));
return timerId;
Error:
- JsValueRef errorObject;
- JsValueRef errorMessageString;
-
- ERROR_MESSAGE_TO_STRING(errorCode, errorMessage, errorMessageString);
-
- if (errorCode != JsNoError)
- {
- errorCode = ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
-
- if (errorCode != JsNoError)
- {
- ChakraRTInterface::JsSetException(errorObject);
- }
- }
-
+ SetExceptionIf(errorCode, errorMessage);
return JS_INVALID_REFERENCE;
}
JsValueRef WScriptJsrt::ClearTimeoutCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
{
LPCWSTR errorMessage = _u("invalid call to WScript.ClearTimeout");
+ JsErrorCode errorCode = JsNoError;
+ HRESULT hr = S_OK;
if (argumentCount != 2)
{
+ errorCode = JsErrorInvalidArgument;
goto Error;
}
unsigned int timerId;
double tmp;
JsValueRef undef;
- JsValueRef global;
- IfJsrtError(ChakraRTInterface::JsNumberToDouble(arguments[1], &tmp));
-
- timerId = static_cast<unsigned int>(tmp);
- messageQueue->RemoveById(timerId);
-
- IfJsrtError(ChakraRTInterface::JsGetGlobalObject(&global));
- IfJsrtError(ChakraRTInterface::JsGetUndefinedValue(&undef));
+ if (ChakraRTInterface::JsNumberToDouble(arguments[1], &tmp) == JsNoError)
+ {
+ timerId = static_cast<unsigned int>(tmp);
+ messageQueue->RemoveById(timerId);
+ }
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetUndefinedValue(&undef));
return undef;
Error:
- JsValueRef errorObject;
- JsValueRef errorMessageString;
-
- ERROR_MESSAGE_TO_STRING(errorCode, errorMessage, errorMessageString);
-
- if (errorCode != JsNoError)
- {
- errorCode = ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
-
- if (errorCode != JsNoError)
- {
- ChakraRTInterface::JsSetException(errorObject);
- }
- }
-
+ SetExceptionIf(errorCode, errorMessage);
return JS_INVALID_REFERENCE;
}
@@ -781,14 +935,18 @@ void QueueDebugOperation(JsValueRef function, const DebugOperationFunc& operatio
JsValueRef WScriptJsrt::AttachCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
{
LPCWSTR errorMessage = _u("WScript.Attach requires a function, like WScript.Attach(foo);");
+ JsErrorCode errorCode = JsNoError;
+ HRESULT hr = S_OK;
JsValueType argumentType = JsUndefined;
if (argumentCount != 2)
{
+ errorCode = JsErrorInvalidArgument;
goto Error;
}
- IfJsrtError(ChakraRTInterface::JsGetValueType(arguments[1], &argumentType));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetValueType(arguments[1], &argumentType));
if (argumentType != JsFunction)
{
+ errorCode = JsErrorInvalidArgument;
goto Error;
}
QueueDebugOperation(arguments[1], [](WScriptJsrt::CallbackMessage& msg)
@@ -805,33 +963,25 @@ JsValueRef WScriptJsrt::AttachCallback(JsValueRef callee, bool isConstructCall,
return msg.CallFunction("");
});
Error:
- JsValueRef errorObject;
- JsValueRef errorMessageString;
-
- ERROR_MESSAGE_TO_STRING(errorCode, errorMessage, errorMessageString);
-
- if (errorCode != JsNoError)
- {
- errorCode = ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
- if (errorCode != JsNoError)
- {
- ChakraRTInterface::JsSetException(errorObject);
- }
- }
+ SetExceptionIf(errorCode, errorMessage);
return JS_INVALID_REFERENCE;
}
JsValueRef WScriptJsrt::DetachCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState)
{
LPCWSTR errorMessage = _u("WScript.Detach requires a function, like WScript.Detach(foo);");
+ JsErrorCode errorCode = JsNoError;
+ HRESULT hr = S_OK;
JsValueType argumentType = JsUndefined;
if (argumentCount != 2)
{
+ errorCode = JsErrorInvalidArgument;
goto Error;
}
- IfJsrtError(ChakraRTInterface::JsGetValueType(arguments[1], &argumentType));
+ IfJsrtErrorSetGo(ChakraRTInterface::JsGetValueType(arguments[1], &argumentType));
if (argumentType != JsFunction)
{
+ errorCode = JsErrorInvalidArgument;
goto Error;
}
QueueDebugOperation(arguments[1], [](WScriptJsrt::CallbackMessage& msg)
@@ -848,19 +998,7 @@ JsValueRef WScriptJsrt::DetachCallback(JsValueRef callee, bool isConstructCall,
return msg.CallFunction("");
});
Error:
- JsValueRef errorObject;
- JsValueRef errorMessageString;
-
- ERROR_MESSAGE_TO_STRING(errorCode, errorMessage, errorMessageString);
-
- if (errorCode != JsNoError)
- {
- errorCode = ChakraRTInterface::JsCreateError(errorMessageString, &errorObject);
- if (errorCode != JsNoError)
- {
- ChakraRTInterface::JsSetException(errorObject);
- }
- }
+ SetExceptionIf(errorCode, errorMessage);
return JS_INVALID_REFERENCE;
}
@@ -900,12 +1038,6 @@ JsValueRef WScriptJsrt::RequestAsyncBreakCallback(JsValueRef callee, bool isCons
return JS_INVALID_REFERENCE;
}
-JsValueRef WScriptJsrt::EmptyCallback(JsValueRef callee, bool isConstructCall,
- JsValueRef * arguments, unsigned short argumentCount, void * callbackState)
-{
- return JS_INVALID_REFERENCE;
-}
-
bool WScriptJsrt::CreateNamedFunction(const char* nameString, JsNativeFunction callback,
JsValueRef* functionVar)
{
@@ -950,6 +1082,7 @@ bool WScriptJsrt::Initialize()
JsValueRef wscript;
IfJsrtErrorFail(ChakraRTInterface::JsCreateObject(&wscript), false);
+ IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "monotonicNow", MonotonicNowCallback));
IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "Echo", EchoCallback));
IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "Quit", QuitCallback));
IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "LoadScriptFile", LoadScriptFileCallback));
@@ -967,9 +1100,9 @@ bool WScriptJsrt::Initialize()
IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "RegisterModuleSource", RegisterModuleSourceCallback));
IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "GetModuleNamespace", GetModuleNamespace));
IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "GetProxyProperties", GetProxyPropertiesCallback));
-
- // ToDo Remove
- IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "Edit", EmptyCallback));
+
+ IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "SerializeObject", SerializeObject));
+ IfFalseGo(WScriptJsrt::InstallObjectsOnObject(wscript, "Deserialize", Deserialize));
// Platform
JsValueRef platformObject;
@@ -1081,7 +1214,11 @@ bool WScriptJsrt::Initialize()
IfJsrtErrorFail(CreatePropertyIdFromString("console", &consoleName), false);
IfJsrtErrorFail(ChakraRTInterface::JsSetProperty(global, consoleName, console, true), false);
- IfJsrtErrorFail(InitializeModuleCallbacks(), false);
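+ // Passing a null module record registers these host callbacks as defaults for every module record in the runtime.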
+ IfJsrtErrorFail(ChakraRTInterface::JsSetModuleHostInfo(nullptr, JsModuleHostInfo_FetchImportedModuleCallback, (void*)WScriptJsrt::FetchImportedModule), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsSetModuleHostInfo(nullptr, JsModuleHostInfo_FetchImportedModuleFromScriptCallback, (void*)WScriptJsrt::FetchImportedModuleFromScript), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsSetModuleHostInfo(nullptr, JsModuleHostInfo_NotifyModuleReadyCallback, (void*)WScriptJsrt::NotifyModuleReadyCallback), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsSetModuleHostInfo(nullptr, JsModuleHostInfo_InitializeImportMetaCallback, (void*)WScriptJsrt::InitializeImportMetaCallback), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsSetModuleHostInfo(nullptr, JsModuleHostInfo_ReportModuleCompletionCallback, (void*)WScriptJsrt::ReportModuleCompletionCallback), false);
// When the command-line argument `-Test262` is set,
// WScript will have the extra support API below and $262 will be
@@ -1112,18 +1249,6 @@ bool WScriptJsrt::Initialize()
return hr == S_OK;
}
-JsErrorCode WScriptJsrt::InitializeModuleCallbacks()
-{
- JsModuleRecord moduleRecord = JS_INVALID_REFERENCE;
- JsErrorCode errorCode = ChakraRTInterface::JsInitializeModuleRecord(nullptr, nullptr, &moduleRecord);
- if (errorCode == JsNoError)
- {
- errorCode = InitializeModuleInfo(nullptr, moduleRecord);
- }
-
- return errorCode;
-}
-
bool WScriptJsrt::Uninitialize()
{
// moduleRecordMap is a global std::map, its destructor may access overridden
@@ -1131,6 +1256,7 @@ bool WScriptJsrt::Uninitialize()
// to avoid worrying about global destructor order.
moduleRecordMap.clear();
moduleDirMap.clear();
+ moduleErrMap.clear();
scriptDirMap.clear();
auto& threadData = GetRuntimeThreadLocalData().threadData;
@@ -1218,7 +1344,6 @@ JsValueRef __stdcall WScriptJsrt::LoadTextFileCallback(JsValueRef callee, bool i
if (FAILED(hr))
{
- // check if have it registered
fprintf(stderr, "Couldn't load file '%s'\n", fileName.GetString());
IfJsrtErrorSetGo(ChakraRTInterface::JsGetUndefinedValue(&returnValue));
return returnValue;
@@ -1382,7 +1507,6 @@ JsValueRef __stdcall WScriptJsrt::LoadBinaryFileCallback(JsValueRef callee,
if (FAILED(hr))
{
- // check if have it registered
fprintf(stderr, "Couldn't load file '%s'\n", fileName.GetString());
IfJsrtErrorSetGoLabel(ChakraRTInterface::JsGetUndefinedValue(&returnValue), Error);
return returnValue;
@@ -1661,11 +1785,26 @@ JsValueRef __stdcall WScriptJsrt::GetProxyPropertiesCallback(JsValueRef callee,
return returnValue;
}
-bool WScriptJsrt::PrintException(LPCSTR fileName, JsErrorCode jsErrorCode)
+bool WScriptJsrt::PrintException(LPCSTR fileName, JsErrorCode jsErrorCode, JsValueRef exception)
{
LPCWSTR errorTypeString = ConvertErrorCodeToMessage(jsErrorCode);
- JsValueRef exception;
- ChakraRTInterface::JsGetAndClearException(&exception);
+ JsValueRef metaData = JS_INVALID_REFERENCE;
+
+ if (exception == nullptr)
+ {
+ if (ChakraRTInterface::JsGetAndClearExceptionWithMetadata(&metaData) == JsNoError)
+ {
+ JsPropertyIdRef exceptionId = JS_INVALID_REFERENCE;
+ IfJsrtErrorFail(CreatePropertyIdFromString("exception", &exceptionId), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsGetProperty(metaData, exceptionId, &exception), false);
+ }
+ else
+ {
+ IfJsrtErrorFail(ChakraRTInterface::JsGetAndClearException(&exception), false);
+ }
+
+ }
+
if (HostConfigFlags::flags.MuteHostErrorMsgIsEnabled)
{
return false;
@@ -1677,7 +1816,56 @@ bool WScriptJsrt::PrintException(LPCSTR fileName, JsErrorCode jsErrorCode)
{
AutoString errorMessage;
- IfJsrtErrorFail(errorMessage.Initialize(exception), false);
+ if (errorMessage.Initialize(exception) != JsNoError)
+ {
+ fwprintf(stderr, _u("ERROR attempting to coerce error to string, using alternate handler\n"));
+ bool hasException = false;
+ ChakraRTInterface::JsHasException(&hasException);
+ if (hasException)
+ {
+ JsValueRef throwAway = JS_INVALID_REFERENCE;
+ ChakraRTInterface::JsGetAndClearException(&throwAway);
+ }
+ JsPropertyIdRef messagePropertyId = JS_INVALID_REFERENCE;
+ IfJsrtErrorFail(CreatePropertyIdFromString("message", &messagePropertyId), false);
+ JsValueRef message = JS_INVALID_REFERENCE;
+ IfJsrtErrorFail(ChakraRTInterface::JsGetProperty(exception, messagePropertyId, &message), false);
+ IfJsrtErrorFail(errorMessage.Initialize(message), false);
+
+ if (jsErrorCode != JsErrorCode::JsErrorScriptCompile)
+ {
+ CHAR shortFileName[_MAX_PATH];
+ CHAR ext[_MAX_EXT];
+ _splitpath_s(fileName, nullptr, 0, nullptr, 0, shortFileName, _countof(shortFileName), ext, _countof(ext));
+
+ if (metaData != JS_INVALID_REFERENCE)
+ {
+ JsPropertyIdRef linePropertyId = JS_INVALID_REFERENCE;
+ JsValueRef lineProperty = JS_INVALID_REFERENCE;
+
+ JsPropertyIdRef columnPropertyId = JS_INVALID_REFERENCE;
+ JsValueRef columnProperty = JS_INVALID_REFERENCE;
+
+ int line;
+ int column;
+
+ IfJsrtErrorFail(CreatePropertyIdFromString("line", &linePropertyId), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsGetProperty(metaData, linePropertyId, &lineProperty), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsNumberToInt(lineProperty, &line), false);
+
+ IfJsrtErrorFail(CreatePropertyIdFromString("column", &columnPropertyId), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsGetProperty(metaData, columnPropertyId, &columnProperty), false);
+ IfJsrtErrorFail(ChakraRTInterface::JsNumberToInt(columnProperty, &column), false);
+ fwprintf(stderr, _u("%ls\n at code (%S%S:%d:%d)\n"),
+ errorMessage.GetWideString(), shortFileName, ext, line + 1, column + 1);
+ }
+ else
+ {
+ fwprintf(stderr, _u("%ls\n\tat code (%S%S:\?\?:\?\?)\n"), errorMessage.GetWideString(), shortFileName, ext);
+ }
+ return true;
+ }
+ }
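The fallback block above is the interesting part: when the thrown value cannot be coerced to a string, ch now pulls the message off the exception and the line and column off the metadata object returned by JsGetAndClearExceptionWithMetadata. A condensed sketch of that JSRT pattern on its own (hypothetical helper, error handling trimmed, assumes a current context):

    #include "ChakraCore.h"
    #include <cstdio>
    #include <cstring>

    // Sketch: report the pending exception with a 1-based source position.
    static void ReportPendingException()
    {
        JsValueRef metadata = JS_INVALID_REFERENCE;
        if (JsGetAndClearExceptionWithMetadata(&metadata) != JsNoError)
        {
            return; // nothing pending (or no active context)
        }

        // The metadata object carries "exception", "line" and "column" among other properties.
        JsPropertyIdRef lineId = JS_INVALID_REFERENCE, columnId = JS_INVALID_REFERENCE;
        JsValueRef lineVal = JS_INVALID_REFERENCE, columnVal = JS_INVALID_REFERENCE;
        int line = 0, column = 0;

        JsCreatePropertyId("line", strlen("line"), &lineId);
        JsCreatePropertyId("column", strlen("column"), &columnId);
        JsGetProperty(metadata, lineId, &lineVal);
        JsGetProperty(metadata, columnId, &columnVal);
        JsNumberToInt(lineVal, &line);
        JsNumberToInt(columnVal, &column);

        // Same +1 adjustment as the hunk above: the engine reports 0-based positions.
        fprintf(stderr, "Unhandled script exception at %d:%d\n", line + 1, column + 1);
    }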
if (jsErrorCode == JsErrorCode::JsErrorScriptCompile)
{
@@ -1831,12 +2019,14 @@ HRESULT WScriptJsrt::CallbackMessage::CallFunction(LPCSTR fileName)
return hr;
}
-WScriptJsrt::ModuleMessage::ModuleMessage(JsModuleRecord module, JsValueRef specifier)
+WScriptJsrt::ModuleMessage::ModuleMessage(JsModuleRecord module, JsValueRef specifier, std::string* fullPathPtr)
: MessageBase(0), moduleRecord(module), specifier(specifier)
{
+ fullPath = nullptr;
ChakraRTInterface::JsAddRef(module, nullptr);
if (specifier != nullptr)
{
+ fullPath = new std::string (*fullPathPtr);
// nullptr specifier means a Promise to execute; non-nullptr means a "fetch" operation.
ChakraRTInterface::JsAddRef(specifier, nullptr);
}
@@ -1847,21 +2037,25 @@ WScriptJsrt::ModuleMessage::~ModuleMessage()
ChakraRTInterface::JsRelease(moduleRecord, nullptr);
if (specifier != nullptr)
{
+ delete fullPath;
ChakraRTInterface::JsRelease(specifier, nullptr);
}
}
HRESULT WScriptJsrt::ModuleMessage::Call(LPCSTR fileName)
{
- JsErrorCode errorCode;
+ JsErrorCode errorCode = JsNoError;
JsValueRef result = JS_INVALID_REFERENCE;
HRESULT hr;
if (specifier == nullptr)
{
- errorCode = ChakraRTInterface::JsModuleEvaluation(moduleRecord, &result);
- if (errorCode != JsNoError)
+ if (moduleErrMap[moduleRecord] != ErroredModule)
{
- PrintException(fileName, errorCode);
+ errorCode = ChakraRTInterface::JsModuleEvaluation(moduleRecord, &result);
+ if (errorCode != JsNoError)
+ {
+ PrintException(fileName, errorCode); // defensive only - evaluation failures are expected to surface via ReportModuleCompletionCallback
+ }
}
}
else
@@ -1871,25 +2065,39 @@ HRESULT WScriptJsrt::ModuleMessage::Call(LPCSTR fileName)
errorCode = specifierStr.GetError();
if (errorCode == JsNoError)
{
- hr = Helpers::LoadScriptFromFile(*specifierStr, fileContent);
+ hr = Helpers::LoadScriptFromFile(*specifierStr, fileContent, nullptr, fullPath, true);
if (FAILED(hr))
{
- // check if have it registered
if (!HostConfigFlags::flags.MuteHostErrorMsgIsEnabled)
{
- fprintf(stderr, "Couldn't load file '%s'\n", specifierStr.GetString());
+ auto actualModuleRecord = moduleRecordMap.find(*fullPath);
+ if (actualModuleRecord == moduleRecordMap.end() || moduleErrMap[actualModuleRecord->second] == RootModule)
+ {
+ fprintf(stderr, "Couldn't load file '%s'\n", specifierStr.GetString());
+ }
}
- LoadScript(nullptr, *specifierStr, nullptr, "module", true, WScriptJsrt::FinalizeFree, false);
+ LoadScript(nullptr, fullPath == nullptr ? *specifierStr : fullPath->c_str(), nullptr, "module", true, WScriptJsrt::FinalizeFree, false);
goto Error;
}
- LoadScript(nullptr, *specifierStr, fileContent, "module", true, WScriptJsrt::FinalizeFree, true);
+ LoadScript(nullptr, fullPath == nullptr ? *specifierStr : fullPath->c_str(), fileContent, "module", true, WScriptJsrt::FinalizeFree, true);
}
}
Error:
return errorCode;
}
+JsErrorCode WScriptJsrt::ReportModuleCompletionCallback(JsModuleRecord module, JsValueRef exception)
+{
+ if (exception != nullptr)
+ {
+ JsValueRef specifier = JS_INVALID_REFERENCE;
+ ChakraRTInterface::JsGetModuleHostInfo(module, JsModuleHostInfo_Url, &specifier);
+ PrintException(AutoString(specifier).GetString(), JsErrorCode::JsErrorScriptException, exception);
+ }
+ return JsNoError;
+}
+
JsErrorCode WScriptJsrt::FetchImportedModuleHelper(JsModuleRecord referencingModule,
JsValueRef specifier, __out JsModuleRecord* dependentModuleRecord, LPCSTR refdir)
{
@@ -1921,10 +2129,10 @@ JsErrorCode WScriptJsrt::FetchImportedModuleHelper(JsModuleRecord referencingMod
if (errorCode == JsNoError)
{
GetDir(fullPath, &moduleDirMap[moduleRecord]);
- InitializeModuleInfo(specifier, moduleRecord);
- moduleRecordMap[std::string(fullPath)] = moduleRecord;
- ModuleMessage* moduleMessage =
- WScriptJsrt::ModuleMessage::Create(referencingModule, specifier);
+ std::string pathKey = std::string(fullPath);
+ moduleRecordMap[pathKey] = moduleRecord;
+ moduleErrMap[moduleRecord] = ImportedModule;
+ ModuleMessage* moduleMessage = WScriptJsrt::ModuleMessage::Create(referencingModule, specifier, &pathKey);
if (moduleMessage == nullptr)
{
return JsErrorOutOfMemory;
@@ -1959,42 +2167,25 @@ JsErrorCode WScriptJsrt::FetchImportedModule(_In_ JsModuleRecord referencingModu
JsErrorCode WScriptJsrt::FetchImportedModuleFromScript(_In_ JsSourceContext dwReferencingSourceContext,
_In_ JsValueRef specifier, _Outptr_result_maybenull_ JsModuleRecord* dependentModuleRecord)
{
- // ch.exe assumes all imported source files are located at .
- auto scriptDirEntry = scriptDirMap.find(dwReferencingSourceContext);
- if (scriptDirEntry != scriptDirMap.end())
- {
- std::string dir = scriptDirEntry->second;
- return FetchImportedModuleHelper(nullptr, specifier, dependentModuleRecord, dir.c_str());
- }
-
return FetchImportedModuleHelper(nullptr, specifier, dependentModuleRecord);
}
-// Callback from chakraCore when the module resolution is finished, either successfuly or unsuccessfully.
+// Callback from chakraCore when the module resolution is finished, either successfully or unsuccessfully.
JsErrorCode WScriptJsrt::NotifyModuleReadyCallback(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef exceptionVar)
{
- if (exceptionVar != nullptr)
+ if (exceptionVar != nullptr && HostConfigFlags::flags.TraceHostCallbackIsEnabled)
{
- ChakraRTInterface::JsSetException(exceptionVar);
JsValueRef specifier = JS_INVALID_REFERENCE;
- ChakraRTInterface::JsGetModuleHostInfo(referencingModule, JsModuleHostInfo_HostDefined, &specifier);
+ ChakraRTInterface::JsGetModuleHostInfo(referencingModule, JsModuleHostInfo_Url, &specifier);
AutoString fileName;
if (specifier != JS_INVALID_REFERENCE)
{
fileName.Initialize(specifier);
}
-
- if (HostConfigFlags::flags.TraceHostCallbackIsEnabled)
- {
- wprintf(_u("NotifyModuleReadyCallback(exception) %S\n"), fileName.GetString());
- }
-
- // No need to print - just consume the exception
- JsValueRef exception;
- ChakraRTInterface::JsGetAndClearException(&exception);
- exception; // unused
+ wprintf(_u("NotifyModuleReadyCallback(exception) %S\n"), fileName.GetString());
}
- else
+
+ if (moduleErrMap[referencingModule] != ErroredModule)
{
WScriptJsrt::ModuleMessage* moduleMessage =
WScriptJsrt::ModuleMessage::Create(referencingModule, nullptr);
@@ -2007,6 +2198,23 @@ JsErrorCode WScriptJsrt::NotifyModuleReadyCallback(_In_opt_ JsModuleRecord refer
return JsNoError;
}
+JsErrorCode __stdcall WScriptJsrt::InitializeImportMetaCallback(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef importMetaVar)
+{
+ if (importMetaVar != nullptr)
+ {
+ JsValueRef specifier = JS_INVALID_REFERENCE;
+ ChakraRTInterface::JsGetModuleHostInfo(referencingModule, JsModuleHostInfo_Url, &specifier);
+
+ JsPropertyIdRef urlPropId;
+ if (JsNoError == CreatePropertyIdFromString("url", &urlPropId))
+ {
+ ChakraRTInterface::JsSetProperty(importMetaVar, urlPropId, specifier, false);
+ }
+ }
+
+ return JsNoError;
+}
+
void WScriptJsrt::PromiseContinuationCallback(JsValueRef task, void *callbackState)
{
Assert(task != JS_INVALID_REFERENCE);
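For context, PromiseContinuationCallback (the rest of its body lies outside this hunk) is the hook through which the engine hands ch each promise job, which ch queues for later execution. A stripped-down sketch of that mechanism, with a plain std::queue standing in for ch's message queue (illustrative names, not ch code):

    #include "ChakraCore.h"
    #include <queue>

    static std::queue<JsValueRef> taskQueue;

    // Matches the JsPromiseContinuationCallback shape: the engine passes a zero-argument
    // JS function that performs the next promise reaction.
    static void CALLBACK OnPromiseContinuation(JsValueRef task, void* /*callbackState*/)
    {
        JsAddRef(task, nullptr);      // keep the task alive until it is executed
        taskQueue.push(task);
    }

    // Drain after each top-level script/module evaluation.
    static void DrainPromiseTasks()
    {
        while (!taskQueue.empty())
        {
            JsValueRef task = taskQueue.front();
            taskQueue.pop();

            JsValueRef undefinedValue = JS_INVALID_REFERENCE;
            JsValueRef result = JS_INVALID_REFERENCE;
            JsGetUndefinedValue(&undefinedValue);
            JsCallFunction(task, &undefinedValue, 1, &result);  // arguments[0] is the this-arg
            JsRelease(task, nullptr);
        }
    }

    // Registered once after the context is made current:
    //   JsSetPromiseContinuationCallback(OnPromiseContinuation, nullptr);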
diff --git a/bin/ch/WScriptJsrt.h b/bin/ch/WScriptJsrt.h
index 82007791a82..b4f8170fb5c 100644
--- a/bin/ch/WScriptJsrt.h
+++ b/bin/ch/WScriptJsrt.h
@@ -1,15 +1,24 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
#include
+enum ModuleState
+{
+ RootModule,
+ ImportedModule,
+ ErroredModule
+};
+
class WScriptJsrt
{
public:
static bool Initialize();
static bool Uninitialize();
+ static JsErrorCode ModuleEntryPoint(LPCSTR fileName, LPCSTR fileContent, LPCSTR fullName);
class CallbackMessage : public MessageBase
{
@@ -35,17 +44,18 @@ class WScriptJsrt
private:
JsModuleRecord moduleRecord;
JsValueRef specifier;
+ std::string* fullPath;
- ModuleMessage(JsModuleRecord module, JsValueRef specifier);
+ ModuleMessage(JsModuleRecord module, JsValueRef specifier, std::string* fullPathPtr);
public:
~ModuleMessage();
virtual HRESULT Call(LPCSTR fileName) override;
- static ModuleMessage* Create(JsModuleRecord module, JsValueRef specifier)
+ static ModuleMessage* Create(JsModuleRecord module, JsValueRef specifier, std::string* fullPath = nullptr)
{
- return new ModuleMessage(module, specifier);
+ return new ModuleMessage(module, specifier, fullPath);
}
};
@@ -56,7 +66,8 @@ class WScriptJsrt
static JsErrorCode FetchImportedModule(_In_ JsModuleRecord referencingModule, _In_ JsValueRef specifier, _Outptr_result_maybenull_ JsModuleRecord* dependentModuleRecord);
static JsErrorCode FetchImportedModuleFromScript(_In_ DWORD_PTR dwReferencingSourceContext, _In_ JsValueRef specifier, _Outptr_result_maybenull_ JsModuleRecord* dependentModuleRecord);
static JsErrorCode NotifyModuleReadyCallback(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef exceptionVar);
- static JsErrorCode InitializeModuleCallbacks();
+ static JsErrorCode ReportModuleCompletionCallback(JsModuleRecord module, JsValueRef exception);
+ static JsErrorCode CALLBACK InitializeImportMetaCallback(_In_opt_ JsModuleRecord referencingModule, _In_opt_ JsValueRef importMetaVar);
static void CALLBACK PromiseContinuationCallback(JsValueRef task, void *callbackState);
static void CALLBACK PromiseRejectionTrackerCallback(JsValueRef promise, JsValueRef reason, bool handled, void *callbackState);
@@ -80,6 +91,8 @@ class WScriptJsrt
return _u("FatalError");
case (JsErrorCode::JsErrorInExceptionState) :
return _u("ErrorInExceptionState");
+ case (JsErrorCode::JsErrorBadSerializedScript):
+ return _u("ErrorBadSerializedScript ");
default:
AssertMsg(false, "Unexpected JsErrorCode");
return nullptr;
@@ -90,7 +103,7 @@ class WScriptJsrt
static void CALLBACK JsContextBeforeCollectCallback(JsRef contextRef, void *data);
#endif
- static bool PrintException(LPCSTR fileName, JsErrorCode jsErrorCode);
+ static bool PrintException(LPCSTR fileName, JsErrorCode jsErrorCode, JsValueRef exception = nullptr);
static JsValueRef LoadScript(JsValueRef callee, LPCSTR fileName, LPCSTR fileContent, LPCSTR scriptInjectType, bool isSourceModule, JsFinalizeCallback finalizeCallback, bool isFile);
static DWORD_PTR GetNextSourceContext();
static JsValueRef LoadScriptFileHelper(JsValueRef callee, JsValueRef *arguments, unsigned short argumentCount, bool isSourceModule);
@@ -99,6 +112,7 @@ class WScriptJsrt
static void FinalizeFree(void * addr);
static void RegisterScriptDir(DWORD_PTR sourceContext, LPCSTR fullDirNarrow);
private:
+ static void SetExceptionIf(JsErrorCode errorCode, LPCWSTR errorMessage);
static bool CreateArgumentsObject(JsValueRef *argsObject);
static bool CreateNamedFunction(const char*, JsNativeFunction callback, JsValueRef* functionVar);
static void GetDir(LPCSTR fullPathNarrow, std::string *fullDirNarrow);
@@ -108,6 +122,7 @@ class WScriptJsrt
static JsValueRef CALLBACK LoadScriptCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK LoadModuleCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK GetModuleNamespace(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
+ static JsValueRef CALLBACK MonotonicNowCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK SetTimeoutCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK ClearTimeoutCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK AttachCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
@@ -115,9 +130,7 @@ class WScriptJsrt
static JsValueRef CALLBACK DumpFunctionPositionCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK RequestAsyncBreakCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
- static JsValueRef CALLBACK EmptyCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsErrorCode CALLBACK LoadModuleFromString(LPCSTR fileName, LPCSTR fileContent, LPCSTR fullName = nullptr, bool isFile = false);
- static JsErrorCode CALLBACK InitializeModuleInfo(JsValueRef specifier, JsModuleRecord moduleRecord);
static JsValueRef CALLBACK LoadBinaryFileCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK LoadTextFileCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
@@ -133,11 +146,15 @@ class WScriptJsrt
static JsValueRef CALLBACK SleepCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
static JsValueRef CALLBACK GetProxyPropertiesCallback(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
+ static JsValueRef CALLBACK SerializeObject(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
+ static JsValueRef CALLBACK Deserialize(JsValueRef callee, bool isConstructCall, JsValueRef *arguments, unsigned short argumentCount, void *callbackState);
+
static JsErrorCode FetchImportedModuleHelper(JsModuleRecord referencingModule, JsValueRef specifier, __out JsModuleRecord* dependentModuleRecord, LPCSTR refdir = nullptr);
static MessageQueue *messageQueue;
static DWORD_PTR sourceContext;
static std::map<std::string, JsModuleRecord> moduleRecordMap;
static std::map<JsModuleRecord, std::string> moduleDirMap;
+ static std::map<JsModuleRecord, ModuleState> moduleErrMap;
static std::map<DWORD_PTR, std::string> scriptDirMap;
};
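The new moduleErrMap member, together with the ModuleState enum above, is what lets the host skip evaluating and re-reporting modules that already failed. Reduced to its essentials - a sketch, not patch code; the transition to ErroredModule presumably happens on the load/parse failure path, which is outside the hunks shown here:

    #include "ChakraCore.h"   // JsModuleRecord
    #include <map>

    // Same three states as the enum added to WScriptJsrt.h above.
    enum ModuleState { RootModule, ImportedModule, ErroredModule };

    static std::map<JsModuleRecord, ModuleState> moduleStates;

    // FetchImportedModuleHelper: a freshly fetched dependency starts as ImportedModule.
    static void OnModuleFetched(JsModuleRecord record)
    {
        moduleStates[record] = ImportedModule;
    }

    // NotifyModuleReadyCallback / ModuleMessage::Call: skip evaluation once a module
    // has been marked ErroredModule so its failure is reported exactly once.
    static bool ShouldEvaluate(JsModuleRecord record)
    {
        return moduleStates[record] != ErroredModule;
    }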
diff --git a/bin/ch/ch.cpp b/bin/ch/ch.cpp
index d365340608e..8c5b4a4e20a 100644
--- a/bin/ch/ch.cpp
+++ b/bin/ch/ch.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "stdafx.h"
@@ -39,6 +40,8 @@ UINT32 snapHistoryLength = MAXUINT32;
LPCWSTR connectionUuidString = NULL;
UINT32 startEventCount = 1;
+HRESULT RunBgParseSync(LPCSTR fileContents, UINT lengthBytes, const char* fileName);
+
extern "C"
HRESULT __stdcall OnChakraCoreLoadedEntry(TestHooks& testHooks)
{
@@ -193,97 +196,62 @@ HANDLE GetFileHandle(LPCWSTR filename)
return GetStdHandle(STD_OUTPUT_HANDLE);
}
-HRESULT CreateLibraryByteCodeHeader(LPCSTR contentsRaw, JsFinalizeCallback contentsRawFinalizeCallback, DWORD lengthBytes, LPCWSTR bcFullPath, LPCSTR libraryNameNarrow)
+HRESULT CreateLibraryByteCode(const char* contentsRaw)
{
- HANDLE bcFileHandle = nullptr;
JsValueRef bufferVal;
BYTE *bcBuffer = nullptr;
unsigned int bcBufferSize = 0;
- DWORD written;
- // For validating the header file against the library file
- auto outputStr =
- "//-------------------------------------------------------------------------------------------------------\n"
- "// Copyright (C) Microsoft. All rights reserved.\n"
- "// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.\n"
- "//-------------------------------------------------------------------------------------------------------\n"
- "#if 0\n";
-
- std::string normalizedContentStr;
- char* nextToken = nullptr;
- char* token = strtok_s((char*)contentsRaw, "\r", &nextToken);
- while (token)
- {
- normalizedContentStr.append(token);
- token = strtok_s(nullptr, "\r", &nextToken);
- }
- // We no longer need contentsRaw, so call the finalizer for it if one was provided
- if (contentsRawFinalizeCallback != nullptr)
- {
- contentsRawFinalizeCallback((void*)contentsRaw);
- }
-
- const char* normalizedContent = normalizedContentStr.c_str();
- // We still need contentsRaw after this, so pass a null finalizeCallback into it
- HRESULT hr = GetSerializedBuffer(normalizedContent, nullptr, &bufferVal);
-
- IfFailedGoLabel((hr), ErrorRunFinalize);
-
- IfJsrtErrorHRLabel(ChakraRTInterface::JsGetArrayBufferStorage(bufferVal, &bcBuffer, &bcBufferSize), ErrorRunFinalize);
-
- bcFileHandle = GetFileHandle(bcFullPath);
- IfFalseGo(bcFileHandle != INVALID_HANDLE_VALUE && bcFileHandle != nullptr);
-
- IfFalseGoLabel(WriteFile(bcFileHandle, outputStr, (DWORD)strlen(outputStr), &written, nullptr), ErrorRunFinalize);
- IfFalseGoLabel(WriteFile(bcFileHandle, normalizedContent, (DWORD)normalizedContentStr.size(), &written, nullptr), ErrorRunFinalize);
- outputStr = "\n#endif\n";
+ HRESULT hr = E_FAIL;
+
+ // Windows can't do the below with printf - so use windows API on windows but printf on posix
+ #ifdef _WIN32
+ HANDLE out = GetStdHandle(STD_OUTPUT_HANDLE);
+ DWORD written = 0;
+ #define print_format(format, element, size) \
+ { \
+ auto scratchLen = size; \
+ char scratch[size]; \
+ int len = _snprintf_s(scratch, scratchLen, _countof(scratch), format, element); \
+ IfFalseGo(WriteFile(out, scratch, (DWORD)(len), &written, nullptr)); \
+ }
+ #define print(text) \
+ WriteFile(out, text, (DWORD)strlen(text), &written, nullptr);
+ #else
+ #define print_format(format, element, size) printf(format, element)
+ #define print printf
+ #endif
- IfFalseGo(WriteFile(bcFileHandle, outputStr, (DWORD)strlen(outputStr), &written, nullptr));
+ // Generate the bytecode, free the original buffer then retrieve the generated bytecode
+ IfFailGo(GetSerializedBuffer(contentsRaw, WScriptJsrt::FinalizeFree, &bufferVal));
+ IfFailGo(ChakraRTInterface::JsGetArrayBufferStorage(bufferVal, &bcBuffer, &bcBufferSize));
// Write out the bytecode
- outputStr = "namespace Js\n{\n const char Library_Bytecode_";
- IfFalseGo(WriteFile(bcFileHandle, outputStr, (DWORD)strlen(outputStr), &written, nullptr));
- IfFalseGo(WriteFile(bcFileHandle, libraryNameNarrow, (DWORD)strlen(libraryNameNarrow), &written, nullptr));
- outputStr = "[] = {\n/* 00000000 */";
- IfFalseGo(WriteFile(bcFileHandle, outputStr, (DWORD)strlen(outputStr), &written, nullptr));
+ print("[] = {\n/* 00000000 */");
for (unsigned int i = 0; i < bcBufferSize; i++)
{
- char scratch[6];
- auto scratchLen = sizeof(scratch);
- int num = _snprintf_s(scratch, scratchLen, _countof(scratch), " 0x%02X", bcBuffer[i]);
- Assert(num == 5);
- IfFalseGo(WriteFile(bcFileHandle, scratch, (DWORD)(scratchLen - 1), &written, nullptr));
-
- // Add a comma and a space if this is not the last item
+ print_format(" 0x%02X", bcBuffer[i], 6);
+ // Add a comma if this is not the last item
if (i < bcBufferSize - 1)
{
- char commaSpace[2];
- _snprintf_s(commaSpace, sizeof(commaSpace), _countof(commaSpace), ","); // close quote, new line, offset and open quote
- IfFalseGo(WriteFile(bcFileHandle, commaSpace, (DWORD)strlen(commaSpace), &written, nullptr));
+ print(",");
}
// Add a line break every 16 scratches, primarily so the compiler doesn't complain about the string being too long.
// Also, won't add for the last scratch
if (i % 16 == 15 && i < bcBufferSize - 1)
{
- char offset[17];
- int actualLen = _snprintf_s(offset, sizeof(offset), _countof(offset), "\n/* %08X */", i + 1); // close quote, new line, offset and open quote
- IfFalseGo(WriteFile(bcFileHandle, offset, actualLen, &written, nullptr));
+ print_format("\n/* %08X */", i + 1, 17);
}
}
- outputStr = "};\n\n";
- IfFalseGo(WriteFile(bcFileHandle, outputStr, (DWORD)strlen(outputStr), &written, nullptr));
+ print("};\n\n");
- outputStr = "}\n";
- IfFalseGo(WriteFile(bcFileHandle, outputStr, (DWORD)strlen(outputStr), &written, nullptr));
+ #undef print
+ #undef print_format
-ErrorRunFinalize:
-Error:
- if (bcFileHandle != nullptr)
- {
- CloseHandle(bcFileHandle);
- }
+ hr = S_OK;
+Error:
return hr;
}
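CreateLibraryByteCode now streams the array straight to standard output through the print/print_format macros instead of assembling a header file by hand. Purely as an illustration of the output format (not the patch's code), the same listing could be produced with plain stdio:

    #include <cstdio>

    // Emits: "[] = {\n/* 00000000 */ 0x.., 0x.., ... };" with a fresh offset comment
    // every 16 bytes, matching what the macros above write via WriteFile/printf.
    static void EmitByteCodeListing(const unsigned char* buffer, unsigned int size)
    {
        printf("[] = {\n/* 00000000 */");
        for (unsigned int i = 0; i < size; i++)
        {
            printf(" 0x%02X", buffer[i]);
            if (i + 1 < size)
            {
                printf(",");
                if (i % 16 == 15)
                {
                    // New offset comment every 16 bytes keeps the generated lines short.
                    printf("\n/* %08X */", i + 1);
                }
            }
        }
        printf("};\n\n");
    }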
@@ -396,6 +364,10 @@ HRESULT RunScript(const char* fileName, LPCSTR fileContents, size_t fileLength,
IfJsErrorFailLogLabel(ChakraRTInterface::JsCreateString(fullPath,
strlen(fullPath), &fname), ErrorRunFinalize);
+ // Memory management for the serialized-script case - these must be declared here so the cleanup after the message loop can still see them
+ SerializedCallbackInfo serializedCallbackInfo;
+ serializedCallbackInfo.freeingHandled = true;
+
if (bufferValue != nullptr)
{
if (fileContents == nullptr)
@@ -412,7 +384,6 @@ HRESULT RunScript(const char* fileName, LPCSTR fileContents, size_t fileLength,
else // fileContents != nullptr
{
// Memory management is a little more complex here
- SerializedCallbackInfo serializedCallbackInfo;
serializedCallbackInfo.scriptBody = (void*)fileContents;
serializedCallbackInfo.scriptBodyFinalizeCallback = fileContentsFinalizeCallback;
serializedCallbackInfo.freeingHandled = false;
@@ -425,15 +396,6 @@ HRESULT RunScript(const char* fileName, LPCSTR fileContents, size_t fileLength,
// Use source ptr as sourceContext
fname,
nullptr /*result*/);
- // Now that we're down here, we can free the fileContents if they weren't sent into
- // a GC-managed object.
- if (!serializedCallbackInfo.freeingHandled)
- {
- if (fileContentsFinalizeCallback != nullptr)
- {
- fileContentsFinalizeCallback((void*)fileContents);
- }
- }
}
}
else if (parserStateCache != nullptr)
@@ -452,14 +414,24 @@ HRESULT RunScript(const char* fileName, LPCSTR fileContents, size_t fileLength,
parserStateCache,
nullptr);
}
+ else if (HostConfigFlags::flags.Module)
+ {
+ runScript = WScriptJsrt::ModuleEntryPoint(fileName, fileContents, fullPath);
+ }
+ else if (HostConfigFlags::flags.ExecuteWithBgParse && !HostConfigFlags::flags.DebugLaunch)
+ {
+ unsigned int lengthBytes = (unsigned int) fileLength;
+ runScript = (JsErrorCode)RunBgParseSync(fileContents, lengthBytes, fileName);
+ }
else // bufferValue == nullptr && parserStateCache == nullptr
{
JsValueRef scriptSource;
IfJsErrorFailLog(ChakraRTInterface::JsCreateExternalArrayBuffer((void*)fileContents,
(unsigned int)fileLength,
fileContentsFinalizeCallback, (void*)fileContents, &scriptSource));
+
#if ENABLE_TTD
- if(doTTRecord)
+ if (doTTRecord)
{
JsPropertyIdRef ttProperty = nullptr;
JsValueRef ttString = nullptr;
@@ -508,6 +480,12 @@ HRESULT RunScript(const char* fileName, LPCSTR fileContents, size_t fileLength,
IfFailGo(messageQueue->ProcessAll(fileName));
} while(!messageQueue->IsEmpty());
}
+
+ // free the source for the serialized script case if it's not been handed to a managed object
+ if (!serializedCallbackInfo.freeingHandled && fileContentsFinalizeCallback != nullptr)
+ {
+ fileContentsFinalizeCallback((void*)fileContents);
+ }
}
if(false)
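Moving this check out of the serialized-script branch gives RunScript a single cleanup point: whichever path hands fileContents to a GC-managed object flips freeingHandled, otherwise the host frees the buffer itself once the message queue is drained. The ownership handshake boiled down to a sketch (field names mirror this file, but the types are simplified stand-ins, not the actual definitions):

    // Hypothetical, minimal version of the pattern used above.
    typedef void (*FinalizeCallback)(void* buffer);

    struct OwnedScriptBuffer
    {
        void*            scriptBody;                  // raw file contents
        FinalizeCallback scriptBodyFinalizeCallback;  // how to release them
        bool             freeingHandled;              // true once the engine owns the buffer
    };

    // Called once, after all queued messages have been processed.
    static void ReleaseIfStillOwned(OwnedScriptBuffer& buf)
    {
        if (!buf.freeingHandled && buf.scriptBodyFinalizeCallback != nullptr)
        {
            buf.scriptBodyFinalizeCallback(buf.scriptBody);
            buf.freeingHandled = true;
        }
    }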
@@ -751,6 +729,47 @@ HRESULT CreateAndRunSerializedScript(const char* fileName, LPCSTR fileContents,
return hr;
}
+// Use the asynchronous BGParse JSRT APIs in a synchronous call
+HRESULT RunBgParseSync(LPCSTR fileContents, UINT lengthBytes, const char* fileName)
+{
+ JsValueRef scriptSource;
+ JsErrorCode e = (ChakraRTInterface::JsCreateExternalArrayBuffer((void*)fileContents,
+ (unsigned int)lengthBytes,
+ nullptr, (void*)fileContents, &scriptSource));
+
+ // Naive narrow-to-wide copy of the file name; only correct for ASCII paths (see the sketch below)
+ WCHAR fileNameWide[MAX_PATH] = { 0 };
+ size_t fileNameLength = strlen(fileName);
+ for (size_t i = 0; i < fileNameLength; i++)
+ {
+ fileNameWide[i] = fileName[i];
+ }
+
+ JsScriptContents scriptContents = { 0 };
+ scriptContents.container = (LPVOID)fileContents;
+ scriptContents.containerType = JsScriptContainerType::HeapAllocatedBuffer;
+ scriptContents.encodingType = JsScriptEncodingType::Utf8;
+ scriptContents.contentLengthInBytes = lengthBytes;
+ scriptContents.fullPath = fileNameWide;
+
+ DWORD cookie = 0;
+ e = ChakraRTInterface::JsQueueBackgroundParse_Experimental(&scriptContents, &cookie);
+ Assert(e == JsErrorCode::JsNoError);
+
+ JsValueRef bgResult = nullptr;
+ e = ChakraRTInterface::JsExecuteBackgroundParse_Experimental(
+ cookie,
+ scriptSource,
+ WScriptJsrt::GetNextSourceContext(),
+ (WCHAR*)scriptContents.fullPath,
+ JsParseScriptAttributes::JsParseScriptAttributeNone,
+ nullptr,//_In_ JsValueRef parserState,
+ &bgResult
+ );
+
+ return e;
+}
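A more robust way to widen the file name than the byte-by-byte copy above (which is only correct for ASCII) is to let the platform do the conversion. A Windows-only sketch; WidenFileName is a hypothetical helper, and non-Windows builds would need the PAL or a manual UTF-8 decode instead:

    #ifdef _WIN32
    #include <windows.h>

    // Convert a narrow (assumed UTF-8) file name to WCHAR for JsScriptContents.fullPath.
    static bool WidenFileName(const char* fileName, WCHAR* out, int outCount)
    {
        // cbMultiByte == -1 processes the terminating NUL, so the result is NUL-terminated too.
        return MultiByteToWideChar(CP_UTF8, 0, fileName, -1, out, outCount) > 0;
    }
    #endif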
+
HRESULT ExecuteTest(const char* fileName)
{
HRESULT hr = S_OK;
@@ -815,12 +834,12 @@ HRESULT ExecuteTest(const char* fileName)
JsContextRef context = JS_INVALID_REFERENCE;
IfJsErrorFailLog(ChakraRTInterface::JsTTDCreateContext(runtime, true, &context));
+ IfJsErrorFailLog(ChakraRTInterface::JsSetCurrentContext(context));
+
#if ENABLE_TTD
//We need this here since this context is created in record
IfJsErrorFailLog(ChakraRTInterface::JsSetObjectBeforeCollectCallback(context, nullptr, WScriptJsrt::JsContextBeforeCollectCallback));
#endif
-
- IfJsErrorFailLog(ChakraRTInterface::JsSetCurrentContext(context));
}
else
{
@@ -860,9 +879,6 @@ HRESULT ExecuteTest(const char* fileName)
#ifdef DEBUG
ChakraRTInterface::SetCheckOpHelpersFlag(true);
#endif
-#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
- ChakraRTInterface::SetOOPCFGRegistrationFlag(false);
-#endif
if (!WScriptJsrt::Initialize())
{
@@ -882,19 +898,7 @@ HRESULT ExecuteTest(const char* fileName)
len = strlen(fullPath);
if (HostConfigFlags::flags.GenerateLibraryByteCodeHeaderIsEnabled)
{
-
- if (HostConfigFlags::flags.GenerateLibraryByteCodeHeader != nullptr)
- {
- if (wcslen(HostConfigFlags::flags.GenerateLibraryByteCodeHeader) == 0)
- {
- HostConfigFlags::flags.GenerateLibraryByteCodeHeader = nullptr;
- }
- }
- CHAR libraryName[_MAX_PATH];
- CHAR ext[_MAX_EXT];
- _splitpath_s(fullPath, NULL, 0, NULL, 0, libraryName, _countof(libraryName), ext, _countof(ext));
-
- IfFailGo(CreateLibraryByteCodeHeader(fileContents, WScriptJsrt::FinalizeFree, lengthBytes, HostConfigFlags::flags.GenerateLibraryByteCodeHeader, libraryName));
+ IfFailGo(CreateLibraryByteCode(fileContents));
}
else if (HostConfigFlags::flags.SerializedIsEnabled)
{
@@ -1245,7 +1249,7 @@ int _cdecl wmain(int argc, __in_ecount(argc) LPWSTR argv[])
{
// TODO: Error checking
JITProcessManager::StartRpcServer(argc, argv);
- ChakraRTInterface::ConnectJITServer(JITProcessManager::GetRpcProccessHandle(), nullptr, JITProcessManager::GetRpcConnectionId());
+ ChakraRTInterface::JsConnectJITProcess(JITProcessManager::GetRpcProccessHandle(), nullptr, JITProcessManager::GetRpcConnectionId());
}
#endif
HANDLE threadHandle;
diff --git a/bin/ch/ch.vcxproj b/bin/ch/ch.vcxproj
index 3ea7cb4f6f4..1fc7bf62deb 100644
--- a/bin/ch/ch.vcxproj
+++ b/bin/ch/ch.vcxproj
@@ -23,15 +23,11 @@
%(AdditionalIncludeDirectories);
$(MSBuildThisFileDirectory);
$(ChakraCoreRootDirectory)Lib\Common;
+ $(ChakraCoreRootDirectory)Lib\Runtime;
$(ChakraCoreRootDirectory)Bin\ChakraCore;
$(IntDir);
%(AdditionalIncludeDirectories);
-
-
- $(ChakraCoreRootDirectory)lib\Runtime;
- %(AdditionalIncludeDirectories)
-
ch.def
diff --git a/bin/ch/jstoc.py b/bin/ch/jstoc.py
index e0be592fb18..a9618054bc9 100755
--- a/bin/ch/jstoc.py
+++ b/bin/ch/jstoc.py
@@ -23,7 +23,7 @@ def convert():
if os.path.isfile(js_file_name) == False:
print_usage()
- h_file_name = js_file_name + '.h'
+ h_file_name = os.path.basename(js_file_name) + '.h'
js_file_time = os.path.getmtime(js_file_name)
h_file_time = 0
diff --git a/bin/ch/stdafx.h b/bin/ch/stdafx.h
index 8618b69d5bb..730a33c9fb9 100644
--- a/bin/ch/stdafx.h
+++ b/bin/ch/stdafx.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
@@ -56,16 +57,16 @@
#if defined(DBG)
-#define _STRINGIZE_(x) #x
-#if !defined(_STRINGIZE)
-#define _STRINGIZE(x) _STRINGIZE_(x)
+#if !defined(CHAKRACORE_STRINGIZE)
+#define CHAKRACORE_STRINGIZE_IMPL(x) #x
+#define CHAKRACORE_STRINGIZE(x) CHAKRACORE_STRINGIZE_IMPL(x)
#endif
#define AssertMsg(exp, comment) \
do { \
if (!(exp)) \
{ \
- fprintf(stderr, "ASSERTION (%s, line %d) %s %s\n", __FILE__, __LINE__, _STRINGIZE(exp), comment); \
+ fprintf(stderr, "ASSERTION (%s, line %d) %s %s\n", __FILE__, __LINE__, CHAKRACORE_STRINGIZE(exp), comment); \
fflush(stderr); \
DebugBreak(); \
} \
@@ -129,7 +130,6 @@ do { \
if ((jsErrorCode) != JsNoError) { \
fwprintf(stderr, _u("ERROR: ") _u(#expr) _u(" failed. JsErrorCode=0x%x (%s)\n"), jsErrorCode, Helpers::JsErrorCodeToString(jsErrorCode)); \
fflush(stderr); \
- Assert(false); \
return JS_INVALID_REFERENCE; \
} \
} while (0)
@@ -140,7 +140,6 @@ do { \
if ((jsErrorCode) != JsNoError) { \
fwprintf(stderr, _u("ERROR: ") _u(#expr) _u(" failed. JsErrorCode=0x%x (%s)\n"), jsErrorCode, Helpers::JsErrorCodeToString(jsErrorCode)); \
fflush(stderr); \
- Assert(false); \
return false; \
} \
} while (0)
@@ -199,6 +198,7 @@ class AutoString
JsErrorCode Initialize(JsValueRef value)
{
+ errorCode = JsNoError;
JsValueRef strValue;
JsValueType type;
ChakraRTInterface::JsGetValueType(value, &type);
diff --git a/bin/rl/rl.cpp b/bin/rl/rl.cpp
index e1a8f9b503f..99e1a5bd178 100644
--- a/bin/rl/rl.cpp
+++ b/bin/rl/rl.cpp
@@ -4620,7 +4620,13 @@ UpdateTitleStatus()
// start at 1: skip primary thread 0 (unless we decide to let it do real work)
for (i = 1; i <= NumberOfThreads; i++) {
ThreadInfo[i].GetCurrentTest(tempBuf);
- s += sprintf_s(s, REMAININGARRAYLEN(TitleStatus, s), "; %s", tempBuf);
+ size_t remainingCount = REMAININGARRAYLEN(TitleStatus, s);
+ size_t testLen = strnlen_s(tempBuf, BUFFER_SIZE);
+ // Account for the "; " format prefix and the terminating null character.
+ if ((testLen + 3) >= remainingCount) {
+ break;
+ }
+ s += sprintf_s(s, remainingCount, "; %s", tempBuf);
}
LeaveCriticalSection(&csTitleBar);
diff --git a/build.sh b/build.sh
index 746666a66a3..99ac49fa72a 100755
--- a/build.sh
+++ b/build.sh
@@ -1,6 +1,7 @@
#!/bin/bash
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
+# Copyright (c) ChakraCore Project Contributors. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
@@ -121,13 +122,13 @@ WB_CHECK=
WB_ANALYZE=
WB_ARGS=
TARGET_PATH=0
-VALGRIND=0
+VALGRIND=""
# -DCMAKE_EXPORT_COMPILE_COMMANDS=ON useful for clang-query tool
CMAKE_EXPORT_COMPILE_COMMANDS="-DCMAKE_EXPORT_COMPILE_COMMANDS=ON"
LIBS_ONLY_BUILD=
ALWAYS_YES=
CCACHE_NAME=
-PYTHON2_BINARY=$(which python2.7 || which python2 || which python 2> /dev/null)
+PYTHON_BINARY=$(which python3 || which python || which python2.7 || which python2 2> /dev/null)
UNAME_S=`uname -s`
if [[ $UNAME_S =~ 'Linux' ]]; then
@@ -203,7 +204,7 @@ while [[ $# -gt 0 ]]; do
;;
-t | --test-build)
- BUILD_TYPE="Test"
+ BUILD_TYPE="RelWithDebInfo"
;;
-j | --jobs)
@@ -406,42 +407,7 @@ while [[ $# -gt 0 ]]; do
done
if [[ $USE_LOCAL_ICU == 1 ]]; then
- LOCAL_ICU_DIR="$CHAKRACORE_DIR/deps/Chakra.ICU/icu"
- if [[ ! -d $LOCAL_ICU_DIR ]]; then
- "$PYTHON2_BINARY" "$CHAKRACORE_DIR/tools/icu/configure.py" 57.1 $ALWAYS_YES
- fi
-
- # if there is still no directory, then the user declined the license agreement
- if [[ ! -d $LOCAL_ICU_DIR ]]; then
- echo "You must accept the ICU license agreement in order to use this configuration"
- exit 1
- fi
-
- LOCAL_ICU_DIST="$LOCAL_ICU_DIR/output"
-
- if [ ! -d "$LOCAL_ICU_DIST" ]; then
- set -e
-
- pushd "$LOCAL_ICU_DIR/source"
-
- ./configure --with-data-packaging=static\
- --prefix="$LOCAL_ICU_DIST"\
- --enable-static\
- --disable-shared\
- --with-library-bits=64\
- --disable-icuio\
- --disable-layout\
- --disable-tests\
- --disable-samples\
- CXXFLAGS="-fPIC"\
- CFLAGS="-fPIC"
-
- ERROR_EXIT "rm -rf $LOCAL_ICU_DIST"
- make STATICCFLAGS="-fPIC" STATICCXXFLAGS="-fPIC" STATICCPPFLAGS="-DPIC" install
- ERROR_EXIT "rm -rf $LOCAL_ICU_DIST"
- popd
- fi
- CMAKE_ICU="-DICU_INCLUDE_PATH_SH=$LOCAL_ICU_DIST/include"
+ CMAKE_ICU="-DEMBED_ICU_SH=ON"
fi
if [[ "$MAKE" == "ninja" ]]; then
@@ -573,17 +539,23 @@ export TARGET_PATH
if [[ $HAS_LTTNG == 1 ]]; then
CHAKRACORE_ROOT=`dirname $0`
- "$PYTHON2_BINARY" $CHAKRACORE_ROOT/tools/lttng.py --man $CHAKRACORE_ROOT/manifests/Microsoft-Scripting-Chakra-Instrumentation.man --intermediate $TARGET_PATH/intermediate
+ "$PYTHON_BINARY" $CHAKRACORE_ROOT/tools/lttng.py --man $CHAKRACORE_ROOT/manifests/Microsoft-Scripting-Chakra-Instrumentation.man --intermediate $TARGET_PATH/intermediate
mkdir -p $TARGET_PATH/lttng
(diff -q $TARGET_PATH/intermediate/lttng/jscriptEtw.h $TARGET_PATH/lttng/jscriptEtw.h && echo "jscriptEtw.h up to date; skipping") || cp $TARGET_PATH/intermediate/lttng/* $TARGET_PATH/lttng/
fi
-BUILD_DIRECTORY="${TARGET_PATH}/${BUILD_TYPE:0}"
+if [[ ${BUILD_TYPE} =~ "RelWithDebInfo" ]]; then
+ BUILD_TYPE_DIR=Test
+else
+ BUILD_TYPE_DIR=${BUILD_TYPE}
+fi
+
+BUILD_DIRECTORY="${TARGET_PATH}/${BUILD_TYPE_DIR:0}"
echo "Build path: ${BUILD_DIRECTORY}"
-BUILD_RELATIVE_DIRECTORY=$("$PYTHON2_BINARY" -c "import os.path;print \
- os.path.relpath('${CHAKRACORE_DIR}', '$BUILD_DIRECTORY')")
+BUILD_RELATIVE_DIRECTORY=$("$PYTHON_BINARY" -c "from __future__ import print_function; import os.path;\
+ print(os.path.relpath('${CHAKRACORE_DIR}', '$BUILD_DIRECTORY'))")
################# Write-barrier check/analyze run #################
WB_FLAG=
@@ -637,13 +609,6 @@ if [[ $WB_CHECK || $WB_ANALYZE ]]; then
fi
fi
-# prepare DbgController.js.h
-CH_DIR="${CHAKRACORE_DIR}/bin/ch"
-"${CH_DIR}/jstoc.py" "${CH_DIR}/DbgController.js" controllerScript
-if [[ $? != 0 ]]; then
- exit 1
-fi
-
if [ ! -d "$BUILD_DIRECTORY" ]; then
SAFE_RUN `mkdir -p $BUILD_DIRECTORY`
fi
@@ -655,6 +620,9 @@ if [[ $ARCH =~ "x86" ]]; then
elif [[ $ARCH =~ "arm" ]]; then
ARCH="-DCC_TARGETS_ARM_SH=1"
echo "Compile Target : arm"
+elif [[ $ARCH =~ "arm64" ]]; then
+ ARCH="-DCC_TARGETS_ARM64_SH=1"
+ echo "Compile Target : arm64"
elif [[ $ARCH =~ "amd64" ]]; then
ARCH="-DCC_TARGETS_AMD64_SH=1"
echo "Compile Target : amd64"
@@ -663,12 +631,13 @@ else
echo "Compile Target : System Default"
fi
-echo Generating $BUILD_TYPE makefiles
+echo Generating $BUILD_TYPE build
echo $EXTRA_DEFINES
-cmake $CMAKE_GEN $CC_PREFIX $CMAKE_ICU $LTO $LTTNG $STATIC_LIBRARY $ARCH $TARGET_OS \
- $ENABLE_CC_XPLAT_TRACE $EXTRA_DEFINES -DCMAKE_BUILD_TYPE=$BUILD_TYPE $SANITIZE $NO_JIT $CMAKE_INTL \
- $WITHOUT_FEATURES $WB_FLAG $WB_ARGS $CMAKE_EXPORT_COMPILE_COMMANDS $LIBS_ONLY_BUILD\
- $VALGRIND $BUILD_RELATIVE_DIRECTORY $CCACHE_NAME
+cmake $CMAKE_GEN -DCHAKRACORE_BUILD_SH=ON $CC_PREFIX $CMAKE_ICU $LTO $LTTNG \
+ $STATIC_LIBRARY $ARCH $TARGET_OS \
+ $ENABLE_CC_XPLAT_TRACE $EXTRA_DEFINES \
+ -DCMAKE_BUILD_TYPE=$BUILD_TYPE $SANITIZE $NO_JIT $CMAKE_INTL \
+ $WITHOUT_FEATURES $WB_FLAG $WB_ARGS $CMAKE_EXPORT_COMPILE_COMMANDS \
+ $LIBS_ONLY_BUILD $VALGRIND $BUILD_RELATIVE_DIRECTORY $CCACHE_NAME
_RET=$?
if [[ $? == 0 ]]; then
diff --git a/deps/Chakra.ICU/Chakra.ICU.Build.props b/deps/Chakra.ICU/Chakra.ICU.Build.props
index 0994ed09a0c..14eb2a7adb2 100644
--- a/deps/Chakra.ICU/Chakra.ICU.Build.props
+++ b/deps/Chakra.ICU/Chakra.ICU.Build.props
@@ -27,7 +27,7 @@
UCONFIG_NO_REGULAR_EXPRESSIONS=1;
UCONFIG_NO_SERVICE=1;
%(PreprocessorDefinitions)
-
+
@@ -36,6 +36,9 @@
_CRT_SECURE_NO_DEPRECATE;
%(PreprocessorDefinitions)
+
+
+ /utf-8 %(AdditionalOptions)
diff --git a/deps/Chakra.ICU/Chakra.ICU.i18n.vcxproj b/deps/Chakra.ICU/Chakra.ICU.i18n.vcxproj
index 8804a55dbca..de51a2f1bd4 100644
--- a/deps/Chakra.ICU/Chakra.ICU.i18n.vcxproj
+++ b/deps/Chakra.ICU/Chakra.ICU.i18n.vcxproj
@@ -31,9 +31,6 @@
%(AdditionalIncludeDirectories);
$(IcuSourceDirectory)\common
-
-
- /utf-8 %(AdditionalOptions)
Console
diff --git a/jenkins/buildone.cmd b/jenkins/buildone.cmd
deleted file mode 100644
index ddd3b03cd85..00000000000
--- a/jenkins/buildone.cmd
+++ /dev/null
@@ -1,61 +0,0 @@
-::-------------------------------------------------------------------------------------------------------
-:: Copyright (C) Microsoft. All rights reserved.
-:: Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
-::-------------------------------------------------------------------------------------------------------
-
-@echo off
-setlocal
-
-REM set TEMP and TMP to a new temp folder under the WORKSPACE and create it
-set TEMP=%WORKSPACE%\TEMP
-set TMP=%TEMP%
-REM create the TMP folder if it doesn't exist
-if not exist %TEMP% (
- mkdir %TEMP%
-)
-
-if "%_ENTRY_SCRIPT_NAME%"=="" (
- set _ENTRY_SCRIPT_NAME=%0
-)
-
-REM check that we have enough parameters
-if "%1"=="" (
- goto :usage
-)
-if "%2"=="" (
- goto :usage
-)
-
-:: ============================================================================
-:: Main script
-:: ============================================================================
-:main
-
- set JENKINS_BUILD=True
- call %~dp0..\test\jenkins.buildone.cmd %*
-
- goto :end
-
-:: ============================================================================
-:: Not enough params
-:: ============================================================================
-:usage
-
- echo Not enough parameters. Please specify architecture and type.
- echo Examples:
- echo.
- echo %_ENTRY_SCRIPT_NAME% x86 debug
- echo %_ENTRY_SCRIPT_NAME% x86 test
- echo %_ENTRY_SCRIPT_NAME% x86 release
- echo.
- echo %_ENTRY_SCRIPT_NAME% x64 debug
- echo %_ENTRY_SCRIPT_NAME% x64 test
- echo %_ENTRY_SCRIPT_NAME% x64 release
-
- goto :end
-
-:: ============================================================================
-:: Epilogue of script (cleanup)
-:: ============================================================================
-:end
-endlocal
diff --git a/jenkins/get_system_info.sh b/jenkins/get_system_info.sh
deleted file mode 100755
index 308f593e3f3..00000000000
--- a/jenkins/get_system_info.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#-------------------------------------------------------------------------------------------------------
-# Copyright (C) Microsoft. All rights reserved.
-# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
-#-------------------------------------------------------------------------------------------------------
-
-if [[ $# -eq 0 ]]; then
- echo "No platform passed in- assuming Linux"
- _PLATFORM="linux"
-fi
-
-while [[ $# -gt 0 ]]; do
- case "$1" in
- --linux)
- _PLATFORM="linux"
- ;;
- --osx)
- _PLATFORM="osx"
- ;;
- esac
-
- shift
-done
-
-echo
-echo "=================================================="
-echo
-
-if [[ $_PLATFORM =~ "linux" ]]; then
- echo "Number of processors (nproc):"
- echo
- nproc
-elif [[ $_PLATFORM =~ "osx" ]]; then
- echo "Number of processors (sysctl -n hw.logicalcpu):"
- echo
- sysctl -n hw.logicalcpu
-else
- echo "Unknown platform"
- exit 1
-fi
-
-echo
-echo "--------------------------------------------------"
-echo
-
-if [[ $_PLATFORM =~ "linux" ]]; then
- echo "Linux version (lsb_release -a):"
- echo
- lsb_release -a
-elif [[ $_PLATFORM =~ "osx" ]]; then
- echo "OS X version (sw_vers -productVersion):"
- echo
- sw_vers -productVersion
-fi
-
-
-echo
-echo "--------------------------------------------------"
-echo
-
-echo "Clang version (clang --version):"
-echo
-clang --version
-
-echo
-echo "--------------------------------------------------"
-echo
-
-echo "cmake version (cmake --version):"
-echo
-cmake --version
-
-echo
-echo "=================================================="
-echo
diff --git a/lib/Backend/AsmJsJITInfo.cpp b/lib/Backend/AsmJsJITInfo.cpp
index c62cb8ba7df..5a5bae12fa7 100644
--- a/lib/Backend/AsmJsJITInfo.cpp
+++ b/lib/Backend/AsmJsJITInfo.cpp
@@ -97,4 +97,4 @@ AsmJsJITInfo::AccessNeedsBoundCheck(uint offset) const
{
return offset >= 0x10000;
}
-#endif
\ No newline at end of file
+#endif
diff --git a/lib/Backend/BackendApi.cpp b/lib/Backend/BackendApi.cpp
index 134555f1a08..cfd7b5c6579 100644
--- a/lib/Backend/BackendApi.cpp
+++ b/lib/Backend/BackendApi.cpp
@@ -126,7 +126,7 @@ Js::JavascriptMethod GetCheckAsmJsCodeGenThunk()
uint GetBailOutRegisterSaveSlotCount()
{
- // REVIEW: not all registers are used, we are allocating more space then necessary.
+ // REVIEW: not all registers are used, we are allocating more space than necessary.
return LinearScanMD::GetRegisterSaveSlotCount();
}
@@ -142,10 +142,10 @@ void CheckIsExecutable(Js::RecyclableObject * function, Js::JavascriptMethod ent
{
Js::ScriptContext * scriptContext = function->GetScriptContext();
// it's easy to call the default entry point from RecyclableObject.
- AssertMsg((Js::JavascriptFunction::Is(function) && Js::JavascriptFunction::FromVar(function)->IsExternalFunction())
+ AssertMsg((Js::VarIs(function) && Js::VarTo(function)->IsExternalFunction())
|| Js::CrossSite::IsThunk(entrypoint)
// External object with entrypoint
- || (!Js::JavascriptFunction::Is(function)
+ || (!Js::VarIs(function)
&& function->IsExternal()
&& Js::JavascriptConversion::IsCallable(function))
|| !scriptContext->IsActuallyClosed()
@@ -160,7 +160,7 @@ void CheckIsExecutable(Js::RecyclableObject * function, Js::JavascriptMethod ent
{
return;
}
-
+
Js::TypeId typeId = Js::JavascriptOperators::GetTypeId(function);
if (typeId == Js::TypeIds_HostDispatch)
{
diff --git a/lib/Backend/BackendOpCodeAttrAsmJs.cpp b/lib/Backend/BackendOpCodeAttrAsmJs.cpp
index b647a421376..c52dc963a08 100644
--- a/lib/Backend/BackendOpCodeAttrAsmJs.cpp
+++ b/lib/Backend/BackendOpCodeAttrAsmJs.cpp
@@ -68,4 +68,4 @@ namespace OpCodeAttrAsmJs
}
}; // OpCodeAttrAsmJs
-#endif
\ No newline at end of file
+#endif
diff --git a/lib/Backend/BackendOpCodeAttrAsmJs.h b/lib/Backend/BackendOpCodeAttrAsmJs.h
index d267d7516f6..d43359aea99 100644
--- a/lib/Backend/BackendOpCodeAttrAsmJs.h
+++ b/lib/Backend/BackendOpCodeAttrAsmJs.h
@@ -12,4 +12,4 @@ namespace OpCodeAttrAsmJs
bool HasProfiledOp(Js::OpCodeAsmJs opcode);
bool IsProfiledOp(Js::OpCodeAsmJs opcode);
};
-#endif
\ No newline at end of file
+#endif
diff --git a/lib/Backend/BackwardPass.cpp b/lib/Backend/BackwardPass.cpp
index e73fce97fa9..4a3c567f586 100644
--- a/lib/Backend/BackwardPass.cpp
+++ b/lib/Backend/BackwardPass.cpp
@@ -87,13 +87,11 @@ BackwardPass::DoMarkTempNumbers() const
}
bool
-BackwardPass::DoMarkTempObjects() const
-{
- // only mark temp object on the backward store phase
- return (tag == Js::BackwardPhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
- !PHASE_OFF(Js::MarkTempObjectPhase, this->func) && func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
- !func->IsJitInDebugMode() &&
- func->DoGlobOptsForGeneratorFunc();
+BackwardPass::SatisfyMarkTempObjectsConditions() const {
+ return !PHASE_OFF(Js::MarkTempPhase, this->func) &&
+ !PHASE_OFF(Js::MarkTempObjectPhase, this->func) &&
+ func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
+ !func->IsJitInDebugMode();
// Why MarkTempObject is disabled under debugger:
// We add 'identified so far dead non-temp locals' to byteCodeUpwardExposedUsed in ProcessBailOutInfo,
@@ -101,6 +99,13 @@ BackwardPass::DoMarkTempObjects() const
// from a temp to non-temp. That's in general not a supported conversion (while non-temp -> temp is fine).
}
+bool
+BackwardPass::DoMarkTempObjects() const
+{
+ // only mark temp object on the backward store phase
+ return (tag == Js::BackwardPhase) && SatisfyMarkTempObjectsConditions();
+}
+
bool
BackwardPass::DoMarkTempNumbersOnTempObjects() const
{
@@ -112,8 +117,7 @@ bool
BackwardPass::DoMarkTempObjectVerify() const
{
// only mark temp object on the backward store phase
- return (tag == Js::DeadStorePhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
- !PHASE_OFF(Js::MarkTempObjectPhase, this->func) && func->DoGlobOpt() && func->GetHasTempObjectProducingInstr();
+ return (tag == Js::DeadStorePhase) && SatisfyMarkTempObjectsConditions();
}
#endif
@@ -150,8 +154,7 @@ BackwardPass::DoDeadStore(Func* func, StackSym* sym)
// Dead store is disabled under debugger for non-temp local vars.
return
DoDeadStore(func) &&
- !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot())) &&
- func->DoGlobOptsForGeneratorFunc();
+ !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot()));
}
bool
@@ -162,8 +165,7 @@ BackwardPass::DoTrackNegativeZero() const
!PHASE_OFF(Js::TrackNegativeZeroPhase, func) &&
func->DoGlobOpt() &&
!IsPrePass() &&
- !func->IsJitInDebugMode() &&
- func->DoGlobOptsForGeneratorFunc();
+ !func->IsJitInDebugMode();
}
bool
@@ -175,8 +177,7 @@ BackwardPass::DoTrackBitOpsOrNumber() const
tag == Js::BackwardPhase &&
func->DoGlobOpt() &&
!IsPrePass() &&
- !func->IsJitInDebugMode() &&
- func->DoGlobOptsForGeneratorFunc();
+ !func->IsJitInDebugMode();
#else
return false;
#endif
@@ -191,8 +192,7 @@ BackwardPass::DoTrackIntOverflow() const
tag == Js::BackwardPhase &&
!IsPrePass() &&
globOpt->DoLossyIntTypeSpec() &&
- !func->IsJitInDebugMode() &&
- func->DoGlobOptsForGeneratorFunc();
+ !func->IsJitInDebugMode();
}
bool
@@ -232,6 +232,7 @@ BackwardPass::CleanupBackwardPassInfoInFlowGraph()
block->typesNeedingKnownObjectLayout = nullptr;
block->slotDeadStoreCandidates = nullptr;
block->byteCodeUpwardExposedUsed = nullptr;
+ block->liveFixedFields = nullptr;
#if DBG
block->byteCodeRestoreSyms = nullptr;
block->excludeByteCodeUpwardExposedTracking = nullptr;
@@ -307,9 +308,11 @@ void
BackwardPass::MarkScopeObjSymUseForStackArgOpt()
{
IR::Instr * instr = this->currentInstr;
+ BasicBlock *block = this->currentBlock;
+
if (tag == Js::DeadStorePhase)
{
- if (instr->DoStackArgsOpt(this->func) && instr->m_func->GetScopeObjSym() != nullptr && this->DoByteCodeUpwardExposedUsed())
+ if (instr->DoStackArgsOpt() && !block->IsLandingPad() && instr->m_func->GetScopeObjSym() != nullptr && this->DoByteCodeUpwardExposedUsed())
{
this->currentBlock->byteCodeUpwardExposedUsed->Set(instr->m_func->GetScopeObjSym()->m_id);
}
@@ -321,16 +324,17 @@ BackwardPass::ProcessBailOnStackArgsOutOfActualsRange()
{
IR::Instr * instr = this->currentInstr;
- if (tag == Js::DeadStorePhase &&
- (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
+ if (tag == Js::DeadStorePhase &&
+ (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
instr->HasBailOutInfo() && !IsPrePass())
{
- if (instr->DoStackArgsOpt(this->func))
+ if (instr->DoStackArgsOpt())
{
AssertMsg(instr->GetBailOutKind() & IR::BailOnStackArgsOutOfActualsRange, "Stack args bail out is not set when the optimization is turned on? ");
if (instr->GetBailOutKind() & ~IR::BailOnStackArgsOutOfActualsRange)
{
- Assert(instr->GetBailOutKind() == (IR::BailOnStackArgsOutOfActualsRange | IR::BailOutOnImplicitCallsPreOp));
+ // Make sure that, once any LazyBailOut and BailOutOnImplicitCallsPreOp bits are stripped, only the BailOnStackArgsOutOfActualsRange bit remains set
+ Assert((BailOutInfo::WithoutLazyBailOut(instr->GetBailOutKind() & ~IR::BailOutOnImplicitCallsPreOp)) == IR::BailOnStackArgsOutOfActualsRange);
//We are sure at this point, that we will not have any implicit calls as we wouldn't have done this optimization in the first place.
instr->SetBailOutKind(IR::BailOnStackArgsOutOfActualsRange);
}
@@ -486,6 +490,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
BVSparse<JitArenaAllocator> * slotDeadStoreCandidates = nullptr;
BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = nullptr;
BVSparse<JitArenaAllocator> * couldRemoveNegZeroBailoutForDef = nullptr;
+ BVSparse<JitArenaAllocator> * liveFixedFields = nullptr;
#if DBG
uint byteCodeLocalsCount = func->GetJITFunctionBody()->GetLocalsCount();
StackSym ** byteCodeRestoreSyms = nullptr;
@@ -513,6 +518,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
if (!block->isDead)
{
bool keepUpwardExposed = (this->tag == Js::BackwardPhase);
+
JitArenaAllocator *upwardExposedArena = nullptr;
if(!IsCollectionPass())
{
@@ -522,6 +528,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
if (this->tag == Js::DeadStorePhase)
{
+ liveFixedFields = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
typesNeedingKnownObjectLayout = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
}
@@ -659,6 +666,13 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
|| (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
|| !this->DoMarkTempObjectVerify());
+ if (this->tag == Js::DeadStorePhase && blockSucc->liveFixedFields != nullptr)
+ {
+ liveFixedFields->Or(blockSucc->liveFixedFields);
+ JitAdelete(this->tempAlloc, blockSucc->liveFixedFields);
+ blockSucc->liveFixedFields = nullptr;
+ }
+
if (blockSucc->upwardExposedUses != nullptr)
{
upwardExposedUses->Or(blockSucc->upwardExposedUses);
@@ -737,7 +751,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
this->func->GetDebugNumberSet(debugStringBuffer),
block->GetBlockNum(), blockSucc->GetBlockNum());
- auto fixupFrom = [block, blockSucc, this](Bucket &bucket)
+ auto fixupFrom = [block, blockSucc, upwardExposedUses, this](Bucket &bucket)
{
AddPropertyCacheBucket *fromData = &bucket.element;
if (fromData->GetInitialType() == nullptr ||
@@ -746,10 +760,10 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
return;
}
- this->InsertTypeTransitionsAtPriorSuccessors(block, blockSucc, bucket.value, fromData);
+ this->InsertTypeTransitionsAtPriorSuccessors(block, blockSucc, bucket.value, fromData, upwardExposedUses);
};
- auto fixupTo = [blockSucc, this](Bucket &bucket)
+ auto fixupTo = [blockSucc, upwardExposedUses, this](Bucket &bucket)
{
AddPropertyCacheBucket *toData = &bucket.element;
if (toData->GetInitialType() == nullptr ||
@@ -758,7 +772,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
return;
}
- this->InsertTypeTransitionAtBlock(blockSucc, bucket.value, toData);
+ this->InsertTypeTransitionAtBlock(blockSucc, bucket.value, toData, upwardExposedUses);
};
if (blockSucc->stackSymToFinalType != nullptr)
@@ -935,6 +949,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
blockSucc->couldRemoveNegZeroBailoutForDef = nullptr;
}
}
+ this->CombineTypeIDsWithFinalType(block, blockSucc);
}
if (blockSucc->noImplicitCallUses != nullptr)
@@ -1197,6 +1212,7 @@ BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
block->noImplicitCallJsArrayHeadSegmentSymUses = noImplicitCallJsArrayHeadSegmentSymUses;
block->noImplicitCallArrayLengthSymUses = noImplicitCallArrayLengthSymUses;
block->couldRemoveNegZeroBailoutForDef = couldRemoveNegZeroBailoutForDef;
+ block->liveFixedFields = liveFixedFields;
}
ObjTypeGuardBucket
@@ -1333,6 +1349,12 @@ BackwardPass::DeleteBlockData(BasicBlock * block)
JitAdelete(this->tempAlloc, block->noImplicitCallArrayLengthSymUses);
block->noImplicitCallArrayLengthSymUses = nullptr;
}
+ if (block->liveFixedFields != nullptr)
+ {
+ JitArenaAllocator *liveFixedFieldsArena = this->tempAlloc;
+ JitAdelete(liveFixedFieldsArena, block->liveFixedFields);
+ block->liveFixedFields = nullptr;
+ }
if (block->upwardExposedUses != nullptr)
{
JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
@@ -1719,19 +1741,19 @@ BackwardPass::ProcessBailOutArgObj(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed)
if (byteCodeUpwardExposedUsed->TestAndClear(symId))
{
- if (bailOutInfo->usedCapturedValues.argObjSyms == nullptr)
+ if (bailOutInfo->usedCapturedValues->argObjSyms == nullptr)
{
- bailOutInfo->usedCapturedValues.argObjSyms = JitAnew(this->func->m_alloc,
+ bailOutInfo->usedCapturedValues->argObjSyms = JitAnew(this->func->m_alloc,
BVSparse<JitArenaAllocator>, this->func->m_alloc);
}
- bailOutInfo->usedCapturedValues.argObjSyms->Set(symId);
+ bailOutInfo->usedCapturedValues->argObjSyms->Set(symId);
}
}
NEXT_BITSET_IN_SPARSEBV;
}
- if (bailOutInfo->usedCapturedValues.argObjSyms)
+ if (bailOutInfo->usedCapturedValues->argObjSyms)
{
- byteCodeUpwardExposedUsed->Minus(bailOutInfo->usedCapturedValues.argObjSyms);
+ byteCodeUpwardExposedUsed->Minus(bailOutInfo->usedCapturedValues->argObjSyms);
}
}
@@ -1741,7 +1763,7 @@ BackwardPass::ProcessBailOutConstants(BailOutInfo * bailOutInfo, BVSparsetag != Js::BackwardPhase);
// Remove constants that we are already going to restore
- SListBase<ConstantStackSymValue> * usedConstantValues = &bailOutInfo->usedCapturedValues.constantValues;
+ SListBase<ConstantStackSymValue> * usedConstantValues = &bailOutInfo->usedCapturedValues->constantValues;
FOREACH_SLISTBASE_ENTRY(ConstantStackSymValue, value, usedConstantValues)
{
byteCodeUpwardExposedUsed->Clear(value.Key()->m_id);
@@ -1749,9 +1771,17 @@ BackwardPass::ProcessBailOutConstants(BailOutInfo * bailOutInfo, BVSparsebailInInstr;
+
// Find other constants that we need to restore
FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->capturedValues->constantValues, iter)
{
+ if (bailInInstr)
+ {
+ // Store all captured constant values for the corresponding bail-in instr
+ bailInInstr->capturedValues.constantValues.PrependNode(this->func->m_alloc, value);
+ }
+
if (byteCodeUpwardExposedUsed->TestAndClear(value.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(value.Key()->m_id))
{
// Constant need to be restore, move it to the restore list
@@ -1773,7 +1803,7 @@ BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparsefunc->GetJITFunctionBody()->IsAsmJsMode());
// Remove copy prop that we were already going to restore
- SListBase<CopyPropSyms> * usedCopyPropSyms = &bailOutInfo->usedCapturedValues.copyPropSyms;
+ SListBase<CopyPropSyms> * usedCopyPropSyms = &bailOutInfo->usedCapturedValues->copyPropSyms;
FOREACH_SLISTBASE_ENTRY(CopyPropSyms, copyPropSyms, usedCopyPropSyms)
{
byteCodeUpwardExposedUsed->Clear(copyPropSyms.Key()->m_id);
@@ -1784,6 +1814,7 @@ BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparsefunc->m_alloc;
BasicBlock * block = this->currentBlock;
BVSparse<JitArenaAllocator> * upwardExposedUses = block->upwardExposedUses;
+ IR::GeneratorBailInInstr* bailInInstr = bailOutInfo->bailInInstr;
// Find other copy prop that we need to restore
FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->capturedValues->copyPropSyms, iter)
@@ -1793,6 +1824,13 @@ BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparseIsTypeSpec());
if (byteCodeUpwardExposedUsed->TestAndClear(copyPropSyms.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(copyPropSyms.Key()->m_id))
{
+ if (bailInInstr)
+ {
+ // Copy all copyprop syms into the corresponding bail-in instr so that
+ // we can map the correct symbols to restore during bail-in
+ bailInInstr->capturedValues.copyPropSyms.PrependNode(allocator, copyPropSyms);
+ }
+
// This copy-prop sym needs to be restored; add it to the restore list.
/*
@@ -1839,7 +1877,7 @@ BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparseGetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
bailOutInfo->bailOutOffset > instr->GetByteCodeOffset())
{
- // Currently, we only have post-op bailout with BailOutOnImplicitCalls
- // or JIT inserted operation (which no byte code offsets).
+ // Currently, we only have post-op bailout with BailOutOnImplicitCalls,
+ // LazyBailOut, or JIT inserted operation (which has no byte code offsets).
// If there are other bailouts that we want to bailout after the operation,
// we have to make sure that it still doesn't do the implicit call
// if it is done on the stack object.
// Otherwise, the stack object will be passed to the implicit call functions.
Assert(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset
|| (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls
+ || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::LazyBailOut
|| (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutInvalid);
// This instruction bails out to a later byte-code instruction, so process the bailout info now
- ProcessBailOutInfo(instr, bailOutInfo);
+ this->ProcessBailOutInfo(instr, bailOutInfo);
+
+ if (instr->HasLazyBailOut())
+ {
+ this->ClearDstUseForPostOpLazyBailOut(instr);
+ }
}
else
{
@@ -2172,10 +2216,47 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr)
}
bool
-BackwardPass::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool hasLiveFields)
+BackwardPass::IsLazyBailOutCurrentlyNeeeded(IR::Instr * instr) const
{
- return this->globOpt->IsImplicitCallBailOutCurrentlyNeeded(instr, nullptr, nullptr, this->currentBlock, hasLiveFields, mayNeedImplicitCallBailOut, false) ||
- this->NeedBailOutOnImplicitCallsForTypedArrayStore(instr);
+ if (!this->func->ShouldDoLazyBailOut())
+ {
+ return false;
+ }
+
+ Assert(this->tag == Js::DeadStorePhase);
+
+ // We insert potential lazy bailout points in the forward pass, so if the instruction doesn't
+ // have bailout info at this point, we know for sure lazy bailout is not needed.
+ if (!instr->HasLazyBailOut() || this->currentBlock->isDead)
+ {
+ return false;
+ }
+
+ AssertMsg(
+ this->currentBlock->liveFixedFields != nullptr,
+ "liveFixedField is null, MergeSuccBlocksInfo might have not initialized it?"
+ );
+
+ if (instr->IsStFldVariant())
+ {
+ Assert(instr->GetDst());
+ Js::PropertyId id = instr->GetDst()->GetSym()->AsPropertySym()->m_propertyId;
+
+ // We only need to protect against a StFld if it is storing to one of the live fixed fields
+ return this->currentBlock->liveFixedFields->Test(id);
+ }
+
+ return !this->currentBlock->liveFixedFields->IsEmpty();
+}
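// A minimal standalone sketch of the liveFixedFields bookkeeping above, for illustration only:
// std::unordered_set<int> stands in for BVSparse<JitArenaAllocator> and plain ints stand in for
// Js::PropertyId; none of the names below are real JIT types.
#include <cassert>
#include <unordered_set>

struct LiveFixedFieldsModel
{
    std::unordered_set<int> live;

    // CheckFixedFld on propertyId (seen while walking backward): the fixed-field assumption is live.
    void OnCheckFixedFld(int propertyId) { live.insert(propertyId); }

    // A StFld variant needs lazy bailout only if it stores to a live fixed field.
    bool StFldNeedsLazyBailOut(int propertyId) const { return live.count(propertyId) != 0; }

    // Any other instruction with a potential lazy bailout point needs it whenever any fixed field is live.
    bool OtherInstrNeedsLazyBailOut() const { return !live.empty(); }
};

int main()
{
    LiveFixedFieldsModel block;
    block.OnCheckFixedFld(42);
    assert(block.StFldNeedsLazyBailOut(42));      // a store to the same property must be guarded
    assert(!block.StFldNeedsLazyBailOut(7));      // stores to unrelated properties are not
    assert(block.OtherInstrNeedsLazyBailOut());   // e.g. a helper call while a fixed field is live
    return 0;
}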
+
+bool
+BackwardPass::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool needLazyBailOut, bool hasLiveFields)
+{
+ return this->globOpt->IsImplicitCallBailOutCurrentlyNeeded(
+ instr, nullptr /* src1Val */, nullptr /* src2Val */,
+ this->currentBlock, hasLiveFields, mayNeedImplicitCallBailOut, false /* isForwardPass */, needLazyBailOut
+ ) ||
+ this->NeedBailOutOnImplicitCallsForTypedArrayStore(instr);
}
void
@@ -2202,7 +2283,7 @@ BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr)
return;
}
- IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
+ const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
if (!IR::IsTypeCheckBailOutKind(oldBailOutKind))
{
return;
@@ -2286,11 +2367,35 @@ BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr)
instr->GetBailOutInfo()->polymorphicCacheIndex = (uint)-1;
// Keep the mark temp object bit if it is there so that we will not remove the implicit call check
- instr->SetBailOutKind(IR::BailOutOnImplicitCallsPreOp | (oldBailOutKind & IR::BailOutMarkTempObject));
+ IR::BailOutKind newBailOutKind = IR::BailOutOnImplicitCallsPreOp | (oldBailOutKind & IR::BailOutMarkTempObject);
+ if (BailOutInfo::HasLazyBailOut(oldBailOutKind))
+ {
+ instr->SetBailOutKind(BailOutInfo::WithLazyBailOut(newBailOutKind));
+ }
+ else
+ {
+ instr->SetBailOutKind(newBailOutKind);
+ }
+}
+
+void
+BackwardPass::DeadStoreLazyBailOut(IR::Instr * instr, bool needsLazyBailOut)
+{
+ if (!this->IsPrePass() && !needsLazyBailOut && instr->HasLazyBailOut())
+ {
+ instr->ClearLazyBailOut();
+ if (!instr->HasBailOutInfo())
+ {
+ if (this->preOpBailOutInstrToProcess == instr)
+ {
+ this->preOpBailOutInstrToProcess = nullptr;
+ }
+ }
+ }
}
void
-BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields)
+BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields, bool needsLazyBailOut)
{
Assert(this->tag == Js::DeadStorePhase);
@@ -2309,15 +2414,15 @@ BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields
UpdateArrayBailOutKind(instr);
// Install the implicit call PreOp for mark temp object if we need one.
- IR::BailOutKind kind = instr->GetBailOutKind();
- IR::BailOutKind kindNoBits = kind & ~IR::BailOutKindBits;
- if ((kind & IR::BailOutMarkTempObject) != 0 && kindNoBits != IR::BailOutOnImplicitCallsPreOp)
+ if ((instr->GetBailOutKind() & IR::BailOutMarkTempObject) != 0 && instr->GetBailOutKindNoBits() != IR::BailOutOnImplicitCallsPreOp)
{
+ IR::BailOutKind kind = instr->GetBailOutKind();
+ const IR::BailOutKind kindNoBits = instr->GetBailOutKindNoBits();
Assert(kindNoBits != IR::BailOutOnImplicitCalls);
if (kindNoBits == IR::BailOutInvalid)
{
- // We should only have combined with array bits
- Assert((kind & ~IR::BailOutForArrayBits) == IR::BailOutMarkTempObject);
+ // We should only have combined with array bits or lazy bailout
+ Assert(BailOutInfo::WithoutLazyBailOut(kind & ~IR::BailOutForArrayBits) == IR::BailOutMarkTempObject);
// Don't need to install if we are not going to do helper calls,
// or we are in the landingPad since implicit calls are already turned off.
if ((kind & IR::BailOutOnArrayAccessHelperCall) == 0 && !this->currentBlock->IsLandingPad())
@@ -2330,9 +2435,10 @@ BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields
// Currently only try to eliminate these bailout kinds. The others are required in cases
// where we don't necessarily have live/hoisted fields.
- const bool mayNeedBailOnImplicitCall = BailOutInfo::IsBailOutOnImplicitCalls(kind);
+ const bool mayNeedBailOnImplicitCall = BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind());
if (!mayNeedBailOnImplicitCall)
{
+ const IR::BailOutKind kind = instr->GetBailOutKind();
if (kind & IR::BailOutMarkTempObject)
{
if (kind == IR::BailOutMarkTempObject)
@@ -2356,9 +2462,8 @@ BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields
// We have an implicit call bailout in the code, and we want to make sure that it's required.
// Do this now, because only in the dead store pass do we have complete forward and backward liveness info.
- bool needsBailOutOnImplicitCall = this->IsImplicitCallBailOutCurrentlyNeeded(instr, mayNeedBailOnImplicitCall, hasLiveFields);
-
- if(!UpdateImplicitCallBailOutKind(instr, needsBailOutOnImplicitCall))
+ bool needsBailOutOnImplicitCall = this->IsImplicitCallBailOutCurrentlyNeeded(instr, mayNeedBailOnImplicitCall, needsLazyBailOut, hasLiveFields);
+ if(!UpdateImplicitCallBailOutKind(instr, needsBailOutOnImplicitCall, needsLazyBailOut))
{
instr->ClearBailOutInfo();
if (preOpBailOutInstrToProcess == instr)
@@ -2374,6 +2479,80 @@ BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields
}
}
+bool
+BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr* const instr, bool needsBailOutOnImplicitCall, bool needsLazyBailOut)
+{
+ Assert(instr);
+ Assert(instr->HasBailOutInfo());
+ Assert(BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()));
+ AssertMsg(
+ needsLazyBailOut || instr->GetBailOutKind() == BailOutInfo::WithoutLazyBailOut(instr->GetBailOutKind()),
+ "We should have removed all lazy bailout bit at this point if we decided that we wouldn't need it"
+ );
+ AssertMsg(
+ !needsLazyBailOut || instr->GetBailOutKind() == BailOutInfo::WithLazyBailOut(instr->GetBailOutKind()),
+ "The lazy bailout bit should be present at this point. We might have removed it incorrectly."
+ );
+
+ IR::BailOutKind bailOutKindWithBits = instr->GetBailOutKind();
+
+ const bool hasMarkTempObject = bailOutKindWithBits & IR::BailOutMarkTempObject;
+
+ // Firstly, we remove the mark temp object bit, as it is not needed after the dead store pass.
+ // We will later skip removing BailOutOnImplicitCalls when there is a mark temp object bit regardless
+ // of `needsBailOutOnImplicitCall`.
+ if (hasMarkTempObject)
+ {
+ bailOutKindWithBits &= ~IR::BailOutMarkTempObject;
+ instr->SetBailOutKind(bailOutKindWithBits);
+ }
+
+ if (needsBailOutOnImplicitCall)
+ {
+ // We decided that BailOutOnImplicitCall is needed. So lazy bailout is unnecessary
+ // because we are already protected from potential side effects unless the operation
+ // itself can change fields' values (StFld/StElem).
+ if (needsLazyBailOut && !instr->CanChangeFieldValueWithoutImplicitCall())
+ {
+ instr->ClearLazyBailOut();
+ }
+
+ return true;
+ }
+ else
+ {
+ // `needsBailOutOnImplicitCall` also captures our intention to keep BailOutOnImplicitCalls
+ // because we want to do the fixed-field lazy bailout optimization. So if we don't need it,
+ // just remove our lazy bailout.
+ instr->ClearLazyBailOut();
+ if (!instr->HasBailOutInfo())
+ {
+ return true;
+ }
+ }
+
+ const IR::BailOutKind bailOutKindWithoutBits = instr->GetBailOutKindNoBits();
+ if (!instr->GetBailOutInfo()->canDeadStore)
+ {
+ // revisit if canDeadStore is used for anything other than BailOutMarkTempObject
+ Assert(hasMarkTempObject);
+ // Don't remove the implicit call pre op bailout for mark temp object.
+ Assert(bailOutKindWithoutBits == IR::BailOutOnImplicitCallsPreOp);
+ return true;
+ }
+
+ // At this point, we don't need the bail on implicit calls.
+ // Simply use the bailout kind bits as our new bailout kind.
+ IR::BailOutKind newBailOutKind = bailOutKindWithBits - bailOutKindWithoutBits;
+
+ if (newBailOutKind == IR::BailOutInvalid)
+ {
+ return false;
+ }
+
+ instr->SetBailOutKind(newBailOutKind);
+ return true;
+}
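// A minimal sketch of the kind/bit arithmetic the function above relies on. The enum below is a
// simplified stand-in for IR::BailOutKind (the real values and helpers such as GetBailOutKindNoBits
// live in the JIT); it only illustrates how auxiliary bits survive when the main kind is dropped.
#include <cassert>
#include <cstdint>

enum KindModel : uint32_t
{
    Invalid              = 0,
    OnImplicitCallsPreOp = 1,            // a "main" bailout kind (low bits)
    KindMask             = 0xFF,         // everything below the auxiliary bits
    MarkTempObject       = 1u << 8,      // auxiliary bits that can be combined with a main kind
    LazyBailOutBit       = 1u << 9
};

int main()
{
    uint32_t kind = OnImplicitCallsPreOp | MarkTempObject | LazyBailOutBit;

    kind &= ~static_cast<uint32_t>(MarkTempObject); // drop the mark-temp-object bit first
    uint32_t kindNoBits = kind & KindMask;          // analogue of GetBailOutKindNoBits()
    assert(kindNoBits == OnImplicitCallsPreOp);

    // If bail-on-implicit-calls turns out to be unnecessary, keep only the auxiliary bits.
    uint32_t newKind = kind - kindNoBits;           // here: just the lazy bailout bit
    assert(newKind == LazyBailOutBit);              // Invalid would instead mean "clear the bailout entirely"
    return 0;
}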
bool
BackwardPass::NeedBailOutOnImplicitCallsForTypedArrayStore(IR::Instr* instr)
@@ -2504,11 +2683,16 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
BVSparse<JitArenaAllocator>* tmpBv = nullptr;
if (instr->IsBranchInstr())
{
- IR::LabelInstr* target = instr->AsBranchInstr()->GetTarget();
+ IR::BranchInstr* branchInstr = instr->AsBranchInstr();
+ IR::LabelInstr* target = branchInstr->GetTarget();
uint32 targetOffset = target->GetByteCodeOffset();
- if (targetOffset == instr->GetByteCodeOffset())
+
+ // If the instr's label has the same bytecode offset as the instr then move the targetOffset
+ // to the next bytecode instr. This can happen when we have airlock blocks or compensation
+ // code, but also for infinite loops. Don't do it for the latter.
+ if (targetOffset == instr->GetByteCodeOffset() && block != target->GetBasicBlock())
{
- // This can happen if the target is an break or airlock block
+ // This can happen if the target is a break or airlock block.
Assert(
target->GetBasicBlock()->isAirLockBlock ||
target->GetBasicBlock()->isAirLockCompensationBlock ||
@@ -2518,11 +2702,12 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
);
targetOffset = target->GetNextByteCodeInstr()->GetByteCodeOffset();
}
- BVSparse<JitArenaAllocator>* branchTargetUpdwardExposed = target->m_func->GetByteCodeOffsetUses(targetOffset);
- if (branchTargetUpdwardExposed)
+ BVSparse<JitArenaAllocator>* branchTargetUpwardExposed = target->m_func->GetByteCodeOffsetUses(targetOffset);
+ if (branchTargetUpwardExposed)
{
- // The bailout should restore both the bailout destination and the branch target since we don't know where we'll end up
- trackingByteCodeUpwardExposedUsed = tmpBv = trackingByteCodeUpwardExposedUsed->OrNew(branchTargetUpdwardExposed);
+ // The bailout should restore both the bailout destination and
+ // the branch target since we don't know where we'll end up.
+ trackingByteCodeUpwardExposedUsed = tmpBv = trackingByteCodeUpwardExposedUsed->OrNew(branchTargetUpwardExposed);
}
}
Assert(trackingByteCodeUpwardExposedUsed);
@@ -2605,7 +2790,7 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
tempBv->And(this->func->m_nonTempLocalVars, bailOutInfo->liveVarSyms);
// Remove syms that are restored in other ways than byteCodeUpwardExposedUsed.
- FOREACH_SLIST_ENTRY(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues)
+ FOREACH_SLIST_ENTRY(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues->constantValues)
{
Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
if (value.Key()->HasByteCodeRegSlot())
@@ -2614,7 +2799,7 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
}
}
NEXT_SLIST_ENTRY;
- FOREACH_SLIST_ENTRY(CopyPropSyms, value, &bailOutInfo->usedCapturedValues.copyPropSyms)
+ FOREACH_SLIST_ENTRY(CopyPropSyms, value, &bailOutInfo->usedCapturedValues->copyPropSyms)
{
Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
if (value.Key()->HasByteCodeRegSlot())
@@ -2623,9 +2808,9 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
}
}
NEXT_SLIST_ENTRY;
- if (bailOutInfo->usedCapturedValues.argObjSyms)
+ if (bailOutInfo->usedCapturedValues->argObjSyms)
{
- tempBv->Minus(bailOutInfo->usedCapturedValues.argObjSyms);
+ tempBv->Minus(bailOutInfo->usedCapturedValues->argObjSyms);
}
byteCodeUpwardExposedUsed->Or(tempBv);
@@ -2701,6 +2886,7 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
}
});
}
+
JitAdelete(this->tempAlloc, bailoutReferencedArgSymsBv);
if (this->IsPrePass())
@@ -2709,6 +2895,21 @@ BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
}
}
+void
+BackwardPass::ClearDstUseForPostOpLazyBailOut(IR::Instr *instr)
+{
+ // Refer to comments on BailOutInfo::ClearUseOfDst()
+ Assert(instr->HasLazyBailOut());
+ IR::Opnd *dst = instr->GetDst();
+ if (!this->IsPrePass() && dst && dst->IsRegOpnd())
+ {
+ StackSym *stackSym = dst->GetStackSym();
+ if (stackSym)
+ {
+ instr->GetBailOutInfo()->ClearUseOfDst(stackSym->m_id);
+ }
+ }
+}
+
void
BackwardPass::ProcessBlock(BasicBlock * block)
{
@@ -2804,7 +3005,12 @@ BackwardPass::ProcessBlock(BasicBlock * block)
this->currentInstr = instr;
this->currentRegion = this->currentBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
-
+
+ if (instr->m_opcode == Js::OpCode::Yield && !this->IsCollectionPass())
+ {
+ this->DisallowMarkTempAcrossYield(this->currentBlock->byteCodeUpwardExposedUsed);
+ }
+
IR::Instr * insertedInstr = TryChangeInstrForStackArgOpt();
if (insertedInstr != nullptr)
{
@@ -2834,6 +3040,11 @@ BackwardPass::ProcessBlock(BasicBlock * block)
bool hasLiveFields = (block->upwardExposedFields && !block->upwardExposedFields->IsEmpty());
+ if (this->tag == Js::DeadStorePhase && block->stackSymToFinalType != nullptr)
+ {
+ this->InsertTypeTransitionsAtPotentialKills();
+ }
+
IR::Opnd * opnd = instr->GetDst();
if (opnd != nullptr)
{
@@ -3255,6 +3466,7 @@ BackwardPass::ProcessBlock(BasicBlock * block)
case Js::OpCode::StSlotBoxTemp:
case Js::OpCode::StSlotChkUndecl:
case Js::OpCode::StSuperFld:
+ case Js::OpCode::StSuperFldStrict:
case Js::OpCode::ProfiledStElemI_A:
case Js::OpCode::ProfiledStElemI_A_Strict:
case Js::OpCode::ProfiledStFld:
@@ -3263,6 +3475,7 @@ BackwardPass::ProcessBlock(BasicBlock * block)
case Js::OpCode::ProfiledStRootFld:
case Js::OpCode::ProfiledStRootFldStrict:
case Js::OpCode::ProfiledStSuperFld:
+ case Js::OpCode::ProfiledStSuperFldStrict:
// Unfortunately, being fed into a store means that we could have aliasing, and the
// consequence is that it may be re-read and then dereferenced. Note that we can do
// this case if we poison any array symbol that we store to on the way out, but the
@@ -3444,6 +3657,21 @@ BackwardPass::ProcessBlock(BasicBlock * block)
#endif
switch(instr->m_opcode)
{
+ case Js::OpCode::CheckFixedFld:
+ {
+ if (!this->IsPrePass())
+ {
+ // Use `propertyId` instead of `propertySymId` to track live fixed fields
+ // During jit transfer (`CreateFrom()`), all properties that can be fixed are transferred
+ // over and also invalidated using `propertyId` regardless of whether we choose to fix them in the jit.
+ // So all properties with the same name are invalidated even though not all of them are fixed.
+ // Therefore we need to attach lazy bailout using propertyId so that all of them can be protected.
+ Assert(instr->GetSrc1() && block->liveFixedFields);
+ block->liveFixedFields->Set(instr->GetSrc1()->GetSym()->AsPropertySym()->m_propertyId);
+ }
+
+ break;
+ }
case Js::OpCode::LdSlot:
{
DeadStoreOrChangeInstrForScopeObjRemoval(&instrPrev);
@@ -3528,13 +3756,20 @@ BackwardPass::ProcessBlock(BasicBlock * block)
#endif
}
+ bool needsLazyBailOut = this->IsLazyBailOutCurrentlyNeeeded(instr);
+ AssertMsg(
+ !needsLazyBailOut || instr->HasLazyBailOut(),
+ "Instruction does not have the lazy bailout bit. Forward pass did not insert it correctly?"
+ );
+
DeadStoreTypeCheckBailOut(instr);
- DeadStoreImplicitCallBailOut(instr, hasLiveFields);
+ DeadStoreLazyBailOut(instr, needsLazyBailOut);
+ DeadStoreImplicitCallBailOut(instr, hasLiveFields, needsLazyBailOut);
- if (block->stackSymToFinalType != nullptr)
- {
- this->InsertTypeTransitionsAtPotentialKills();
- }
+ AssertMsg(
+ this->IsPrePass() || (needsLazyBailOut || !instr->HasLazyBailOut()),
+ "We didn't remove lazy bailout after prepass even though we don't need it?"
+ );
// NoImplicitCallUses transfers need to be processed after determining whether implicit calls need to be disabled
// for the current instruction, because the instruction where the def occurs also needs implicit calls disabled.
@@ -3638,6 +3873,16 @@ BackwardPass::ProcessBlock(BasicBlock * block)
}
}
#endif
+
+ // Make a copy of upwardExposedUses for our bail-in code. Note that we have to do
+ // this at the bail-in instruction (right after the yield) and not at the yield point,
+ // since the yield instruction might use some symbols as operands that we don't
+ // need when bailing in.
+ if (instr->IsGeneratorBailInInstr() && this->currentBlock->upwardExposedUses)
+ {
+ instr->AsGeneratorBailInInstr()->upwardExposedUses.Copy(this->currentBlock->upwardExposedUses);
+ }
+
instrPrev = ProcessPendingPreOpBailOutInfo(instr);
#if DBG_DUMP
@@ -3659,12 +3904,13 @@ BackwardPass::ProcessBlock(BasicBlock * block)
this->tag == Js::DeadStorePhase
// We don't do the masking in simplejit due to reduced perf concerns and the issues
// with handling try/catch structures with late-added blocks
- && !this->func->IsSimpleJit()
+ && this->func->DoGlobOpt()
// We don't need the masking blocks in asmjs/wasm mode
&& !block->GetFirstInstr()->m_func->GetJITFunctionBody()->IsAsmJsMode()
&& !block->GetFirstInstr()->m_func->GetJITFunctionBody()->IsWasmFunction()
&& !block->isDead
&& !block->isDeleted
+ && CONFIG_FLAG_RELEASE(AddMaskingBlocks)
)
{
FOREACH_PREDECESSOR_BLOCK(blockPred, block)
@@ -3770,7 +4016,7 @@ BackwardPass::ProcessBlock(BasicBlock * block)
#endif
}
-bool
+bool
BackwardPass::CanDeadStoreInstrForScopeObjRemoval(Sym *sym) const
{
if (tag == Js::DeadStorePhase && this->currentInstr->m_func->IsStackArgsEnabled())
@@ -3833,7 +4079,7 @@ BackwardPass::DeadStoreOrChangeInstrForScopeObjRemoval(IR::Instr ** pInstrPrev)
IR::Instr * instr = this->currentInstr;
Func * currFunc = instr->m_func;
- if (this->tag == Js::DeadStorePhase && instr->m_func->IsStackArgsEnabled() && !IsPrePass())
+ if (this->tag == Js::DeadStorePhase && instr->m_func->IsStackArgsEnabled() && (IsPrePass() || !currentBlock->loop))
{
switch (instr->m_opcode)
{
@@ -3860,6 +4106,7 @@ BackwardPass::DeadStoreOrChangeInstrForScopeObjRemoval(IR::Instr ** pInstrPrev)
Assert(currFunc->HasStackSymForFormal(value));
StackSym * paramStackSym = currFunc->GetStackSymForFormal(value);
IR::RegOpnd * srcOpnd = IR::RegOpnd::New(paramStackSym, TyVar, currFunc);
+ srcOpnd->SetIsJITOptimizedReg(true);
instr->ReplaceSrc1(srcOpnd);
this->ProcessSymUse(paramStackSym, true, true);
@@ -3926,7 +4173,7 @@ BackwardPass::DeadStoreOrChangeInstrForScopeObjRemoval(IR::Instr ** pInstrPrev)
case Js::OpCode::GetCachedFunc:
{
// = GetCachedFunc ,
- // is converted to
+ // is converted to
// = NewScFunc ,
if (instr->GetSrc1()->IsScopeObjOpnd(currFunc))
@@ -3952,7 +4199,7 @@ IR::Instr *
BackwardPass::TryChangeInstrForStackArgOpt()
{
IR::Instr * instr = this->currentInstr;
- if (tag == Js::DeadStorePhase && instr->DoStackArgsOpt(this->func))
+ if (tag == Js::DeadStorePhase && instr->DoStackArgsOpt())
{
switch (instr->m_opcode)
{
@@ -3988,8 +4235,8 @@ BackwardPass::TryChangeInstrForStackArgOpt()
* -This is to facilitate Bailout to record the live Scope object Sym, whenever required.
* -Reason for doing is this because - Scope object has to be implicitly live whenever Heap Arguments object is live.
* -When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
- * -We do not know detailed information about Heap arguments obj syms(aliasing etc.) until we complete Forward Pass.
- * -And we want to avoid dead sym clean up (in this case, scope object though not explicitly live, it is live implicitly) during Block merging in the forward pass.
+ * -We do not know detailed information about Heap arguments obj syms(aliasing etc.) until we complete Forward Pass.
+ * -And we want to avoid dead sym clean up (in this case, scope object though not explicitly live, it is live implicitly) during Block merging in the forward pass.
* -Hence this is the optimal spot to do this.
*/
@@ -4026,7 +4273,7 @@ BackwardPass::IsFormalParamSym(Func * func, Sym * sym) const
if (sym->IsPropertySym())
{
- //If the sym is a propertySym, then see if the propertyId is within the range of the formals
+ //If the sym is a propertySym, then see if the propertyId is within the range of the formals
//We can have other properties stored in the scope object other than the formals (following the formals).
PropertySym * propSym = sym->AsPropertySym();
IntConstType value = propSym->m_propertyId;
@@ -4151,59 +4398,6 @@ BackwardPass::TraceBlockUses(BasicBlock * block, bool isStart)
#endif
-bool
-BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall)
-{
- Assert(instr);
- Assert(instr->HasBailOutInfo());
-
- IR::BailOutKind implicitCallBailOutKind = needsBailOutOnImplicitCall ? IR::BailOutOnImplicitCalls : IR::BailOutInvalid;
-
- IR::BailOutKind instrBailOutKind = instr->GetBailOutKind();
- if (instrBailOutKind & IR::BailOutMarkTempObject)
- {
- // Remove the mark temp object bit, as we don't need it after the dead store pass
- instrBailOutKind &= ~IR::BailOutMarkTempObject;
- instr->SetBailOutKind(instrBailOutKind);
-
- if (!instr->GetBailOutInfo()->canDeadStore)
- {
- return true;
- }
- }
-
- const IR::BailOutKind instrImplicitCallBailOutKind = instrBailOutKind & ~IR::BailOutKindBits;
- if(instrImplicitCallBailOutKind == IR::BailOutOnImplicitCallsPreOp)
- {
- if(needsBailOutOnImplicitCall)
- {
- implicitCallBailOutKind = IR::BailOutOnImplicitCallsPreOp;
- }
- }
- else if(instrImplicitCallBailOutKind != IR::BailOutOnImplicitCalls && instrImplicitCallBailOutKind != IR::BailOutInvalid)
- {
- // This bailout kind (the value of 'instrImplicitCallBailOutKind') must guarantee that implicit calls will not happen.
- // If it doesn't make such a guarantee, it must be possible to merge this bailout kind with an implicit call bailout
- // kind, and therefore should be part of BailOutKindBits.
- Assert(!needsBailOutOnImplicitCall);
- return true;
- }
-
- if(instrImplicitCallBailOutKind == implicitCallBailOutKind)
- {
- return true;
- }
-
- const IR::BailOutKind newBailOutKind = instrBailOutKind - instrImplicitCallBailOutKind + implicitCallBailOutKind;
- if(newBailOutKind == IR::BailOutInvalid)
- {
- return false;
- }
-
- instr->SetBailOutKind(newBailOutKind);
- return true;
-}
-
bool
BackwardPass::ProcessNoImplicitCallUses(IR::Instr *const instr)
{
@@ -4692,7 +4886,9 @@ BackwardPass::ProcessNewScObject(IR::Instr* instr)
return;
}
- if (instr->HasBailOutInfo() && (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutFailedCtorGuardCheck)
+ // The instruction could have a lazy bailout associated with it, which might get cleared
+ // later, so we make sure that we only process instructions with the right bailout kind.
+ if (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutFailedCtorGuardCheck)
{
Assert(instr->IsProfiledInstr());
Assert(instr->GetDst()->IsRegOpnd());
@@ -4720,7 +4916,7 @@ BackwardPass::ProcessNewScObject(IR::Instr* instr)
Assert(pBucket->GetInitialType() == ctorCache->GetType());
if (!this->IsPrePass())
{
- this->InsertTypeTransition(instr->m_next, objSym, pBucket);
+ this->InsertTypeTransition(instr->m_next, objSym, pBucket, block->upwardExposedUses);
}
#if DBG
pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
@@ -4733,6 +4929,7 @@ BackwardPass::ProcessNewScObject(IR::Instr* instr)
#else
block->stackSymToFinalType->Clear(objSym->m_id);
#endif
+ this->ClearTypeIDWithFinalType(objSym->m_id, block);
}
}
@@ -5115,6 +5312,7 @@ BackwardPass::ProcessStackSymUse(StackSym * stackSym, BOOLEAN isNonByteCodeUse)
// It has to have a var version for byte code regs
byteCodeUseSym = byteCodeUseSym->GetVarEquivSym(nullptr);
}
+
block->byteCodeUpwardExposedUsed->Set(byteCodeUseSym->m_id);
#if DBG
// We can only track first level function stack syms right now
@@ -5208,6 +5406,10 @@ BackwardPass::MayPropertyBeWrittenTo(Js::PropertyId propertyId)
void
BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
{
+ if (opnd == this->currentInstr->GetDst() && this->HasTypeIDWithFinalType(this->currentBlock))
+ {
+ opnd->SetCantChangeType(true);
+ }
// If this operand doesn't participate in the type check sequence it's a pass-through.
// We will not set any bits on the operand and we will ignore them when lowering.
@@ -5289,7 +5491,7 @@ BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
}
if (mayNeedTypeTransition &&
!this->IsPrePass() &&
- !this->currentInstr->HasBailOutInfo() &&
+ !this->currentInstr->HasTypeCheckBailOut() &&
(opnd->NeedsPrimaryTypeCheck() ||
opnd->NeedsLocalTypeCheck() ||
opnd->NeedsLoadFromProtoTypeCheck()))
@@ -5306,7 +5508,7 @@ BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
pBucket->GetFinalType() != nullptr &&
pBucket->GetFinalType() != pBucket->GetInitialType())
{
- this->InsertTypeTransition(this->currentInstr->m_next, baseSym, pBucket);
+ this->InsertTypeTransition(this->currentInstr->m_next, baseSym, pBucket, block->upwardExposedUses);
pBucket->SetFinalType(pBucket->GetInitialType());
}
}
@@ -5325,9 +5527,6 @@ BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
void
BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
- StackSym *auxSlotPtrSym = nullptr;
- bool auxSlotPtrUpwardExposed = false;
-
Assert(tag == Js::DeadStorePhase);
Assert(opnd->IsTypeCheckSeqCandidate());
@@ -5394,7 +5593,6 @@ BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *
#endif
bucket->AddToGuardedPropertyOps(opnd->GetObjTypeSpecFldId());
- auxSlotPtrUpwardExposed = PHASE_ON(Js::ReuseAuxSlotPtrPhase, this->func) && opnd->UsesAuxSlot() && !opnd->IsLoadedFromProto() && opnd->IsTypeChecked();
if (opnd->NeedsMonoCheck())
{
@@ -5440,6 +5638,7 @@ BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *
this->currentInstr->ChangeEquivalentToMonoTypeCheckBailOut();
}
bucket->SetMonoGuardType(nullptr);
+ this->ClearTypeIDWithFinalType(objSym->m_id, block);
}
if (!opnd->IsTypeAvailable())
@@ -5448,12 +5647,6 @@ BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *
bucket->SetGuardedPropertyOps(nullptr);
JitAdelete(this->tempAlloc, guardedPropertyOps);
block->stackSymToGuardedProperties->Clear(objSym->m_id);
- auxSlotPtrSym = opnd->GetAuxSlotPtrSym();
- if (auxSlotPtrSym)
- {
- this->currentBlock->upwardExposedUses->Clear(auxSlotPtrSym->m_id);
- }
- auxSlotPtrUpwardExposed = false;
}
}
#if DBG
@@ -5472,11 +5665,25 @@ BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *
opnd->SetGuardedPropOp(opnd->GetObjTypeSpecFldId());
}
- if (auxSlotPtrUpwardExposed)
+ if (opnd->UsesAuxSlot() && opnd->IsTypeCheckSeqParticipant() && !opnd->HasTypeMismatch() && !opnd->IsLoadedFromProto())
{
- // This is an upward-exposed use of the aux slot pointer.
- auxSlotPtrSym = opnd->EnsureAuxSlotPtrSym(this->func);
- this->currentBlock->upwardExposedUses->Set(auxSlotPtrSym->m_id);
+ bool auxSlotPtrUpwardExposed = false;
+ StackSym *auxSlotPtrSym = opnd->GetAuxSlotPtrSym();
+ if (opnd->IsAuxSlotPtrSymAvailable())
+ {
+ // This is an upward-exposed use of the aux slot pointer.
+ Assert(auxSlotPtrSym);
+ auxSlotPtrUpwardExposed = this->currentBlock->upwardExposedUses->TestAndSet(auxSlotPtrSym->m_id);
+ }
+ else if (auxSlotPtrSym != nullptr)
+ {
+ // The aux slot pointer is not upward-exposed at this point.
+ auxSlotPtrUpwardExposed = this->currentBlock->upwardExposedUses->TestAndClear(auxSlotPtrSym->m_id);
+ }
+ if (!this->IsPrePass() && auxSlotPtrUpwardExposed)
+ {
+ opnd->SetProducesAuxSlotPtr(true);
+ }
}
}
@@ -5641,6 +5848,7 @@ BackwardPass::TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block
}
pBucket->SetInitialType(typeWithoutProperty);
+ this->SetTypeIDWithFinalType(propertySym->m_stackSym->m_id, block);
if (!PHASE_OFF(Js::ObjTypeSpecStorePhase, this->func))
{
@@ -5728,20 +5936,23 @@ BackwardPass::TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block
#else
block->stackSymToFinalType->Clear(propertySym->m_stackSym->m_id);
#endif
+ this->ClearTypeIDWithFinalType(propertySym->m_stackSym->m_id, block);
}
}
void
-BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data)
+BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
StackSym *objSym = this->func->m_symTable->FindStackSym(symId);
Assert(objSym);
- this->InsertTypeTransition(instrInsertBefore, objSym, data);
+ this->InsertTypeTransition(instrInsertBefore, objSym, data, upwardExposedUses);
}
void
-BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data)
+BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
+ Assert(!this->IsPrePass());
+
IR::RegOpnd *baseOpnd = IR::RegOpnd::New(objSym, TyMachReg, this->func);
baseOpnd->SetIsJITOptimizedReg(true);
@@ -5758,7 +5969,7 @@ BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSy
IR::Instr *adjustTypeInstr =
IR::Instr::New(Js::OpCode::AdjustObjType, finalTypeOpnd, baseOpnd, initialTypeOpnd, this->func);
- if (this->currentBlock->upwardExposedUses)
+ if (upwardExposedUses)
{
// If this type change causes a slot adjustment, the aux slot pointer (if any) will be reloaded here, so take it out of upwardExposedUses.
int oldCount;
@@ -5772,7 +5983,10 @@ BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSy
StackSym *auxSlotPtrSym = baseOpnd->m_sym->GetAuxSlotPtrSym();
if (auxSlotPtrSym)
{
- this->currentBlock->upwardExposedUses->Clear(auxSlotPtrSym->m_id);
+ if (upwardExposedUses->Test(auxSlotPtrSym->m_id))
+ {
+ adjustTypeInstr->m_opcode = Js::OpCode::AdjustObjTypeReloadAuxSlotPtr;
+ }
}
}
}
@@ -5781,7 +5995,7 @@ BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSy
}
void
-BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data)
+BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
if (!this->IsPrePass())
{
@@ -5790,11 +6004,11 @@ BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPro
{
// The instr with the bailout is something like a branch that may not fall through.
// Insert the transitions instead at the beginning of each successor block.
- this->InsertTypeTransitionsAtPriorSuccessors(this->currentBlock, nullptr, symId, data);
+ this->InsertTypeTransitionsAtPriorSuccessors(this->currentBlock, nullptr, symId, data, upwardExposedUses);
}
else
{
- this->InsertTypeTransition(instr->m_next, symId, data);
+ this->InsertTypeTransition(instr->m_next, symId, data, upwardExposedUses);
}
}
// Note: we could probably clear this entry out of the table, but I don't know
@@ -5803,7 +6017,7 @@ BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPro
}
void
-BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data)
+BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
bool inserted = false;
FOREACH_INSTR_IN_BLOCK(instr, block)
@@ -5826,7 +6040,7 @@ BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPrope
}
else
{
- this->InsertTypeTransition(instr, symId, data);
+ this->InsertTypeTransition(instr, symId, data, upwardExposedUses);
inserted = true;
break;
}
@@ -5836,7 +6050,7 @@ BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPrope
if (!inserted)
{
Assert(block->GetLastInstr()->m_next);
- this->InsertTypeTransition(block->GetLastInstr()->m_next, symId, data);
+ this->InsertTypeTransition(block->GetLastInstr()->m_next, symId, data, upwardExposedUses);
}
}
@@ -5845,7 +6059,8 @@ BackwardPass::InsertTypeTransitionsAtPriorSuccessors(
BasicBlock *block,
BasicBlock *blockSucc,
int symId,
- AddPropertyCacheBucket *data)
+ AddPropertyCacheBucket *data,
+ BVSparse<JitArenaAllocator>* upwardExposedUses)
{
// For each successor of block prior to blockSucc, adjust the type.
FOREACH_SUCCESSOR_BLOCK(blockFix, block)
@@ -5855,7 +6070,7 @@ BackwardPass::InsertTypeTransitionsAtPriorSuccessors(
return;
}
- this->InsertTypeTransitionAtBlock(blockFix, symId, data);
+ this->InsertTypeTransitionAtBlock(blockFix, symId, data, upwardExposedUses);
}
NEXT_SUCCESSOR_BLOCK;
}
@@ -5873,7 +6088,7 @@ BackwardPass::InsertTypeTransitionsAtPotentialKills()
// Also do this for ctor cache updates, to avoid putting a type in the ctor cache that extends past
// the end of the ctor that the cache covers.
this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
- this->InsertTypeTransitionAfterInstr(instr, symId, data);
+ this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses);
return false;
});
}
@@ -5899,7 +6114,7 @@ BackwardPass::InsertTypeTransitionsAtPotentialKills()
if (this->TransitionUndoesObjectHeaderInlining(data))
{
// We're transitioning from inlined to non-inlined, so we can't push it up any farther.
- this->InsertTypeTransitionAfterInstr(instr, symId, data);
+ this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses);
}
return false;
});
@@ -5933,6 +6148,40 @@ BackwardPass::ForEachAddPropertyCacheBucket(Fn fn)
NEXT_HASHTABLE_ENTRY;
}
+void
+BackwardPass::SetTypeIDWithFinalType(int symID, BasicBlock *block)
+{
+ BVSparse<JitArenaAllocator> *bv = block->EnsureTypeIDsWithFinalType(this->tempAlloc);
+ bv->Set(symID);
+}
+
+void
+BackwardPass::ClearTypeIDWithFinalType(int symID, BasicBlock *block)
+{
+ BVSparse<JitArenaAllocator> *bv = block->typeIDsWithFinalType;
+ if (bv != nullptr)
+ {
+ bv->Clear(symID);
+ }
+}
+
+bool
+BackwardPass::HasTypeIDWithFinalType(BasicBlock *block) const
+{
+ return block->typeIDsWithFinalType != nullptr && !block->typeIDsWithFinalType->IsEmpty();
+}
+
+void
+BackwardPass::CombineTypeIDsWithFinalType(BasicBlock *block, BasicBlock *blockSucc)
+{
+ BVSparse<JitArenaAllocator> *bvSucc = blockSucc->typeIDsWithFinalType;
+ if (bvSucc != nullptr && !bvSucc->IsEmpty())
+ {
+ BVSparse<JitArenaAllocator> *bv = block->EnsureTypeIDsWithFinalType(this->tempAlloc);
+ bv->Or(bvSucc);
+ }
+}
+
bool
BackwardPass::TransitionUndoesObjectHeaderInlining(AddPropertyCacheBucket *data) const
{
@@ -6139,6 +6388,27 @@ BackwardPass::ProcessPropertySymUse(PropertySym *propertySym)
return isLive;
}
+void
+BackwardPass::DisallowMarkTempAcrossYield(BVSparse<JitArenaAllocator>* bytecodeUpwardExposed)
+{
+ Assert(!this->IsCollectionPass());
+ BasicBlock* block = this->currentBlock;
+ if (this->DoMarkTempNumbers())
+ {
+ block->tempNumberTracker->DisallowMarkTempAcrossYield(bytecodeUpwardExposed);
+ }
+ if (this->DoMarkTempObjects())
+ {
+ block->tempObjectTracker->DisallowMarkTempAcrossYield(bytecodeUpwardExposed);
+ }
+#if DBG
+ if (this->DoMarkTempObjectVerify())
+ {
+ block->tempObjectVerifyTracker->DisallowMarkTempAcrossYield(bytecodeUpwardExposed);
+ }
+#endif
+}
+
void
BackwardPass::MarkTemp(StackSym * sym)
{
@@ -7542,7 +7812,7 @@ BackwardPass::ProcessDef(IR::Opnd * opnd)
PropertySym *propertySym = sym->AsPropertySym();
ProcessStackSymUse(propertySym->m_stackSym, isJITOptimizedReg);
- if(IsCollectionPass())
+ if (IsCollectionPass())
{
return false;
}
@@ -7629,7 +7899,7 @@ BackwardPass::ProcessDef(IR::Opnd * opnd)
}
}
- if(IsCollectionPass())
+ if (IsCollectionPass())
{
return false;
}
@@ -7781,7 +8051,8 @@ BackwardPass::DeadStoreInstr(IR::Instr *instr)
if (instr->m_opcode == Js::OpCode::ArgIn_A)
{
- //Ignore tracking ArgIn for "this", as argInsCount only tracks other params - unless it is a asmjs function(which doesn't have a "this").
+ // Ignore tracking ArgIn for "this" as argInsCount only tracks other
+ // params, unless it is an AsmJS function (which doesn't have a "this").
if (instr->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() != 1 || func->GetJITFunctionBody()->IsAsmJsMode())
{
Assert(this->func->argInsCount > 0);
@@ -7913,7 +8184,6 @@ BackwardPass::ProcessInlineeStart(IR::Instr* inlineeStart)
inlineeStart->m_func->SetFirstArgOffset(inlineeStart);
IR::Instr* startCallInstr = nullptr;
- bool noImplicitCallsInInlinee = false;
// Inlinee has no bailouts or implicit calls. Get rid of the inline overhead.
auto removeInstr = [&](IR::Instr* argInstr)
{
@@ -7935,7 +8205,6 @@ BackwardPass::ProcessInlineeStart(IR::Instr* inlineeStart)
// If there are no implicit calls - bailouts/throws - we can remove all inlining overhead.
if (!inlineeStart->m_func->GetHasImplicitCalls())
{
- noImplicitCallsInInlinee = true;
inlineeStart->IterateArgInstrs(removeInstr);
inlineeStart->IterateMetaArgs([](IR::Instr* metArg)
@@ -7944,17 +8213,19 @@ BackwardPass::ProcessInlineeStart(IR::Instr* inlineeStart)
return false;
});
inlineeStart->m_func->m_hasInlineArgsOpt = false;
+ inlineeStart->m_func->m_hasInlineOverheadRemoved = true;
removeInstr(inlineeStart);
return true;
}
if (!inlineeStart->m_func->m_hasInlineArgsOpt)
{
- PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, _u("%s[%d]: Skipping inline args optimization: %s[%d] HasCalls: %s 'arguments' access: %s Can do inlinee args opt: %s\n"),
+ PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, _u("%s[%d]: Skipping inline args optimization: %s[%d] HasCalls: %s, 'arguments' access: %s, stackArgs enabled: %s, Can do inlinee args opt: %s\n"),
func->GetJITFunctionBody()->GetDisplayName(), func->GetJITFunctionBody()->GetFunctionNumber(),
inlineeStart->m_func->GetJITFunctionBody()->GetDisplayName(), inlineeStart->m_func->GetJITFunctionBody()->GetFunctionNumber(),
IsTrueOrFalse(inlineeStart->m_func->GetHasCalls()),
IsTrueOrFalse(inlineeStart->m_func->GetHasUnoptimizedArgumentsAccess()),
+ IsTrueOrFalse(inlineeStart->m_func->IsStackArgsEnabled()),
IsTrueOrFalse(inlineeStart->m_func->m_canDoInlineArgsOpt));
return false;
}
@@ -8013,10 +8284,12 @@ BackwardPass::ProcessInlineeEnd(IR::Instr* instr)
}
if (this->tag == Js::BackwardPhase)
{
- if (!GlobOpt::DoInlineArgsOpt(instr->m_func))
+ // Commenting out to allow for argument length and argument[constant] optimization
+ // Will revisit in phase two
+ /*if (!GlobOpt::DoInlineArgsOpt(instr->m_func))
{
return;
- }
+ }*/
// This adds a use for function sym as part of InlineeStart & all the syms referenced by the args.
// It ensure they do not get cleared from the copy prop sym map.
@@ -8030,6 +8303,11 @@ BackwardPass::ProcessInlineeEnd(IR::Instr* instr)
}
else if (this->tag == Js::DeadStorePhase)
{
+ if (instr->m_func->GetJITFunctionBody()->UsesArgumentsObject() && !instr->m_func->IsStackArgsEnabled())
+ {
+ instr->m_func->DisableCanDoInlineArgOpt();
+ }
+
if (instr->m_func->m_hasInlineArgsOpt)
{
Assert(instr->m_func->frameInfo);
@@ -8071,9 +8349,21 @@ BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
return false;
}
+ // For generator functions, we don't want to move the BailOutOnNoProfile
+ // above certain instructions such as GeneratorResumeYield or
+ // CreateInterpreterStackFrameForGenerator. This variable records the insertion
+ // point for the BailOutOnNoProfile in such cases.
+ IR::Instr *insertionPointForGenerator = nullptr;
+
// Don't hoist if we see calls with profile data (recursive calls)
while(!curInstr->StartsBasicBlock())
{
+ if (curInstr->DontHoistBailOnNoProfileAboveInGeneratorFunction())
+ {
+ Assert(insertionPointForGenerator == nullptr);
+ insertionPointForGenerator = curInstr;
+ }
+
// If a function was inlined, it must have had profile info.
if (curInstr->m_opcode == Js::OpCode::InlineeEnd || curInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || curInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd
|| curInstr->m_opcode == Js::OpCode::InlineeStart || curInstr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee)
@@ -8144,7 +8434,8 @@ BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
// Now try to move this up the flowgraph to the predecessor blocks
FOREACH_PREDECESSOR_BLOCK(pred, block)
{
- bool hoistBailToPred = true;
+ // Don't hoist BailOnNoProfile up past blocks containing GeneratorResumeYield
+ bool hoistBailToPred = (insertionPointForGenerator == nullptr);
if (block->isLoopHeader && pred->loop == block->loop)
{
@@ -8185,7 +8476,7 @@ BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
// We already have one, we don't need a second.
instrCopy->Free();
}
- else if (!predInstr->AsBranchInstr()->m_isSwitchBr)
+ else if (predInstr->IsBranchInstr() && !predInstr->AsBranchInstr()->m_isSwitchBr)
{
// Don't put a bailout in the middle of a switch dispatch sequence.
// The bytecode offsets are not in order, and it would lead to incorrect
@@ -8220,10 +8511,19 @@ BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
#if DBG
blockHeadInstr->m_noHelperAssert = true;
#endif
- block->beginsBailOnNoProfile = true;
instr->m_func = curInstr->m_func;
- curInstr->InsertAfter(instr);
+
+ if (insertionPointForGenerator != nullptr)
+ {
+ insertionPointForGenerator->InsertAfter(instr);
+ block->beginsBailOnNoProfile = false;
+ }
+ else
+ {
+ curInstr->InsertAfter(instr);
+ block->beginsBailOnNoProfile = true;
+ }
bool setLastInstr = (curInstr == block->GetLastInstr());
if (setLastInstr)
@@ -8395,7 +8695,7 @@ BackwardPass::ReverseCopyProp(IR::Instr *instr)
FOREACH_SLISTBASE_ENTRY(
CopyPropSyms,
usedCopyPropSym,
- &instrPrev->GetBailOutInfo()->usedCapturedValues.copyPropSyms)
+ &instrPrev->GetBailOutInfo()->usedCapturedValues->copyPropSyms)
{
if(dstSym == usedCopyPropSym.Value())
{
@@ -8753,19 +9053,35 @@ BackwardPass::RestoreInductionVariableValuesAfterMemOp(Loop *loop)
opCode = Js::OpCode::Sub_I4;
}
Func *localFunc = loop->GetFunc();
- StackSym *sym = localFunc->m_symTable->FindStackSym(symId)->GetInt32EquivSym(localFunc);
-
+ StackSym *sym = localFunc->m_symTable->FindStackSym(symId);
+ if (!sym->IsInt32())
+ {
+ sym = sym->GetInt32EquivSym(localFunc);
+ }
+
IR::Opnd *inductionVariableOpnd = IR::RegOpnd::New(sym, IRType::TyInt32, localFunc);
- IR::Opnd *sizeOpnd = globOpt->GenerateInductionVariableChangeForMemOp(loop, inductionVariableChangeInfo.unroll, loop->memOpInfo->instr);
- IR::Instr* restoreInductionVarInstr = IR::Instr::New(opCode, inductionVariableOpnd, inductionVariableOpnd, sizeOpnd, loop->GetFunc());
+ IR::Opnd *tempInductionVariableOpnd = IR::RegOpnd::New(IRType::TyInt32, localFunc);
+
+ // The induction variable is restored to a temp register before the MemOp occurs. Once the MemOp is
+ // complete, the induction variable's register is set to the value of the temp register. This is done
+ // in order to avoid overwriting the induction variable's value after a bailout on the MemOp.
+ IR::Instr* restoreInductionVarToTemp = IR::Instr::New(opCode, tempInductionVariableOpnd, inductionVariableOpnd, loop->GetFunc());
// The IR that restores the induction variable's value is placed before the MemOp. Since this IR can
// bailout to the loop's landing pad, placing this IR before the MemOp avoids performing the MemOp,
// bailing out because of this IR, and then performing the effects of the loop again.
- loop->landingPad->InsertInstrBefore(restoreInductionVarInstr, loop->memOpInfo->instr);
+ loop->landingPad->InsertInstrBefore(restoreInductionVarToTemp, loop->memOpInfo->instr);
+
+ // The amount to be added to or subtracted from the induction variable (depending on opCode) after the MemOp.
+ IR::Opnd *sizeOpnd = globOpt->GenerateInductionVariableChangeForMemOp(loop, inductionVariableChangeInfo.unroll, restoreInductionVarToTemp);
+ restoreInductionVarToTemp->SetSrc2(sizeOpnd);
+ IR::Instr* restoreInductionVar = IR::Instr::New(Js::OpCode::Ld_A, inductionVariableOpnd, tempInductionVariableOpnd, loop->GetFunc());
// If restoring an induction variable results in an overflow, bailout to the loop's landing pad.
- restoreInductionVarInstr->ConvertToBailOutInstr(loop->bailOutInfo, IR::BailOutOnOverflow);
+ restoreInductionVarToTemp->ConvertToBailOutInstr(loop->bailOutInfo, IR::BailOutOnOverflow);
+
+ // Restore the induction variable's actual register once all bailouts have been passed.
+ loop->landingPad->InsertAfter(restoreInductionVar);
};
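// A standalone sketch of the ordering the lambda above sets up, for illustration only: the
// overflow-checked update goes into a temp before the side-effecting operation, and the induction
// variable is committed only afterwards, so a bailout on either step still sees the old value.
// Names and the exception used to model the bailout are illustrative, not JIT code.
#include <cassert>
#include <limits>
#include <stdexcept>

static int AddWithOverflowCheck(int a, int b)   // models Add_I4 with BailOutOnOverflow
{
    long long r = static_cast<long long>(a) + b;
    if (r > std::numeric_limits<int>::max() || r < std::numeric_limits<int>::min())
    {
        throw std::runtime_error("bail out to the loop's landing pad");
    }
    return static_cast<int>(r);
}

int main()
{
    int inductionVar = std::numeric_limits<int>::max() - 1;
    try
    {
        int temp = AddWithOverflowCheck(inductionVar, 16);   // restore into a temp first; this overflows
        // ... the MemOp would run here, possibly bailing out itself ...
        inductionVar = temp;                                 // commit only after the MemOp's bailouts
    }
    catch (const std::runtime_error&)
    {
        // After the bailout, inductionVar still holds its pre-MemOp value,
        // which is what the interpreter expects to resume from.
    }
    assert(inductionVar == std::numeric_limits<int>::max() - 1);
    return 0;
}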
for (auto it = loop->memOpInfo->inductionVariableChangeInfoMap->GetIterator(); it.IsValid(); it.MoveNext())
@@ -8825,7 +9141,7 @@ BackwardPass::IsEmptyLoopAfterMemOp(Loop *loop)
{
Assert(instr->GetDst());
if (instr->GetDst()->GetStackSym()
- && loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(globOpt->GetVarSymID(instr->GetDst()->GetStackSym())))
+ && loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(instr->GetDst()->GetStackSym()->m_id))
{
// We have use after the loop for a variable defined inside the loop. So the loop can't be removed.
return false;
diff --git a/lib/Backend/BackwardPass.h b/lib/Backend/BackwardPass.h
index 68a53439c43..a03066eabf3 100644
--- a/lib/Backend/BackwardPass.h
+++ b/lib/Backend/BackwardPass.h
@@ -46,6 +46,7 @@ class BackwardPass
bool ProcessBailOutInfo(IR::Instr * instr);
void ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo);
IR::Instr* ProcessPendingPreOpBailOutInfo(IR::Instr *const currentInstr);
+ void ClearDstUseForPostOpLazyBailOut(IR::Instr *instr);
void ProcessBailOutArgObj(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed);
void ProcessBailOutConstants(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* argSymsBv);
void ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* argSymsBv);
@@ -55,6 +56,7 @@ class BackwardPass
void ProcessPropertySymOpndUse(IR::PropertySymOpnd *opnd);
bool ProcessPropertySymUse(PropertySym *propertySym);
void ProcessNewScObject(IR::Instr* instr);
+ void DisallowMarkTempAcrossYield(BVSparse<JitArenaAllocator>* bytecodeUpwardExposed);
void MarkTemp(StackSym * sym);
bool ProcessInlineeStart(IR::Instr* instr);
void ProcessInlineeEnd(IR::Instr* instr);
@@ -76,7 +78,7 @@ class BackwardPass
void DumpMarkTemp();
#endif
- static bool UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall);
+ static bool UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall, bool needsLazyBailOut);
bool ProcessNoImplicitCallUses(IR::Instr *const instr);
void ProcessNoImplicitCallDef(IR::Instr *const instr);
@@ -105,9 +107,11 @@ class BackwardPass
void TrackFloatSymEquivalence(IR::Instr *const instr);
- void DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields);
+ bool IsLazyBailOutCurrentlyNeeeded(IR::Instr * instr) const;
+ void DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields, bool needsLazyBailOut);
void DeadStoreTypeCheckBailOut(IR::Instr * instr);
- bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool hasLiveFields);
+ void DeadStoreLazyBailOut(IR::Instr * instr, bool needsLazyBailOut);
+ bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool needLazyBailOut, bool hasLiveFields);
bool NeedBailOutOnImplicitCallsForTypedArrayStore(IR::Instr* instr);
bool TrackNoImplicitCallInlinees(IR::Instr *instr);
bool ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block);
@@ -115,6 +119,8 @@ class BackwardPass
bool DoByteCodeUpwardExposedUsed() const;
bool DoCaptureByteCodeUpwardExposedUsed() const;
void DoSetDead(IR::Opnd * opnd, bool isDead) const;
+
+ bool SatisfyMarkTempObjectsConditions() const;
bool DoMarkTempObjects() const;
bool DoMarkTempNumbers() const;
bool DoMarkTempNumbersOnTempObjects() const;
@@ -141,14 +147,19 @@ class BackwardPass
void TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *block);
void TrackObjTypeSpecWriteGuards(IR::PropertySymOpnd *opnd, BasicBlock *block);
void TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block);
- void InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data);
- void InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data);
- void InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data);
- void InsertTypeTransitionsAtPriorSuccessors(BasicBlock *block, BasicBlock *blockSucc, int symId, AddPropertyCacheBucket *data);
- void InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data);
+ void InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses);
+ void InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses);
+ void InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses);
+ void InsertTypeTransitionsAtPriorSuccessors(BasicBlock *block, BasicBlock *blockSucc, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses);
+ void InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses);
void InsertTypeTransitionsAtPotentialKills();
bool TransitionUndoesObjectHeaderInlining(AddPropertyCacheBucket *data) const;
+ void SetTypeIDWithFinalType(int symId, BasicBlock *block);
+ void ClearTypeIDWithFinalType(int symId, BasicBlock *block);
+ bool HasTypeIDWithFinalType(BasicBlock *block) const;
+ void CombineTypeIDsWithFinalType(BasicBlock *block, BasicBlock *blockSucc);
+
template<class Fn> void ForEachAddPropertyCacheBucket(Fn fn);
static ObjTypeGuardBucket MergeGuardedProperties(ObjTypeGuardBucket bucket1, ObjTypeGuardBucket bucket2);
static ObjWriteGuardBucket MergeWriteGuards(ObjWriteGuardBucket bucket1, ObjWriteGuardBucket bucket2);
diff --git a/lib/Backend/BailOut.cpp b/lib/Backend/BailOut.cpp
index 84097adb46d..46dd3dafc5a 100644
--- a/lib/Backend/BailOut.cpp
+++ b/lib/Backend/BailOut.cpp
@@ -4,6 +4,7 @@
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
+#include "CommonPal.h"
#ifdef ENABLE_SCRIPT_DEBUGGING
#include "Debug/DebuggingFlags.h"
#include "Debug/DiagProbe.h"
@@ -13,19 +14,113 @@
extern const IRType RegTypes[RegNumCount];
+// In `FillBailOutRecord`, some of the fields of BailOutInfo are modified directly,
+// so simply doing a shallow copy of pointers when duplicating the BailOutInfo to
+// the helper calls for lazy bailouts would mess things up. Make deep copies of such fields.
+void BailOutInfo::PartialDeepCopyTo(BailOutInfo * const other) const
+{
+ // Primitive types
+ other->wasCloned = this->wasCloned;
+ other->isInvertedBranch = this->isInvertedBranch;
+ other->sharedBailOutKind = this->sharedBailOutKind;
+ other->isLoopTopBailOutInfo = this->isLoopTopBailOutInfo;
+ other->bailOutOffset = this->bailOutOffset;
+ other->polymorphicCacheIndex = this->polymorphicCacheIndex;
+ other->startCallCount = this->startCallCount;
+ other->totalOutParamCount = this->totalOutParamCount;
+ other->stackLiteralBailOutInfoCount = this->stackLiteralBailOutInfoCount;
+#if DBG
+ other->wasCopied = this->wasCopied;
+#endif
+
+ other->bailOutRecord = this->bailOutRecord;
+
+ this->capturedValues->CopyTo(this->bailOutFunc->m_alloc, other->capturedValues);
+ this->usedCapturedValues->CopyTo(this->bailOutFunc->m_alloc, other->usedCapturedValues);
+
+ if (this->byteCodeUpwardExposedUsed != nullptr)
+ {
+ other->byteCodeUpwardExposedUsed = this->byteCodeUpwardExposedUsed->CopyNew(this->bailOutFunc->m_alloc);
+ }
+
+ if (this->liveVarSyms != nullptr)
+ {
+ other->liveVarSyms = this->liveVarSyms->CopyNew(this->bailOutFunc->m_alloc);
+ }
+
+ if (this->liveLosslessInt32Syms != nullptr)
+ {
+ other->liveLosslessInt32Syms = this->liveLosslessInt32Syms->CopyNew(this->bailOutFunc->m_alloc);
+ }
+
+ if (this->liveFloat64Syms != nullptr)
+ {
+ other->liveFloat64Syms = this->liveFloat64Syms->CopyNew(this->bailOutFunc->m_alloc);
+ }
+
+ if (this->outParamInlinedArgSlot != nullptr)
+ {
+ other->outParamInlinedArgSlot = this->outParamInlinedArgSlot->CopyNew(this->bailOutFunc->m_alloc);
+ }
+
+ other->startCallFunc = this->startCallFunc;
+ other->argOutSyms = this->argOutSyms;
+ other->startCallInfo = this->startCallInfo;
+ other->stackLiteralBailOutInfo = this->stackLiteralBailOutInfo;
+ other->outParamOffsets = this->outParamOffsets;
+
+
+#ifdef _M_IX86
+ other->outParamFrameAdjustArgSlot = this->outParamFrameAdjustArgSlot;
+ other->inlinedStartCall = this->inlinedStartCall;
+#endif
+
+ other->bailOutInstr = this->bailOutInstr;
+ other->bailInInstr = this->bailInInstr;
+
+#if ENABLE_DEBUG_CONFIG_OPTIONS
+ other->bailOutOpcode = this->bailOutOpcode;
+#endif
+
+ other->bailOutFunc = this->bailOutFunc;
+ other->branchConditionOpnd = this->branchConditionOpnd;
+}
+
void
BailOutInfo::Clear(JitArenaAllocator * allocator)
{
- // Currently, we don't have a case where we delete bailout info after we allocated the bailout record
- Assert(!bailOutRecord);
+ // Previously, we didn't have a case where we deleted bailout info after the bailout record had been allocated.
+ // However, since lazy bailouts can now be attached to helper call instructions, and those instructions
+ // might sometimes be removed in Peeps, we will hit those cases. Make sure that in such cases, lazy bailout
+ // is the only bailout reason we have.
+ Assert(bailOutRecord == nullptr || BailOutInfo::OnlyHasLazyBailOut(bailOutRecord->bailOutKind));
if (this->capturedValues && this->capturedValues->DecrementRefCount() == 0)
{
this->capturedValues->constantValues.Clear(allocator);
this->capturedValues->copyPropSyms.Clear(allocator);
+
+ if (this->capturedValues->argObjSyms)
+ {
+ JitAdelete(allocator, this->capturedValues->argObjSyms);
+ }
+
JitAdelete(allocator, this->capturedValues);
}
- this->usedCapturedValues.constantValues.Clear(allocator);
- this->usedCapturedValues.copyPropSyms.Clear(allocator);
+
+ if (this->usedCapturedValues)
+ {
+ Assert(this->usedCapturedValues->refCount == 0);
+ this->usedCapturedValues->constantValues.Clear(allocator);
+ this->usedCapturedValues->copyPropSyms.Clear(allocator);
+
+ if (this->usedCapturedValues->argObjSyms)
+ {
+ JitAdelete(allocator, this->usedCapturedValues->argObjSyms);
+ }
+
+ JitAdelete(allocator, this->usedCapturedValues);
+ }
+
if (byteCodeUpwardExposedUsed)
{
JitAdelete(allocator, byteCodeUpwardExposedUsed);
@@ -50,6 +145,37 @@ BailOutInfo::Clear(JitArenaAllocator * allocator)
#endif
}
+// Refer to comments in the header file
+void BailOutInfo::ClearUseOfDst(SymID id)
+{
+ Assert(id != SymID_Invalid);
+ if (this->byteCodeUpwardExposedUsed != nullptr &&
+ this->byteCodeUpwardExposedUsed->Test(id))
+ {
+ this->clearedDstByteCodeUpwardExposedUseId = id;
+ this->byteCodeUpwardExposedUsed->Clear(id);
+ }
+}
+
+void BailOutInfo::RestoreUseOfDst()
+{
+ if (this->byteCodeUpwardExposedUsed != nullptr &&
+ this->NeedsToRestoreUseOfDst())
+ {
+ this->byteCodeUpwardExposedUsed->Set(this->clearedDstByteCodeUpwardExposedUseId);
+ }
+}
+
+bool BailOutInfo::NeedsToRestoreUseOfDst() const
+{
+ return this->clearedDstByteCodeUpwardExposedUseId != SymID_Invalid;
+}
+
+SymID BailOutInfo::GetClearedUseOfDstId() const
+{
+ return this->clearedDstByteCodeUpwardExposedUseId;
+}
+
#ifdef _M_IX86
uint
@@ -576,10 +702,10 @@ BailOutRecord::RestoreValues(IR::BailOutKind bailOutKind, Js::JavascriptCallStac
Assert(RegTypes[LinearScanMD::GetRegisterFromSaveIndex(offset)] != TyFloat64);
value = registerSaveSpace[offset - 1];
}
- Assert(Js::DynamicObject::Is(value));
+ Assert(Js::DynamicObject::IsBaseDynamicObject(value));
Assert(ThreadContext::IsOnStack(value));
- Js::DynamicObject * obj = Js::DynamicObject::FromVar(value);
+ Js::DynamicObject * obj = Js::VarTo<Js::DynamicObject>(value);
uint propertyCount = obj->GetPropertyCount();
for (uint j = record.initFldCount; j < propertyCount; j++)
{
@@ -656,7 +782,7 @@ BailOutRecord::RestoreValues(IR::BailOutKind bailOutKind, Js::JavascriptCallStac
if (branchValueRegSlot != Js::Constants::NoRegister)
{
// Used when a t1 = CmCC is optimize to BrCC, and the branch bails out. T1 needs to be restored
- Assert(branchValue && Js::JavascriptBoolean::Is(branchValue));
+ Assert(branchValue && Js::VarIs<Js::JavascriptBoolean>(branchValue));
Assert(branchValueRegSlot < newInstance->GetJavascriptFunction()->GetFunctionBody()->GetLocalsCount());
newInstance->m_localSlots[branchValueRegSlot] = branchValue;
}
@@ -865,6 +991,23 @@ BailOutRecord::RestoreValue(IR::BailOutKind bailOutKind, Js::JavascriptCallStack
value = Js::JavascriptNumber::ToVar(int32Value, scriptContext);
BAILOUT_VERBOSE_TRACE(newInstance->function->GetFunctionBody(), bailOutKind, _u(", value: %10d (ToVar: 0x%p)"), int32Value, value);
}
+ else if (regSlot == newInstance->function->GetFunctionBody()->GetYieldRegister() && newInstance->function->GetFunctionBody()->IsCoroutine())
+ {
+ // This value can only either be a resume yield object created on the heap or
+ // an iterator result object created on the heap. No need to box either.
+ Assert(value);
+
+#if ENABLE_DEBUG_CONFIG_OPTIONS
+ if (ThreadContext::IsOnStack(value))
+ {
+ BAILOUT_VERBOSE_TRACE(newInstance->function->GetFunctionBody(), bailOutKind, _u(", value: 0x%p (Resume Yield Object)"), value);
+ }
+ else
+ {
+ BAILOUT_VERBOSE_TRACE(newInstance->function->GetFunctionBody(), bailOutKind, _u(", value: 0x%p (Yield Return Value)"), value);
+ }
+#endif
+ }
else
{
BAILOUT_VERBOSE_TRACE(newInstance->function->GetFunctionBody(), bailOutKind, _u(", value: 0x%p"), value);
@@ -1004,7 +1147,7 @@ BailOutRecord::BailOutCommonNoCodeGen(Js::JavascriptCallStackLayout * layout, Ba
BailOutReturnValue * bailOutReturnValue, void * argoutRestoreAddress)
{
Assert(bailOutRecord->parent == nullptr);
- Assert(Js::ScriptFunction::Is(layout->functionObject));
+ Assert(Js::VarIs<Js::ScriptFunction>(layout->functionObject));
Js::ScriptFunction ** functionRef = (Js::ScriptFunction **)&layout->functionObject;
Js::ArgumentReader args(&layout->callInfo, layout->args);
Js::Var result = BailOutHelper(layout, functionRef, args, false, bailOutRecord, bailOutOffset, returnAddress, bailOutKind, registerSaves, bailOutReturnValue, layout->GetArgumentsObjectLocation(), branchValue, argoutRestoreAddress);
@@ -1031,7 +1174,7 @@ uint32 bailOutOffset, void * returnAddress, IR::BailOutKind bailOutKind, Js::Imp
sizeof(registerSaves));
Js::Var result = BailOutCommonNoCodeGen(layout, bailOutRecord, bailOutOffset, returnAddress, bailOutKind, branchValue, registerSaves, bailOutReturnValue, argoutRestoreAddress);
- ScheduleFunctionCodeGen(Js::ScriptFunction::FromVar(layout->functionObject), nullptr, bailOutRecord, bailOutKind, bailOutOffset, savedImplicitCallFlags, returnAddress);
+ ScheduleFunctionCodeGen(Js::VarTo<Js::ScriptFunction>(layout->functionObject), nullptr, bailOutRecord, bailOutKind, bailOutOffset, savedImplicitCallFlags, returnAddress);
return result;
}
@@ -1050,17 +1193,10 @@ BailOutRecord::BailOutInlinedCommon(Js::JavascriptCallStackLayout * layout, Bail
BailOutReturnValue bailOutReturnValue;
Js::ScriptFunction * innerMostInlinee = nullptr;
BailOutInlinedHelper(layout, currentBailOutRecord, bailOutOffset, returnAddress, bailOutKind, registerSaves, &bailOutReturnValue, &innerMostInlinee, false, branchValue);
-
- bool * hasBailedOutBitPtr = layout->functionObject->GetScriptContext()->GetThreadContext()->GetHasBailedOutBitPtr();
- Assert(!bailOutRecord->ehBailoutData || hasBailedOutBitPtr ||
- bailOutRecord->ehBailoutData->ht == Js::HandlerType::HT_Finally /* When we bailout from inlinee in non exception finally, we maynot see hasBailedOutBitPtr*/);
- if (hasBailedOutBitPtr && bailOutRecord->ehBailoutData)
- {
- *hasBailedOutBitPtr = true;
- }
+ SetHasBailedOutBit(bailOutRecord, layout->functionObject->GetScriptContext());
Js::Var result = BailOutCommonNoCodeGen(layout, currentBailOutRecord, currentBailOutRecord->bailOutOffset, returnAddress, bailOutKind, branchValue,
registerSaves, &bailOutReturnValue);
- ScheduleFunctionCodeGen(Js::ScriptFunction::FromVar(layout->functionObject), innerMostInlinee, currentBailOutRecord, bailOutKind, bailOutOffset, savedImplicitCallFlags, returnAddress);
+ ScheduleFunctionCodeGen(Js::VarTo<Js::ScriptFunction>(layout->functionObject), innerMostInlinee, currentBailOutRecord, bailOutKind, bailOutOffset, savedImplicitCallFlags, returnAddress);
return result;
}
@@ -1076,7 +1212,7 @@ BailOutRecord::BailOutFromLoopBodyCommon(Js::JavascriptCallStackLayout * layout,
js_memcpy_s(registerSaves, sizeof(registerSaves), (Js::Var *)layout->functionObject->GetScriptContext()->GetThreadContext()->GetBailOutRegisterSaveSpace(),
sizeof(registerSaves));
uint32 result = BailOutFromLoopBodyHelper(layout, bailOutRecord, bailOutOffset, bailOutKind, branchValue, registerSaves);
- ScheduleLoopBodyCodeGen(Js::ScriptFunction::FromVar(layout->functionObject), nullptr, bailOutRecord, bailOutKind);
+ ScheduleLoopBodyCodeGen(Js::VarTo<Js::ScriptFunction>(layout->functionObject), nullptr, bailOutRecord, bailOutKind);
return result;
}
@@ -1096,20 +1232,40 @@ BailOutRecord::BailOutFromLoopBodyInlinedCommon(Js::JavascriptCallStackLayout *
BailOutReturnValue bailOutReturnValue;
Js::ScriptFunction * innerMostInlinee = nullptr;
BailOutInlinedHelper(layout, currentBailOutRecord, bailOutOffset, returnAddress, bailOutKind, registerSaves, &bailOutReturnValue, &innerMostInlinee, true, branchValue);
- bool * hasBailedOutBitPtr = layout->functionObject->GetScriptContext()->GetThreadContext()->GetHasBailedOutBitPtr();
- Assert(!bailOutRecord->ehBailoutData || hasBailedOutBitPtr ||
- bailOutRecord->ehBailoutData->ht == Js::HandlerType::HT_Finally /* When we bailout from inlinee in non exception finally, we maynot see hasBailedOutBitPtr*/);
- if (hasBailedOutBitPtr && bailOutRecord->ehBailoutData)
- {
- *hasBailedOutBitPtr = true;
- }
-
+ SetHasBailedOutBit(bailOutRecord, layout->functionObject->GetScriptContext());
uint32 result = BailOutFromLoopBodyHelper(layout, currentBailOutRecord, currentBailOutRecord->bailOutOffset,
bailOutKind, nullptr, registerSaves, &bailOutReturnValue);
- ScheduleLoopBodyCodeGen(Js::ScriptFunction::FromVar(layout->functionObject), innerMostInlinee, currentBailOutRecord, bailOutKind);
+ ScheduleLoopBodyCodeGen(Js::VarTo<Js::ScriptFunction>(layout->functionObject), innerMostInlinee, currentBailOutRecord, bailOutKind);
return result;
}
+void
+BailOutRecord::SetHasBailedOutBit(BailOutRecord const * bailOutRecord, Js::ScriptContext * scriptContext)
+{
+ Js::EHBailoutData * ehBailoutData = bailOutRecord->ehBailoutData;
+ if (!ehBailoutData)
+ {
+ return;
+ }
+
+ // When a bailout occurs within a finally region, the hasBailedOutBitPtr associated with the
+ // try-catch-finally or try-finally has already been removed from the stack. In that case,
+ // we set the hasBailedOutBitPtr for the nearest enclosing try or catch region within the
+ // function.
+ while (ehBailoutData->ht == Js::HandlerType::HT_Finally)
+ {
+ if (!ehBailoutData->parent || ehBailoutData->parent->nestingDepth < 0)
+ {
+ return;
+ }
+ ehBailoutData = ehBailoutData->parent;
+ }
+
+ bool * hasBailedOutBitPtr = scriptContext->GetThreadContext()->GetHasBailedOutBitPtr();
+ Assert(hasBailedOutBitPtr);
+ *hasBailedOutBitPtr = true;
+}
+
void
BailOutRecord::BailOutInlinedHelper(Js::JavascriptCallStackLayout * layout, BailOutRecord const *& currentBailOutRecord,
uint32 bailOutOffset, void * returnAddress, IR::BailOutKind bailOutKind, Js::Var * registerSaves, BailOutReturnValue * bailOutReturnValue, Js::ScriptFunction ** innerMostInlinee, bool isInLoopBody, Js::Var branchValue)
@@ -1118,7 +1274,7 @@ BailOutRecord::BailOutInlinedHelper(Js::JavascriptCallStackLayout * layout, Bail
BailOutReturnValue * lastBailOutReturnValue = nullptr;
*innerMostInlinee = nullptr;
- Js::FunctionBody* functionBody = Js::ScriptFunction::FromVar(layout->functionObject)->GetFunctionBody();
+ Js::FunctionBody* functionBody = Js::VarTo<Js::ScriptFunction>(layout->functionObject)->GetFunctionBody();
Js::EntryPointInfo *entryPointInfo;
if(isInLoopBody)
@@ -1143,7 +1299,7 @@ BailOutRecord::BailOutInlinedHelper(Js::JavascriptCallStackLayout * layout, Bail
// object, the cached version (that was previously boxed) will be reused to maintain pointer identity and correctness
// after the transition to the interpreter.
InlinedFrameLayout* outerMostFrame = (InlinedFrameLayout *)(((uint8 *)Js::JavascriptCallStackLayout::ToFramePointer(layout)) - entryPointInfo->GetFrameHeight());
- inlineeFrameRecord->RestoreFrames(functionBody, outerMostFrame, layout, true /* boxArgs */);
+ inlineeFrameRecord->RestoreFrames(functionBody, outerMostFrame, layout, true /*boxArgs*/);
}
}
@@ -1162,7 +1318,7 @@ BailOutRecord::BailOutInlinedHelper(Js::JavascriptCallStackLayout * layout, Bail
Js::ScriptFunction ** functionRef = (Js::ScriptFunction **)&(inlinedFrame->function);
AnalysisAssert(*functionRef);
- Assert(Js::ScriptFunction::Is(inlinedFrame->function));
+ Assert(Js::VarIs<Js::ScriptFunction>(inlinedFrame->function));
if (*innerMostInlinee == nullptr)
{
@@ -1271,8 +1427,6 @@ BailOutRecord::BailOutHelper(Js::JavascriptCallStackLayout * layout, Js::ScriptF
#ifdef ENABLE_SCRIPT_DEBUGGING
bool isInDebugMode = executeFunction->IsInDebugMode();
- AssertMsg(!isInDebugMode || Js::Configuration::Global.EnableJitInDebugMode(),
- "In diag mode we can get here (function has to be JIT'ed) only when EnableJitInDiagMode is true!");
// Adjust bailout offset for debug mode (only scenario when we ignore exception).
if (isInDebugMode)
@@ -1375,63 +1529,32 @@ BailOutRecord::BailOutHelper(Js::JavascriptCallStackLayout * layout, Js::ScriptF
if (executeFunction->IsCoroutine())
{
// If the FunctionBody is a generator then this call is being made by one of the three
- // generator resuming methods: next(), throw(), or return(). They all pass the generator
- // object as the first of two arguments. The real user arguments are obtained from the
- // generator object. The second argument is the ResumeYieldData which is only needed
- // when resuming a generator and not needed when yielding from a generator, as is occurring
- // here.
- AssertMsg(args.Info.Count == 2, "Generator ScriptFunctions should only be invoked by generator APIs with the pair of arguments they pass in -- the generator object and a ResumeYieldData pointer");
- Js::JavascriptGenerator* generator = Js::JavascriptGenerator::FromVar(args[0]);
+ // generator resuming methods: next(), throw(), or return(). They all pass the generator
+ // object as the first of two arguments. The real user arguments are obtained from the
+ // generator object. The second argument is the resume yield object which is only needed
+ // when resuming a generator and not needed when yielding from a generator, as is occurring here.
+ AssertMsg(args.Info.Count == 2, "Generator ScriptFunctions should only be invoked by generator APIs with the pair of arguments they pass in -- the generator object and a resume yield object");
+ Js::JavascriptGenerator* generator = Js::VarTo<Js::JavascriptGenerator>(args[0]);
newInstance = generator->GetFrame();
- if (newInstance != nullptr)
- {
- // BailOut will recompute OutArg pointers based on BailOutRecord. Reset them back
- // to initial position before that happens so that OP_StartCall calls don't accumulate
- // incorrectly over multiple yield bailouts.
- newInstance->ResetOut();
-
- // The debugger relies on comparing stack addresses of frames to decide when a step_out is complete so
- // give the InterpreterStackFrame a legit enough stack address to make this comparison work.
- newInstance->m_stackAddress = reinterpret_cast<DWORD_PTR>(&generator);
- }
- else
- {
- //
- // Allocate a new InterpreterStackFrame instance on the recycler heap.
- // It will live with the JavascriptGenerator object.
- //
- Js::Arguments generatorArgs = generator->GetArguments();
- Js::InterpreterStackFrame::Setup setup(function, generatorArgs, true, isInlinee);
- Assert(setup.GetStackAllocationVarCount() == 0);
- size_t varAllocCount = setup.GetAllocationVarCount();
- size_t varSizeInBytes = varAllocCount * sizeof(Js::Var);
- DWORD_PTR stackAddr = reinterpret_cast<DWORD_PTR>(&generator); // as mentioned above, use any stack address from this frame to ensure correct debugging functionality
- Js::LoopHeader* loopHeaderArray = executeFunction->GetHasAllocatedLoopHeaders() ? executeFunction->GetLoopHeaderArrayPtr() : nullptr;
-
- allocation = RecyclerNewPlus(functionScriptContext->GetRecycler(), varSizeInBytes, Js::Var);
-
- // Initialize the interpreter stack frame (constants) but not the param, the bailout record will restore the value
-#if DBG
- // Allocate invalidVar on GC instead of stack since this InterpreterStackFrame will out live the current real frame
- Js::Var invalidVar = (Js::RecyclableObject*)RecyclerNewPlusLeaf(functionScriptContext->GetRecycler(), sizeof(Js::RecyclableObject), Js::Var);
- memset(invalidVar, 0xFE, sizeof(Js::RecyclableObject));
-#endif
-
- newInstance = setup.InitializeAllocation(allocation, nullptr, false, false, loopHeaderArray, stackAddr
-#if DBG
- , invalidVar
-#endif
- );
+ // The jit relies on the interpreter stack frame to store various pieces of information, such as
+ // for-in enumerators. Therefore, we always create an interpreter stack frame for generators
+ // as part of the resume jump table, at the beginning of the jit'd function, if it doesn't
+ // already exist.
+ Assert(newInstance != nullptr);
- newInstance->m_reader.Create(executeFunction);
+ // BailOut will recompute OutArg pointers based on BailOutRecord. Reset them back
+ // to initial position before that happens so that OP_StartCall calls don't accumulate
+ // incorrectly over multiple yield bailouts.
+ newInstance->ResetOut();
- generator->SetFrame(newInstance, varSizeInBytes);
- }
+ // The debugger relies on comparing stack addresses of frames to decide when a step_out is complete so
+ // give the InterpreterStackFrame a legit enough stack address to make this comparison work.
+ newInstance->m_stackAddress = reinterpret_cast<DWORD_PTR>(&generator);
}
else
{
- Js::InterpreterStackFrame::Setup setup(function, args, true, isInlinee);
+ Js::InterpreterStackFrame::Setup setup(function, args, true /* bailedOut */, isInlinee);
size_t varAllocCount = setup.GetAllocationVarCount();
size_t stackVarAllocCount = setup.GetStackAllocationVarCount();
size_t varSizeInBytes;
@@ -2694,7 +2817,7 @@ void BailOutRecord::CheckPreemptiveRejit(Js::FunctionBody* executeFunction, IR::
Js::Var BailOutRecord::BailOutForElidedYield(void * framePointer)
{
- JIT_HELPER_REENTRANT_HEADER(NoSaveRegistersBailOutForElidedYield);
+ JIT_HELPER_NOT_REENTRANT_NOLOCK_HEADER(NoSaveRegistersBailOutForElidedYield);
Js::JavascriptCallStackLayout * const layout = Js::JavascriptCallStackLayout::FromFramePointer(framePointer);
Js::ScriptFunction ** functionRef = (Js::ScriptFunction **)&layout->functionObject;
Js::ScriptFunction * function = *functionRef;
@@ -2707,8 +2830,7 @@ Js::Var BailOutRecord::BailOutForElidedYield(void * framePointer)
Js::InterpreterStackFrame* frame = generator->GetFrame();
ThreadContext *threadContext = frame->GetScriptContext()->GetThreadContext();
- Js::ResumeYieldData* resumeYieldData = static_cast<Js::ResumeYieldData*>(layout->args[1]);
- frame->SetNonVarReg(executeFunction->GetYieldRegister(), resumeYieldData);
+ frame->SetNonVarReg(executeFunction->GetYieldRegister(), layout->args[1]);
// The debugger relies on comparing stack addresses of frames to decide when a step_out is complete so
// give the InterpreterStackFrame a legit enough stack address to make this comparison work.
@@ -2850,16 +2972,11 @@ SharedBailOutRecord::SharedBailOutRecord(uint32 bailOutOffset, uint bailOutCache
this->type = BailoutRecordType::Shared;
}
-void LazyBailOutRecord::SetBailOutKind()
-{
- this->bailoutRecord->SetBailOutKind(IR::BailOutKind::LazyBailOut);
-}
-
#if DBG
-void LazyBailOutRecord::Dump(Js::FunctionBody* functionBody)
+void LazyBailOutRecord::Dump(Js::FunctionBody* functionBody) const
{
OUTPUT_PRINT(functionBody);
- Output::Print(_u("Bytecode Offset: #%04x opcode: %s"), this->bailoutRecord->GetBailOutOffset(), Js::OpCodeUtil::GetOpCodeName(this->bailoutRecord->GetBailOutOpCode()));
+ Output::Print(_u("Bytecode Offset: #%04x opcode: %s"), this->bailOutRecord->GetBailOutOffset(), Js::OpCodeUtil::GetOpCodeName(this->bailOutRecord->GetBailOutOpCode()));
}
#endif
@@ -2926,4 +3043,3 @@ void GlobalBailOutRecordDataTable::AddOrUpdateRow(JitArenaAllocator *allocator,
rowToInsert->regSlot = regSlot;
*lastUpdatedRowIndex = length++;
}
-
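
To make the deep-copy requirement in `PartialDeepCopyTo` above concrete, here is a minimal standalone sketch, not ChakraCore code: the `FakeBailOutInfo` type and its `std::set` field are invented stand-ins for `BailOutInfo` and its `BVSparse` bit vectors. It shows why a shallow pointer copy is unsafe once `FillBailOutRecord`-style code mutates the original's fields in place.

```cpp
// Minimal illustration (hypothetical types, not ChakraCore's): why a shallow
// pointer copy of mutable fields is unsafe when the original is later mutated.
#include <cassert>
#include <memory>
#include <set>

struct FakeBailOutInfo
{
    // Stands in for byteCodeUpwardExposedUsed (a BVSparse in the real code).
    std::shared_ptr<std::set<int>> upwardExposedSyms;
};

int main()
{
    FakeBailOutInfo original;
    original.upwardExposedSyms = std::make_shared<std::set<int>>(std::set<int>{1, 2, 3});

    // Shallow copy: both infos point at the same set.
    FakeBailOutInfo shallow = original;

    // Deep copy, analogous to CopyNew() in the patch: clone the set itself.
    FakeBailOutInfo deep;
    deep.upwardExposedSyms = std::make_shared<std::set<int>>(*original.upwardExposedSyms);

    // In-place mutation of the original, as FillBailOutRecord does to its fields.
    original.upwardExposedSyms->erase(2);

    assert(shallow.upwardExposedSyms->count(2) == 0); // mutation leaked into the shallow copy
    assert(deep.upwardExposedSyms->count(2) == 1);    // deep copy is unaffected
    return 0;
}
```
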
diff --git a/lib/Backend/BailOut.h b/lib/Backend/BailOut.h
index 208d940552f..1cacb220adc 100644
--- a/lib/Backend/BailOut.h
+++ b/lib/Backend/BailOut.h
@@ -26,11 +26,12 @@ class BailOutInfo
BailOutInfo(uint32 bailOutOffset, Func* bailOutFunc) :
bailOutOffset(bailOutOffset), bailOutFunc(bailOutFunc),
- byteCodeUpwardExposedUsed(nullptr), polymorphicCacheIndex((uint)-1), startCallCount(0), startCallInfo(nullptr), bailOutInstr(nullptr),
+ byteCodeUpwardExposedUsed(nullptr), polymorphicCacheIndex((uint)-1), startCallCount(0), startCallInfo(nullptr), bailOutInstr(nullptr), bailInInstr(nullptr),
totalOutParamCount(0), argOutSyms(nullptr), bailOutRecord(nullptr), wasCloned(false), isInvertedBranch(false), sharedBailOutKind(true), isLoopTopBailOutInfo(false), canDeadStore(true),
outParamInlinedArgSlot(nullptr), liveVarSyms(nullptr), liveLosslessInt32Syms(nullptr), liveFloat64Syms(nullptr),
branchConditionOpnd(nullptr),
- stackLiteralBailOutInfoCount(0), stackLiteralBailOutInfo(nullptr)
+ stackLiteralBailOutInfoCount(0), stackLiteralBailOutInfo(nullptr),
+ clearedDstByteCodeUpwardExposedUseId(SymID_Invalid)
{
Assert(bailOutOffset != Js::Constants::NoByteCodeOffset);
#ifdef _M_IX86
@@ -41,10 +42,27 @@ class BailOutInfo
#endif
this->capturedValues = JitAnew(bailOutFunc->m_alloc, CapturedValues);
this->capturedValues->refCount = 1;
- this->usedCapturedValues.argObjSyms = nullptr;
+
+ this->usedCapturedValues = JitAnew(bailOutFunc->m_alloc, CapturedValues);
+ this->usedCapturedValues->argObjSyms = nullptr;
}
+
+ void PartialDeepCopyTo(BailOutInfo *const bailOutInfo) const;
void Clear(JitArenaAllocator * allocator);
+ // Lazy bailout
+ //
+ // Workaround for dealing with the use of the destination register of `Call` instructions with postop lazy bailout.
+ // As an example, in globopt we have s1 = Call and s1 is in byteCodeUpwardExposedUse,
+ // but after lowering, the instructions are: s3 = Call, s1 = s3.
+ // If we add a postop lazy bailout to s3 = Call, we would create a use of s1 right at that instruction.
+ // However, s1 at that point is not initialized yet.
+ // As a workaround, we clear the use of s1 and restore it if we determine that lazy bailout is not needed.
+ void ClearUseOfDst(SymID id);
+ void RestoreUseOfDst();
+ bool NeedsToRestoreUseOfDst() const;
+ SymID GetClearedUseOfDstId() const;
+
void FinalizeBailOutRecord(Func * func);
#ifdef MD_GROW_LOCALS_AREA_UP
void FinalizeOffsets(__in_ecount(count) int * offsets, uint count, Func *func, BVSparse<JitArenaAllocator> *bvInlinedArgSlot);
@@ -64,6 +82,26 @@ class BailOutInfo
kindMinusBits == IR::BailOutOnImplicitCallsPreOp;
}
+ static bool OnlyHasLazyBailOut(IR::BailOutKind kind)
+ {
+ return kind == IR::LazyBailOut;
+ }
+
+ static bool HasLazyBailOut(IR::BailOutKind kind)
+ {
+ return (kind & IR::LazyBailOut) != 0;
+ }
+
+ static IR::BailOutKind WithoutLazyBailOut(IR::BailOutKind kind)
+ {
+ return kind & ~IR::LazyBailOut;
+ }
+
+ static IR::BailOutKind WithLazyBailOut(IR::BailOutKind kind)
+ {
+ return kind | IR::LazyBailOut;
+ }
+
#if DBG
static bool IsBailOutHelper(IR::JnHelperMethod helper);
#endif
@@ -76,11 +114,12 @@ class BailOutInfo
#if DBG
bool wasCopied;
#endif
+ SymID clearedDstByteCodeUpwardExposedUseId;
uint32 bailOutOffset;
BailOutRecord * bailOutRecord;
- CapturedValues* capturedValues; // Values we know about after forward pass
- CapturedValues usedCapturedValues; // Values that need to be restored in the bail out
- BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed; // Non-constant stack syms that need to be restored in the bail out
+ CapturedValues * capturedValues; // Values we know about after forward pass
+ CapturedValues * usedCapturedValues; // Values that need to be restored in the bail out
+ BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed; // Non-constant stack syms that need to be restored in the bail out
uint polymorphicCacheIndex;
uint startCallCount;
uint totalOutParamCount;
@@ -121,6 +160,8 @@ class BailOutInfo
// 2) After we generated bailout, this becomes label instr. In case of shared bailout other instrs JMP to this label.
IR::Instr * bailOutInstr;
+ IR::GeneratorBailInInstr * bailInInstr;
+
#if ENABLE_DEBUG_CONFIG_OPTIONS
Js::OpCode bailOutOpcode;
#endif
@@ -238,6 +279,8 @@ class BailOutRecord
static uint32 BailOutFromLoopBodyHelper(Js::JavascriptCallStackLayout * layout, BailOutRecord const * bailOutRecord,
uint32 bailOutOffset, IR::BailOutKind bailOutKind, Js::Var branchValue, Js::Var * registerSaves, BailOutReturnValue * returnValue = nullptr);
+ static void SetHasBailedOutBit(BailOutRecord const * bailOutRecord, Js::ScriptContext * scriptContext);
+
static void UpdatePolymorphicFieldAccess(Js::JavascriptFunction * function, BailOutRecord const * bailOutRecord);
static void ScheduleFunctionCodeGen(Js::ScriptFunction * function, Js::ScriptFunction * innerMostInlinee, BailOutRecord const * bailOutRecord, IR::BailOutKind bailOutKind,
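
The lazy-bailout predicates added to `BailOutInfo` (`OnlyHasLazyBailOut`, `HasLazyBailOut`, `WithoutLazyBailOut`, `WithLazyBailOut`) are plain bit-flag tests. The sketch below mirrors their semantics with an invented flag enum rather than the real `IR::BailOutKind`, just to show how a lazy bailout can be layered on top of another bailout reason and stripped off again.

```cpp
// Standalone sketch (not the real IR::BailOutKind) of the flag helpers added to
// BailOutInfo: LazyBailOut is a single bit that can be combined with other kinds.
#include <cassert>
#include <cstdint>

enum BailOutKind : uint32_t
{
    BailOutInvalid         = 0,
    BailOutOnImplicitCalls = 1u << 0,
    BailOutMarkTempObject  = 1u << 1,
    LazyBailOut            = 1u << 31,  // most significant bit, mirroring the patch
};

static bool OnlyHasLazyBailOut(uint32_t kind)     { return kind == LazyBailOut; }
static bool HasLazyBailOut(uint32_t kind)         { return (kind & LazyBailOut) != 0; }
static uint32_t WithoutLazyBailOut(uint32_t kind) { return kind & ~LazyBailOut; }
static uint32_t WithLazyBailOut(uint32_t kind)    { return kind | LazyBailOut; }

int main()
{
    uint32_t kind = WithLazyBailOut(BailOutOnImplicitCalls);
    assert(HasLazyBailOut(kind));
    assert(!OnlyHasLazyBailOut(kind));                       // another reason is still present
    assert(WithoutLazyBailOut(kind) == BailOutOnImplicitCalls);
    assert(OnlyHasLazyBailOut(WithLazyBailOut(BailOutInvalid)));
    return 0;
}
```
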
diff --git a/lib/Backend/BailOutKind.h b/lib/Backend/BailOutKind.h
index 156e299dfe4..93faae0dc5a 100644
--- a/lib/Backend/BailOutKind.h
+++ b/lib/Backend/BailOutKind.h
@@ -7,12 +7,12 @@
#error BAIL_OUT_KIND, BAIL_OUT_KIND_VALUE, and BAIL_OUT_KIND_VALUE_LAST must be defined before including this file.
#endif
/* kind */ /* allowed bits */
-BAIL_OUT_KIND(BailOutInvalid, IR::BailOutOnResultConditions | IR::BailOutForArrayBits | IR::BailOutForDebuggerBits | IR::BailOutMarkTempObject)
+BAIL_OUT_KIND(BailOutInvalid, IR::BailOutOnResultConditions | IR::BailOutForArrayBits | IR::BailOutForDebuggerBits | IR::BailOutMarkTempObject | IR::LazyBailOut)
BAIL_OUT_KIND(BailOutIntOnly, IR::BailOutMarkTempObject)
BAIL_OUT_KIND(BailOutNumberOnly, IR::BailOutMarkTempObject)
BAIL_OUT_KIND(BailOutPrimitiveButString, IR::BailOutMarkTempObject)
-BAIL_OUT_KIND(BailOutOnImplicitCalls, IR::BailOutForArrayBits)
-BAIL_OUT_KIND(BailOutOnImplicitCallsPreOp, (IR::BailOutOnResultConditions | IR::BailOutForArrayBits | IR::BailOutMarkTempObject) & ~IR::BailOutOnArrayAccessHelperCall )
+BAIL_OUT_KIND(BailOutOnImplicitCalls, IR::BailOutForArrayBits | IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutOnImplicitCallsPreOp, (IR::BailOutOnResultConditions | IR::BailOutForArrayBits | IR::BailOutMarkTempObject | IR::LazyBailOut) & ~IR::BailOutOnArrayAccessHelperCall )
BAIL_OUT_KIND(BailOutOnNotPrimitive, IR::BailOutMarkTempObject)
BAIL_OUT_KIND(BailOutOnMemOpError, IR::BailOutForArrayBits)
BAIL_OUT_KIND(BailOutOnInlineFunction, 0)
@@ -20,21 +20,21 @@ BAIL_OUT_KIND(BailOutOnNoProfile, 0)
BAIL_OUT_KIND(BailOutOnPolymorphicInlineFunction, 0)
BAIL_OUT_KIND(BailOutOnFailedPolymorphicInlineTypeCheck, 0)
BAIL_OUT_KIND(BailOutShared, 0)
-BAIL_OUT_KIND(BailOutOnNotArray, IR::BailOutOnMissingValue)
-BAIL_OUT_KIND(BailOutOnNotNativeArray, IR::BailOutOnMissingValue)
-BAIL_OUT_KIND(BailOutConventionalTypedArrayAccessOnly, IR::BailOutMarkTempObject)
-BAIL_OUT_KIND(BailOutOnIrregularLength, IR::BailOutMarkTempObject)
+BAIL_OUT_KIND(BailOutOnNotArray, IR::BailOutOnMissingValue | IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutOnNotNativeArray, IR::BailOutOnMissingValue | IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutConventionalTypedArrayAccessOnly, IR::BailOutMarkTempObject | IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutOnIrregularLength, IR::BailOutMarkTempObject | IR::LazyBailOut)
BAIL_OUT_KIND(BailOutCheckThis, 0)
BAIL_OUT_KIND(BailOutOnTaggedValue, 0)
-BAIL_OUT_KIND(BailOutFailedTypeCheck, IR::BailOutMarkTempObject)
-BAIL_OUT_KIND(BailOutFailedEquivalentTypeCheck, IR::BailOutMarkTempObject)
+BAIL_OUT_KIND(BailOutFailedTypeCheck, IR::BailOutMarkTempObject | IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutFailedEquivalentTypeCheck, IR::BailOutMarkTempObject | IR::LazyBailOut)
BAIL_OUT_KIND(BailOutInjected, 0)
BAIL_OUT_KIND(BailOutExpectingInteger, 0)
BAIL_OUT_KIND(BailOutExpectingString, 0)
BAIL_OUT_KIND(BailOutFailedInlineTypeCheck, IR::BailOutMarkTempObject)
-BAIL_OUT_KIND(BailOutFailedFixedFieldTypeCheck, IR::BailOutMarkTempObject)
-BAIL_OUT_KIND(BailOutFailedFixedFieldCheck, 0)
-BAIL_OUT_KIND(BailOutFailedEquivalentFixedFieldTypeCheck, IR::BailOutMarkTempObject)
+BAIL_OUT_KIND(BailOutFailedFixedFieldTypeCheck, IR::BailOutMarkTempObject | IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutFailedFixedFieldCheck, IR::LazyBailOut)
+BAIL_OUT_KIND(BailOutFailedEquivalentFixedFieldTypeCheck, IR::BailOutMarkTempObject | IR::LazyBailOut)
BAIL_OUT_KIND(BailOutOnFloor, 0)
BAIL_OUT_KIND(BailOnModByPowerOf2, 0)
BAIL_OUT_KIND(BailOnIntMin, 0)
@@ -42,7 +42,6 @@ BAIL_OUT_KIND(BailOnDivResultNotInt, IR::BailOutOnDivByZero | IR:
BAIL_OUT_KIND(BailOnSimpleJitToFullJitLoopBody, 0)
BAIL_OUT_KIND(BailOutFailedCtorGuardCheck, 0)
BAIL_OUT_KIND(BailOutOnFailedHoistedBoundCheck, 0)
-BAIL_OUT_KIND(LazyBailOut, 0)
BAIL_OUT_KIND(BailOutOnFailedHoistedLoopCountBasedBoundCheck, 0)
BAIL_OUT_KIND(BailOutForGeneratorYield, 0)
BAIL_OUT_KIND(BailOutOnException, 0)
@@ -110,9 +109,11 @@ BAIL_OUT_KIND_VALUE(BailOutOnDivSrcConditions, BailOutOnDivByZero | BailOutOnDiv
#define BAIL_OUT_KIND_MISC_BIT_START BAIL_OUT_KIND_DIV_SRC_CONDITIONS_BIT_START + 2
BAIL_OUT_KIND_VALUE(BailOutMarkTempObject, 1 << (BAIL_OUT_KIND_MISC_BIT_START + 0))
+// This is the most significant bit; we must cast it to unsigned int so that the compiler knows we are not using a negative number.
+BAIL_OUT_KIND_VALUE(LazyBailOut, (uint) 1 << (BAIL_OUT_KIND_MISC_BIT_START + 1))
+BAIL_OUT_KIND_VALUE(BailOutMisc, BailOutMarkTempObject | LazyBailOut)
-
-BAIL_OUT_KIND_VALUE_LAST(BailOutKindBits, BailOutMarkTempObject | BailOutOnDivSrcConditions | BailOutOnResultConditions | BailOutForArrayBits | BailOutForDebuggerBits)
+BAIL_OUT_KIND_VALUE_LAST(BailOutKindBits, BailOutMisc | BailOutOnDivSrcConditions | BailOutOnResultConditions | BailOutForArrayBits | BailOutForDebuggerBits)
// Help caller undefine the macros
#undef BAIL_OUT_KIND
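
The `(uint)` cast on the new `LazyBailOut` value matters because it occupies bit 31. The standalone sketch below, which is not ChakraCore code, shows the sign-extension surprise you can get when that bit is built from a signed value and later widened, which is exactly what the cast avoids.

```cpp
// Why the patch casts the most-significant flag bit to unsigned: a sketch of
// the sign-extension behavior when bit 31 comes from a signed constant.
#include <cstdint>
#include <cstdio>

int main()
{
    const uint32_t lazyBitUnsigned = 1u << 31;    // well-defined: 0x80000000
    const int32_t  lazyBitSigned   = INT32_MIN;   // same bit pattern, but negative

    // Widening to 64 bits (e.g. when mixing with a larger mask) sign-extends
    // the signed version and fills the upper 32 bits with 1s.
    const uint64_t fromUnsigned = lazyBitUnsigned;                        // 0x0000000080000000
    const uint64_t fromSigned   = static_cast<uint64_t>(lazyBitSigned);   // 0xFFFFFFFF80000000

    std::printf("unsigned: 0x%016llx\n", static_cast<unsigned long long>(fromUnsigned));
    std::printf("signed:   0x%016llx\n", static_cast<unsigned long long>(fromSigned));
    return 0;
}
```
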
diff --git a/lib/Backend/CMakeLists.txt b/lib/Backend/CMakeLists.txt
index 6825144d4eb..f4b923b58d9 100644
--- a/lib/Backend/CMakeLists.txt
+++ b/lib/Backend/CMakeLists.txt
@@ -1,5 +1,59 @@
+if(CC_TARGETS_AMD64)
+ set (CC_BACKEND_ARCH_FOLDER amd64)
+ set (CC_BACKEND_ARCH_FILES
+ amd64/EncoderMD.cpp
+ amd64/LinearScanMD.cpp
+ amd64/LowererMDArch.cpp
+ amd64/PeepsMD.cpp
+ amd64/PrologEncoderMD.cpp
+ amd64/LinearScanMdA.S
+ amd64/Thunks.S
+ AgenPeeps.cpp
+ EhFrame.cpp
+ LowerMDShared.cpp
+ LowerMDSharedSimd128.cpp
+ PrologEncoder.cpp
+ )
+elseif(CC_TARGETS_X86)
+ set (CC_BACKEND_ARCH_FOLDER i386)
+ set (CC_BACKEND_ARCH_FILES
+ i386/EncoderMD.cpp
+ i386/LinearScanMD.cpp
+ i386/LowererMDArch.cpp
+ i386/PeepsMD.cpp
+ AgenPeeps.cpp
+ LowerMDShared.cpp
+ LowerMDSharedSimd128.cpp
+ )
+elseif(CC_TARGETS_ARM64)
+ set (CC_BACKEND_ARCH_FOLDER arm64)
+ set (CC_BACKEND_ARCH_FILES
+ arm64/ARM64LogicalImmediates.cpp
+ arm64/ARM64UnwindEncoder.cpp
+ arm64/EncoderMD.cpp
+ arm64/LegalizeMD.cpp
+ arm64/LinearScanMD.cpp
+ arm64/LinearScanMdA.S
+ arm64/LowerMD.cpp
+ arm64/PeepsMD.cpp
+ arm64/Thunks.S
+ arm64/UnwindInfoManager.cpp
+ )
+elseif(CC_TARGETS_ARM)
+ set (CC_BACKEND_ARCH_FOLDER arm)
+ set (CC_BACKEND_ARCH_FILES
+ arm/EncoderMD.cpp
+ arm/LegalizeMD.cpp
+ arm/LinearScanMD.cpp
+ arm/LinearScanMdA.asm
+ arm/LowerMD.cpp
+ arm/PeepsMD.cpp
+ arm/Thunks.asm
+ arm/UnwindInfoManager.cpp
+ )
+endif()
+
add_library (Chakra.Backend OBJECT
- AgenPeeps.cpp
AsmJsJITInfo.cpp
Backend.cpp
BackendApi.cpp
@@ -12,7 +66,6 @@ add_library (Chakra.Backend OBJECT
CodeGenWorkItem.cpp
DbCheckPostLower.cpp
Debug.cpp
- EhFrame.cpp
EmitBuffer.cpp
Encoder.cpp
EquivalentTypeSet.cpp
@@ -57,8 +110,6 @@ add_library (Chakra.Backend OBJECT
JnHelperMethod.cpp
LinearScan.cpp
Lower.cpp
- LowerMDShared.cpp
- LowerMDSharedSimd128.cpp
NativeCodeData.cpp
NativeCodeGenerator.cpp
NativeEntryPointData.cpp
@@ -68,7 +119,6 @@ add_library (Chakra.Backend OBJECT
PageAllocatorPool.cpp
Peeps.cpp
PreLowerPeeps.cpp
- PrologEncoder.cpp
QueuedFullJitWorkItem.cpp
Region.cpp
SccLiveness.cpp
@@ -83,30 +133,12 @@ add_library (Chakra.Backend OBJECT
TempTracker.cpp
ValueInfo.cpp
ValueRelativeOffset.cpp
- amd64/EncoderMD.cpp
- amd64/LinearScanMD.cpp
- amd64/LowererMDArch.cpp
- amd64/PeepsMD.cpp
- amd64/PrologEncoderMD.cpp
- amd64/LinearScanMdA.S
- amd64/Thunks.S
-# arm64/EncoderMD.cpp
-# arm64/LowerMD.cpp
-# arm/EncoderMD.cpp
-# arm/LegalizeMD.cpp
-# arm/LinearScanMD.cpp
-# arm/LowerMD.cpp
-# arm/PeepsMD.cpp
-# arm/UnwindInfoManager.cpp
-# i386/EncoderMD.cpp
-# i386/LinearScanMD.cpp
-# i386/LowererMDArch.cpp
-# i386/PeepsMD.cpp
+ ${CC_BACKEND_ARCH_FILES}
)
target_include_directories (
Chakra.Backend PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}
- amd64
+ ${CC_BACKEND_ARCH_FOLDER}
../Common
../JITIDL
../Runtime
diff --git a/lib/Backend/CodeGenNumberAllocator.cpp b/lib/Backend/CodeGenNumberAllocator.cpp
index 19ec6b2b200..ddf424c2dd2 100644
--- a/lib/Backend/CodeGenNumberAllocator.cpp
+++ b/lib/Backend/CodeGenNumberAllocator.cpp
@@ -571,4 +571,4 @@ XProcNumberPageSegmentManager::~XProcNumberPageSegmentManager()
temp = (XProcNumberPageSegmentImpl*)next;
}
}
-#endif
\ No newline at end of file
+#endif
diff --git a/lib/Backend/DbCheckPostLower.cpp b/lib/Backend/DbCheckPostLower.cpp
index 59f10a4d9af..1a77f7c70d4 100644
--- a/lib/Backend/DbCheckPostLower.cpp
+++ b/lib/Backend/DbCheckPostLower.cpp
@@ -20,6 +20,7 @@ DbCheckPostLower::Check()
{
case IR::InstrKindLabel:
case IR::InstrKindProfiledLabel:
+ {
isInHelperBlock = instr->AsLabelInstr()->isOpHelper;
if (doOpHelperCheck && !isInHelperBlock && !instr->AsLabelInstr()->m_noHelperAssert)
{
@@ -82,7 +83,7 @@ DbCheckPostLower::Check()
}
}
break;
-
+ }
case IR::InstrKindBranch:
if (doOpHelperCheck && !isInHelperBlock)
{
@@ -283,4 +284,135 @@ void DbCheckPostLower::Check(IR::RegOpnd *regOpnd)
}
}
+#if defined(_M_IX86) || defined(_M_X64)
+
+bool
+DbCheckPostLower::IsEndBoundary(IR::Instr *instr)
+{
+ const Js::OpCode opcode = instr->m_opcode;
+ return instr->IsLabelInstr() ||
+ opcode == Js::OpCode::CMP ||
+ opcode == Js::OpCode::TEST ||
+ opcode == Js::OpCode::JMP;
+}
+
+void
+DbCheckPostLower::EnsureValidEndBoundary(IR::Instr *instr)
+{
+ AssertMsg(IsEndBoundary(instr), "Nested helper call. Not a valid end boundary.");
+ if (instr->IsLabelInstr() && instr->AsLabelInstr()->GetNextNonEmptyLabel()->isOpHelper)
+ {
+ instr->Dump();
+ AssertMsg(false, "Nested helper call. Falling through a helper label.");
+ }
+
+ if (instr->m_opcode == Js::OpCode::JMP && instr->AsBranchInstr()->GetTarget()->GetNextNonEmptyLabel()->isOpHelper)
+ {
+ instr->Dump();
+ AssertMsg(false, "Nested helper call. Jumping to a helper label.");
+ }
+}
+
+bool
+DbCheckPostLower::IsAssign(IR::Instr *instr)
+{
+ return LowererMD::IsAssign(instr)
+#ifdef _M_X64
+ || instr->m_opcode == Js::OpCode::MOVQ
+#endif
+ ;
+}
+
+bool
+DbCheckPostLower::IsCallToHelper(IR::Instr *instr, IR::JnHelperMethod method)
+{
+ IR::Instr *prev = instr->m_prev;
+ IR::Opnd *src1 = prev->GetSrc1();
+ return instr->m_opcode == Js::OpCode::CALL &&
+ prev->m_opcode == Js::OpCode::MOV &&
+ src1 &&
+ src1->IsHelperCallOpnd() &&
+ src1->AsHelperCallOpnd()->m_fnHelper == method;
+}
+
+void
+DbCheckPostLower::EnsureOnlyMovesToRegisterOpnd(IR::Instr *instr)
+{
+ IR::Instr *startingCallInstrSequence = instr;
+ Assert(instr->m_opcode == Js::OpCode::CALL && instr->HasLazyBailOut());
+ instr = instr->m_next;
+ while (!this->IsEndBoundary(instr))
+ {
+ if (!instr->IsPragmaInstr())
+ {
+ if (this->IsAssign(instr))
+ {
+ if (!instr->GetDst()->IsRegOpnd())
+ {
+ // Instructions such as Op_SetElementI with LazyBailOut are
+ // followed by a MOV to re-enable implicit calls; don't assert
+ // in such cases.
+ if (!instr->m_noLazyHelperAssert)
+ {
+ instr->Dump();
+ AssertMsg(false, "Nested helper call. Non-register operand for destination.");
+ }
+ }
+ }
+ else if (this->IsCallToHelper(startingCallInstrSequence, IR::HelperOp_Typeof))
+ {
+ if (this->IsCallToHelper(instr, IR::HelperOp_Equal) ||
+ this->IsCallToHelper(instr, IR::HelperOp_StrictEqual) ||
+ this->IsCallToHelper(instr, IR::HelperOP_CmEq_A) ||
+ this->IsCallToHelper(instr, IR::HelperOP_CmNeq_A)
+ )
+ {
+ // Pattern matched
+ }
+ else
+ {
+ instr->Dump();
+ AssertMsg(false, "Nested helper call. Branch TypeOf/Equal doesn't match.");
+ }
+ }
+ else if (instr->m_opcode == Js::OpCode::LEA)
+ {
+ // Skip, this is probably NewScArray
+ }
+ else
+ {
+ instr->Dump();
+ AssertMsg(false, "Nested helper call. Not assignment after CALL.");
+ }
+ }
+
+ instr = instr->m_next;
+ }
+
+ this->EnsureValidEndBoundary(instr);
+}
+
+void
+DbCheckPostLower::CheckNestedHelperCalls()
+{
+ bool isInHelperBlock = false;
+ FOREACH_INSTR_IN_FUNC(instr, this->func)
+ {
+ if (instr->IsLabelInstr())
+ {
+ isInHelperBlock = instr->AsLabelInstr()->isOpHelper;
+ }
+
+ if (!isInHelperBlock || instr->m_opcode != Js::OpCode::CALL || !instr->HasLazyBailOut())
+ {
+ continue;
+ }
+
+ this->EnsureOnlyMovesToRegisterOpnd(instr);
+
+ } NEXT_INSTR_IN_FUNC;
+}
+
+#endif // X64 || X86
+
#endif // DBG
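
The new `CheckNestedHelperCalls` pass enforces that, after a helper `CALL` carrying a lazy bailout, nothing but register moves (and the tolerated `LEA`) appears until the next end boundary, so no second helper call can clobber the lazy-bailout state. The toy model below uses invented opcodes rather than real IR instructions to illustrate that invariant; it is a sketch, not the actual check.

```cpp
// Simplified model (invented opcodes, not ChakraCore IR) of the nested-helper-call
// invariant: after a CALL with lazy bailout, only moves may appear until the next
// end boundary (label / CMP / TEST / JMP).
#include <cassert>
#include <vector>

enum class Op { CallWithLazyBailOut, MovRegReg, Lea, Cmp, Test, Jmp, Label, Call };

static bool IsEndBoundary(Op op)
{
    return op == Op::Label || op == Op::Cmp || op == Op::Test || op == Op::Jmp;
}

static bool OnlyMovesUntilBoundary(const std::vector<Op>& instrs, size_t callIndex)
{
    for (size_t i = callIndex + 1; i < instrs.size(); ++i)
    {
        if (IsEndBoundary(instrs[i]))
        {
            return true;                          // reached a valid end boundary
        }
        if (instrs[i] != Op::MovRegReg && instrs[i] != Op::Lea)
        {
            return false;                         // e.g. a nested CALL: invariant broken
        }
    }
    return false;                                 // fell off the end without a boundary
}

int main()
{
    std::vector<Op> ok  = { Op::CallWithLazyBailOut, Op::MovRegReg, Op::Cmp, Op::Jmp };
    std::vector<Op> bad = { Op::CallWithLazyBailOut, Op::MovRegReg, Op::Call, Op::Cmp };
    assert(OnlyMovesUntilBoundary(ok, 0));
    assert(!OnlyMovesUntilBoundary(bad, 0));
    return 0;
}
```
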
diff --git a/lib/Backend/DbCheckPostLower.h b/lib/Backend/DbCheckPostLower.h
index 8257fed75c5..658ee77cb8f 100644
--- a/lib/Backend/DbCheckPostLower.h
+++ b/lib/Backend/DbCheckPostLower.h
@@ -13,9 +13,21 @@ class DbCheckPostLower
void Check(IR::Opnd *opnd);
void Check(IR::RegOpnd *regOpnd);
+#if defined(_M_IX86) || defined(_M_X64)
+ bool IsCallToHelper(IR::Instr *instr, IR::JnHelperMethod method);
+ bool IsEndBoundary(IR::Instr * instr);
+ void EnsureValidEndBoundary(IR::Instr * instr);
+ bool IsAssign(IR::Instr * instr);
+ void EnsureOnlyMovesToRegisterOpnd(IR::Instr * instr);
+#endif
+
public:
DbCheckPostLower(Func *func) : func(func) { }
void Check();
+
+#if defined(_M_IX86) || defined(_M_X64)
+ void CheckNestedHelperCalls();
+#endif
};
#endif // DBG
diff --git a/lib/Backend/Encoder.cpp b/lib/Backend/Encoder.cpp
index 493f112f746..35f0a618983 100644
--- a/lib/Backend/Encoder.cpp
+++ b/lib/Backend/Encoder.cpp
@@ -77,7 +77,7 @@ Encoder::Encode()
m_pc = m_encodeBuffer;
m_inlineeFrameMap = Anew(m_tempAlloc, ArenaInlineeFrameMap, m_tempAlloc);
- m_bailoutRecordMap = Anew(m_tempAlloc, ArenaBailoutRecordMap, m_tempAlloc);
+ m_sortedLazyBailoutRecordList = Anew(m_tempAlloc, ArenaLazyBailoutRecordList, m_tempAlloc);
IR::PragmaInstr* pragmaInstr = nullptr;
uint32 pragmaOffsetInBuffer = 0;
@@ -254,9 +254,15 @@ Encoder::Encode()
isCallInstr = false;
this->RecordInlineeFrame(instr->m_func, GetCurrentOffset());
}
- if (instr->HasBailOutInfo() && Lowerer::DoLazyBailout(this->m_func))
+
+ if (instr->HasLazyBailOut())
+ {
+ this->SaveToLazyBailOutRecordList(instr, this->GetCurrentOffset());
+ }
+
+ if (instr->m_opcode == Js::OpCode::LazyBailOutThunkLabel)
{
- this->RecordBailout(instr, (uint32)(m_pc - m_encodeBuffer));
+ this->SaveLazyBailOutThunkOffset(this->GetCurrentOffset());
}
}
else
@@ -320,6 +326,165 @@ Encoder::Encode()
}
}
+ // Assembly Dump Phase
+ // This phase exists to assist tooling that expects "assemblable" output - that is,
+ // output that, with minimal manual handling, could theoretically be fed to another
+ // assembler to make a valid function for the target platform. We don't guarantee a
+ // dump from this will _actually_ be assemblable, but it is significantly closer to
+ // that than our normal, annotated output.
+#if DBG_DUMP
+ if (PHASE_DUMP(Js::AssemblyPhase, m_func))
+ {
+ FOREACH_INSTR_IN_FUNC(instr, m_func)
+ {
+ bool hasPrintedForOpnds = false;
+ Func* localScopeFuncForLambda = m_func;
+ auto printOpnd = [&hasPrintedForOpnds, localScopeFuncForLambda](IR::Opnd* opnd)
+ {
+ if (hasPrintedForOpnds)
+ {
+ Output::Print(_u(", "));
+ }
+ switch (opnd->m_kind)
+ {
+ case IR::OpndKindInvalid:
+ AssertMsg(false, "Should be unreachable");
+ break;
+ case IR::OpndKindIntConst:
+ Output::Print(_u("%lli"), (long long int)opnd->AsIntConstOpnd()->GetValue());
+ break;
+ case IR::OpndKindInt64Const:
+ case IR::OpndKindFloatConst:
+ case IR::OpndKindFloat32Const:
+ case IR::OpndKindSimd128Const:
+ AssertMsg(false, "Not Yet Implemented");
+ break;
+ case IR::OpndKindHelperCall:
+ Output::Print(_u("%s"), IR::GetMethodName(opnd->AsHelperCallOpnd()->m_fnHelper));
+ break;
+ case IR::OpndKindSym:
+ Output::Print(_u("SYM("));
+ opnd->Dump(IRDumpFlags_SimpleForm, localScopeFuncForLambda);
+ Output::Print(_u(")"));
+ break;
+ case IR::OpndKindReg:
+ Output::Print(_u("%S"), RegNames[opnd->AsRegOpnd()->GetReg()]);
+ break;
+ case IR::OpndKindAddr:
+ Output::Print(_u("0x%p"), opnd->AsAddrOpnd()->m_address);
+ break;
+ case IR::OpndKindIndir:
+ {
+ IR::IndirOpnd* indirOpnd = opnd->AsIndirOpnd();
+ IR::RegOpnd* baseOpnd = indirOpnd->GetBaseOpnd();
+ IR::RegOpnd* indexOpnd = indirOpnd->GetIndexOpnd();
+ Output::Print(_u("["));
+ bool hasPrintedComponent = false;
+ if (baseOpnd != nullptr)
+ {
+ Output::Print(_u("%S"), RegNames[baseOpnd->GetReg()]);
+ hasPrintedComponent = true;
+ }
+ if (indexOpnd != nullptr)
+ {
+ if (hasPrintedComponent)
+ {
+ Output::Print(_u(" + "));
+ }
+ Output::Print(_u("%S * %u"), RegNames[indexOpnd->GetReg()], indirOpnd->GetScale());
+ hasPrintedComponent = true;
+ }
+ if (hasPrintedComponent)
+ {
+ Output::Print(_u(" + "));
+ }
+ Output::Print(_u("(%i)]"), indirOpnd->GetOffset());
+ break;
+ }
+ case IR::OpndKindLabel:
+ opnd->Dump(IRDumpFlags_SimpleForm, localScopeFuncForLambda);
+ break;
+ case IR::OpndKindMemRef:
+ opnd->DumpOpndKindMemRef(true, localScopeFuncForLambda);
+ break;
+ case IR::OpndKindRegBV:
+ AssertMsg(false, "Should be unreachable");
+ break;
+ case IR::OpndKindList:
+ AssertMsg(false, "Should be unreachable");
+ break;
+ default:
+ AssertMsg(false, "Missing operand type");
+ }
+ hasPrintedForOpnds = true;
+ };
+ switch(instr->GetKind())
+ {
+ case IR::InstrKindInvalid:
+ Assert(false);
+ break;
+ case IR::InstrKindJitProfiling:
+ case IR::InstrKindProfiled:
+ case IR::InstrKindInstr:
+ {
+ Output::SkipToColumn(4);
+ Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
+ Output::SkipToColumn(18);
+ IR::Opnd* dst = instr->GetDst();
+ IR::Opnd* src1 = instr->GetSrc1();
+ IR::Opnd* src2 = instr->GetSrc2();
+ if (dst != nullptr && (src1 == nullptr || !dst->IsRegOpnd() || !src1->IsRegOpnd() || dst->AsRegOpnd()->GetReg() != src1->AsRegOpnd()->GetReg())) // Print dst if it's there, and not the same reg as src1 (which is usually an instr that has a srcdest)
+ {
+ printOpnd(dst);
+ }
+ if (src1 != nullptr)
+ {
+ printOpnd(src1);
+ }
+ if (src2 != nullptr)
+ {
+ printOpnd(src2);
+ }
+ break;
+ }
+ case IR::InstrKindBranch:
+ Output::SkipToColumn(4);
+ Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
+ Output::SkipToColumn(18);
+ if (instr->AsBranchInstr()->IsMultiBranch())
+ {
+ Assert(instr->GetSrc1() != nullptr);
+ printOpnd(instr->GetSrc1());
+ }
+ else
+ {
+ Output::Print(_u("L%u"), instr->AsBranchInstr()->GetTarget()->m_id);
+ }
+ break;
+ case IR::InstrKindProfiledLabel:
+ case IR::InstrKindLabel:
+ Output::Print(_u("L%u:"), instr->AsLabelInstr()->m_id);
+ break;
+ case IR::InstrKindEntry:
+ case IR::InstrKindExit:
+ case IR::InstrKindPragma:
+ // No output
+ break;
+ case IR::InstrKindByteCodeUses:
+ AssertMsg(false, "Instruction kind shouldn't be present here");
+ break;
+ default:
+ Assert(false);
+ break;
+ }
+ Output::SetAlignAndPrefix(60, _u("; "));
+ instr->Dump();
+ Output::ResetAlignAndPrefix();
+ } NEXT_INSTR_IN_FUNC;
+ }
+#endif
+ // End Assembly Dump Phase
+
BEGIN_CODEGEN_PHASE(m_func, Js::EmitterPhase);
// Copy to permanent buffer.
@@ -378,11 +543,6 @@ Encoder::Encode()
m_func->GetThreadContextInfo()->ResetIsAllJITCodeInPreReservedRegion();
}
- this->m_bailoutRecordMap->MapAddress([=](int index, LazyBailOutRecord* record)
- {
- this->m_encoderMD.AddLabelReloc((BYTE*)&record->instructionPointer);
- });
-
// Relocs
m_encoderMD.ApplyRelocs((size_t)allocation->address, codeSize, &bufferCRC, isSuccessBrShortAndLoopAlign);
@@ -472,10 +632,7 @@ Encoder::Encode()
}
}
- if (this->m_bailoutRecordMap->Count() > 0)
- {
- m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData()->RecordBailOutMap(m_bailoutRecordMap);
- }
+ this->SaveLazyBailOutJitTransferData();
if (this->m_func->pinnedTypeRefs != nullptr)
{
@@ -570,18 +727,6 @@ Encoder::Encode()
}
}
- if (this->m_func->lazyBailoutProperties.Count() > 0)
- {
- int count = this->m_func->lazyBailoutProperties.Count();
- Js::PropertyId* lazyBailoutProperties = HeapNewArrayZ(Js::PropertyId, count);
- Js::PropertyId* dstProperties = lazyBailoutProperties;
- this->m_func->lazyBailoutProperties.Map([&](Js::PropertyId propertyId)
- {
- *dstProperties++ = propertyId;
- });
- m_func->GetInProcJITEntryPointInfo()->GetJitTransferData()->SetLazyBailoutProperties(lazyBailoutProperties, count);
- }
-
// Save all property guards on the JIT transfer data in a map keyed by property ID. We will use this map when installing the entry
// point to register each guard for invalidation.
if (this->m_func->propertyGuardsByPropertyId != nullptr)
@@ -866,7 +1011,7 @@ void Encoder::RecordInlineeFrame(Func* inlinee, uint32 currentOffset)
if (!(this->m_func->IsLoopBody() && PHASE_OFF(Js::InlineInJitLoopBodyPhase, this->m_func)) && !this->m_func->IsSimpleJit())
{
InlineeFrameRecord* record = nullptr;
- if (inlinee->frameInfo && inlinee->m_hasInlineArgsOpt)
+ if (inlinee->frameInfo)
{
record = inlinee->frameInfo->record;
Assert(record != nullptr);
@@ -1059,7 +1204,8 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin
, &m_origOffsetBuffer );
// Here we mark BRs to be shortened and adjust Labels and relocList entries offsets.
- uint32 offsetBuffIndex = 0, pragmaInstToRecordOffsetIndex = 0, inlineeFrameRecordsIndex = 0, inlineeFrameMapIndex = 0;
+ FixUpMapIndex mapIndices;
+
int32 totalBytesSaved = 0;
// loop over all BRs, find the ones we can convert to short form
@@ -1083,7 +1229,7 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin
{
AssertMsg(reloc.isAlignedLabel(), "Expecting aligned label.");
// we aligned a loop, fix maps
- m_encoderMD.FixMaps((uint32)(reloc.getLabelOrigPC() - buffStart), totalBytesSaved, &inlineeFrameRecordsIndex, &inlineeFrameMapIndex, &pragmaInstToRecordOffsetIndex, &offsetBuffIndex);
+ m_encoderMD.FixMaps((uint32)(reloc.getLabelOrigPC() - buffStart), totalBytesSaved, &mapIndices);
codeChange = true;
}
totalBytesSaved = newTotalBytesSaved;
@@ -1144,7 +1290,7 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin
// fix all maps entries from last shortened br to this one, before updating total bytes saved.
brOffset = (uint32) ((BYTE*)reloc.m_origPtr - buffStart);
- m_encoderMD.FixMaps(brOffset, totalBytesSaved, &inlineeFrameRecordsIndex, &inlineeFrameMapIndex, &pragmaInstToRecordOffsetIndex, &offsetBuffIndex);
+ m_encoderMD.FixMaps(brOffset, totalBytesSaved, &mapIndices);
codeChange = true;
totalBytesSaved += bytesSaved;
@@ -1160,9 +1306,10 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin
// Fix the rest of the maps, if needed.
if (totalBytesSaved != 0)
{
- m_encoderMD.FixMaps((uint32) -1, totalBytesSaved, &inlineeFrameRecordsIndex, &inlineeFrameMapIndex, &pragmaInstToRecordOffsetIndex, &offsetBuffIndex);
+ m_encoderMD.FixMaps((uint32)-1, totalBytesSaved, &mapIndices);
codeChange = true;
newCodeSize -= totalBytesSaved;
+ this->FixLazyBailOutThunkOffset(totalBytesSaved);
}
// no BR shortening or Label alignment happened, no need to copy code
@@ -1485,27 +1632,6 @@ void Encoder::CopyMaps(OffsetList **m_origInlineeFrameRecords
#endif
-void Encoder::RecordBailout(IR::Instr* instr, uint32 currentOffset)
-{
- BailOutInfo* bailoutInfo = instr->GetBailOutInfo();
- if (bailoutInfo->bailOutRecord == nullptr)
- {
- return;
- }
-#if DBG_DUMP
- if (PHASE_DUMP(Js::LazyBailoutPhase, m_func))
- {
- Output::Print(_u("Offset: %u Instr: "), currentOffset);
- instr->Dump();
- Output::Print(_u("Bailout label: "));
- bailoutInfo->bailOutInstr->Dump();
- }
-#endif
- Assert(bailoutInfo->bailOutInstr->IsLabelInstr());
- LazyBailOutRecord record(currentOffset, (BYTE*)bailoutInfo->bailOutInstr, bailoutInfo->bailOutRecord);
- m_bailoutRecordMap->Add(record);
-}
-
#if DBG_DUMP
void Encoder::DumpInlineeFrameMap(size_t baseAddress)
{
@@ -1526,3 +1652,70 @@ void Encoder::DumpInlineeFrameMap(size_t baseAddress)
});
}
#endif
+
+void
+Encoder::SaveToLazyBailOutRecordList(IR::Instr* instr, uint32 currentOffset)
+{
+ BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
+
+ Assert(instr->OnlyHasLazyBailOut() && bailOutInfo->bailOutRecord != nullptr);
+
+#if DBG_DUMP
+ if (PHASE_DUMP(Js::LazyBailoutPhase, m_func))
+ {
+ Output::Print(_u("Offset: %u Instr: "), currentOffset);
+ instr->Dump();
+ Output::Print(_u("Bailout label: "));
+ bailOutInfo->bailOutInstr->Dump();
+ }
+#endif
+
+ LazyBailOutRecord record(currentOffset, bailOutInfo->bailOutRecord);
+ this->m_sortedLazyBailoutRecordList->Add(record);
+}
+
+void
+Encoder::SaveLazyBailOutThunkOffset(uint32 currentOffset)
+{
+ AssertMsg(
+ this->m_lazyBailOutThunkOffset == 0,
+ "We should only have one thunk generated during final lowerer"
+ );
+ this->m_lazyBailOutThunkOffset = this->GetCurrentOffset();
+}
+
+void
+Encoder::SaveLazyBailOutJitTransferData()
+{
+ if (this->m_func->HasLazyBailOut())
+ {
+ Assert(this->m_sortedLazyBailoutRecordList->Count() > 0);
+ Assert(this->m_lazyBailOutThunkOffset != 0);
+ Assert(this->m_func->GetLazyBailOutRecordSlot() != nullptr);
+
+ auto nativeEntryPointData = this->m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData();
+ nativeEntryPointData->SetSortedLazyBailOutRecordList(this->m_sortedLazyBailoutRecordList);
+ nativeEntryPointData->SetLazyBailOutRecordSlotOffset(this->m_func->GetLazyBailOutRecordSlot()->m_offset);
+ nativeEntryPointData->SetLazyBailOutThunkOffset(this->m_lazyBailOutThunkOffset);
+ }
+
+ if (this->m_func->lazyBailoutProperties.Count() > 0)
+ {
+ const int count = this->m_func->lazyBailoutProperties.Count();
+ Js::PropertyId* lazyBailoutProperties = HeapNewArrayZ(Js::PropertyId, count);
+ Js::PropertyId* dstProperties = lazyBailoutProperties;
+ this->m_func->lazyBailoutProperties.Map([&](Js::PropertyId propertyId)
+ {
+ *dstProperties++ = propertyId;
+ });
+ this->m_func->GetInProcJITEntryPointInfo()->GetJitTransferData()->SetLazyBailoutProperties(lazyBailoutProperties, count);
+ }
+}
+
+void
+Encoder::FixLazyBailOutThunkOffset(uint32 bytesSaved)
+{
+ // Lazy bailout thunk is inserted at the end of the function,
+ // so just decrease the offset by the number of bytes saved
+ this->m_lazyBailOutThunkOffset -= bytesSaved;
+}
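
The records saved by `SaveToLazyBailOutRecordList` are added in encoding order, so the list ends up sorted by native offset. Assuming the runtime later needs to map a return-address offset back to the closest preceding lazy-bailout record (that lookup is not part of this hunk), a binary search over the sorted list is the natural consumer; the sketch below uses an invented record struct purely to show the idea.

```cpp
// Sketch of why the per-instruction records are kept sorted by offset: a binary
// search can map a native code offset back to the last lazy-bailout record at or
// before it. The record layout here is invented; the real lookup lives elsewhere.
#include <cassert>
#include <cstdint>
#include <vector>

struct FakeLazyBailOutRecord
{
    uint32_t offset;      // native code offset recorded by the encoder
    uint32_t recordId;    // stand-in for the BailOutRecord pointer
};

static const FakeLazyBailOutRecord* FindRecord(const std::vector<FakeLazyBailOutRecord>& sorted, uint32_t offset)
{
    const FakeLazyBailOutRecord* result = nullptr;
    size_t lo = 0, hi = sorted.size();
    while (lo < hi)
    {
        size_t mid = lo + (hi - lo) / 2;
        if (sorted[mid].offset <= offset)
        {
            result = &sorted[mid];   // candidate: last record at or before offset
            lo = mid + 1;
        }
        else
        {
            hi = mid;
        }
    }
    return result;
}

int main()
{
    std::vector<FakeLazyBailOutRecord> records = { {16, 1}, {48, 2}, {112, 3} };
    assert(FindRecord(records, 50)->recordId == 2);
    assert(FindRecord(records, 112)->recordId == 3);
    assert(FindRecord(records, 8) == nullptr);
    return 0;
}
```
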
diff --git a/lib/Backend/Encoder.h b/lib/Backend/Encoder.h
index be2e122e200..8c76cb60d09 100644
--- a/lib/Backend/Encoder.h
+++ b/lib/Backend/Encoder.h
@@ -15,16 +15,28 @@ typedef JsUtil::List ArenaInlineeF
typedef JsUtil::List PragmaInstrList;
typedef JsUtil::List OffsetList;
typedef JsUtil::List JmpTableList;
+typedef JsUtil::List ArenaLazyBailoutRecordList;
+
+struct FixUpMapIndex
+{
+ uint32 offsetBuffIndex = 0;
+ uint32 pragmaInstToRecordOffsetIndex = 0;
+ uint32 inlineeFrameRecordsIndex = 0;
+ uint32 inlineeFrameMapIndex = 0;
+ uint32 lazyBailOutRecordListIndex = 0;
+};
class Encoder
{
friend class EncoderMD;
public:
- Encoder(Func * func) : m_func(func), m_encoderMD(func), m_inlineeFrameMap(nullptr) {}
+ Encoder(Func * func) :
+ m_func(func), m_encoderMD(func), m_inlineeFrameMap(nullptr),
+ m_lazyBailOutThunkOffset(0), m_sortedLazyBailoutRecordList(nullptr)
+ {}
void Encode();
void RecordInlineeFrame(Func* inlinee, uint32 currentOffset);
- void RecordBailout(IR::Instr* instr, uint32 currentOffset);
private:
bool DoTrackAllStatementBoundary() const;
@@ -38,8 +50,9 @@ class Encoder
ArenaInlineeFrameMap* m_inlineeFrameMap;
uint32 m_inlineeFrameMapDataOffset;
uint32 m_inlineeFrameMapRecordCount;
- typedef JsUtil::List ArenaBailoutRecordMap;
- ArenaBailoutRecordMap* m_bailoutRecordMap;
+
+ uint32 m_lazyBailOutThunkOffset;
+ ArenaLazyBailoutRecordList* m_sortedLazyBailoutRecordList;
#if DBG_DUMP
void DumpInlineeFrameMap(size_t baseAddress);
uint32 * m_offsetBuffer;
@@ -67,5 +80,8 @@ class Encoder
#if defined(_M_IX86) || defined(_M_X64)
void ValidateCRCOnFinalBuffer(_In_reads_bytes_(finalCodeSize) BYTE * finalCodeBufferStart, size_t finalCodeSize, size_t jumpTableSize, _In_reads_bytes_(finalCodeSize) BYTE * oldCodeBufferStart, uint initialCrcSeed, uint bufferCrcToValidate, BOOL isSuccessBrShortAndLoopAlign);
#endif
+ void FixLazyBailOutThunkOffset(uint32 bytesSaved);
+ void SaveLazyBailOutJitTransferData();
+ void SaveLazyBailOutThunkOffset(uint32 currentOffset);
+ void SaveToLazyBailOutRecordList(IR::Instr* instr, uint32 currentOffset);
};
-
diff --git a/lib/Backend/EquivalentTypeSet.cpp b/lib/Backend/EquivalentTypeSet.cpp
index d345708d79c..53e892e1789 100644
--- a/lib/Backend/EquivalentTypeSet.cpp
+++ b/lib/Backend/EquivalentTypeSet.cpp
@@ -162,4 +162,4 @@ void EquivalentTypeSet::SortAndRemoveDuplicates()
this->sortedAndDuplicatesRemoved = true;
}
}
-#endif
\ No newline at end of file
+#endif
diff --git a/lib/Backend/FixedFieldInfo.cpp b/lib/Backend/FixedFieldInfo.cpp
index b3f04c2f2de..947fbb226d8 100644
--- a/lib/Backend/FixedFieldInfo.cpp
+++ b/lib/Backend/FixedFieldInfo.cpp
@@ -14,16 +14,16 @@ FixedFieldInfo::PopulateFixedField(_In_opt_ Js::Type * type, _In_opt_ Js::Var va
FixedFieldIDL * rawFF = fixed->GetRaw();
rawFF->fieldValue = var;
rawFF->nextHasSameFixedField = false;
- if (var != nullptr && Js::JavascriptFunction::Is(var))
+ if (var != nullptr && Js::VarIs<Js::JavascriptFunction>(var))
{
- Js::JavascriptFunction * funcObj = Js::JavascriptFunction::FromVar(var);
+ Js::JavascriptFunction * funcObj = Js::VarTo<Js::JavascriptFunction>(var);
rawFF->valueType = ValueType::FromObject(funcObj).GetRawData();
rawFF->funcInfoAddr = (void*)funcObj->GetFunctionInfo();
rawFF->isClassCtor = funcObj->GetFunctionInfo()->IsClassConstructor();
rawFF->localFuncId = funcObj->GetFunctionInfo()->GetLocalFunctionId();
- if (Js::ScriptFunction::Is(var))
+ if (Js::VarIs<Js::ScriptFunction>(var))
{
- rawFF->environmentAddr = (void*)Js::ScriptFunction::FromVar(funcObj)->GetEnvironment();
+ rawFF->environmentAddr = (void*)Js::VarTo<Js::ScriptFunction>(funcObj)->GetEnvironment();
}
}
if (type != nullptr)
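
The mechanical `Is()`/`FromVar()` to `Js::VarIs<T>()`/`Js::VarTo<T>()` replacements in this file (and throughout the diff) all follow one pattern: a templated type test plus a checked cast. The sketch below is a generic illustration with invented classes and a `dynamic_cast` stand-in, not ChakraCore's actual type-id based implementation.

```cpp
// Minimal sketch of the VarIs<T>/VarTo<T> idiom that replaces per-class
// Is()/FromVar() statics. Types are invented; the real implementation
// dispatches on ChakraCore's type ids rather than RTTI.
#include <cassert>

struct RecyclableObject { virtual ~RecyclableObject() {} };
struct JavascriptFunction : RecyclableObject {};
struct ScriptFunction : JavascriptFunction {};

template <typename T>
bool VarIs(RecyclableObject* var)
{
    return dynamic_cast<T*>(var) != nullptr;   // stand-in for the real type-id check
}

template <typename T>
T* VarTo(RecyclableObject* var)
{
    assert(VarIs<T>(var));                     // VarTo asserts the cast is valid
    return static_cast<T*>(var);
}

int main()
{
    ScriptFunction func;
    RecyclableObject* var = &func;
    if (VarIs<JavascriptFunction>(var))
    {
        JavascriptFunction* f = VarTo<JavascriptFunction>(var);
        (void)f;
    }
    assert(VarIs<ScriptFunction>(var));
    return 0;
}
```
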
diff --git a/lib/Backend/FlowGraph.cpp b/lib/Backend/FlowGraph.cpp
index 645b58e7601..4a51776b3b3 100644
--- a/lib/Backend/FlowGraph.cpp
+++ b/lib/Backend/FlowGraph.cpp
@@ -196,6 +196,7 @@ FlowGraph::Build(void)
BasicBlock * currBlock = nullptr;
BasicBlock * nextBlock = nullptr;
bool hasCall = false;
+ bool hasYield = false;
FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
{
@@ -208,7 +209,9 @@ FlowGraph::Build(void)
nextBlock = currBlock;
currBlock = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
currBlock->hasCall = hasCall;
+ currBlock->hasYield = hasYield;
hasCall = false;
+ hasYield = false;
}
currLastInstr = instr;
@@ -243,7 +246,9 @@ FlowGraph::Build(void)
nextBlock = currBlock;
currBlock = this->AddBlock(instr, currLastInstr, nextBlock);
currBlock->hasCall = hasCall;
+ currBlock->hasYield = hasYield;
hasCall = false;
+ hasYield = false;
currLastInstr = nullptr;
}
@@ -350,6 +355,11 @@ FlowGraph::Build(void)
break;
}
+ if (instr->m_opcode == Js::OpCode::Yield)
+ {
+ hasYield = true;
+ }
+
if (OpCodeAttr::UseAllFields(instr->m_opcode))
{
// UseAllFields opcode are call instruction or opcode that would call.
@@ -825,6 +835,8 @@ FlowGraph::RunPeeps()
case Js::OpCode::BrSrNeq_A:
case Js::OpCode::BrOnHasProperty:
case Js::OpCode::BrOnNoProperty:
+ case Js::OpCode::BrOnHasLocalProperty:
+ case Js::OpCode::BrOnNoLocalProperty:
case Js::OpCode::BrHasSideEffects:
case Js::OpCode::BrNotHasSideEffects:
case Js::OpCode::BrFncEqApply:
@@ -836,6 +848,9 @@ FlowGraph::RunPeeps()
case Js::OpCode::BrOnObject_A:
case Js::OpCode::BrOnClassConstructor:
case Js::OpCode::BrOnBaseConstructorKind:
+ case Js::OpCode::BrOnObjectOrNull_A:
+ case Js::OpCode::BrOnNotNullObj_A:
+ case Js::OpCode::BrOnConstructor_A:
if (tryUnsignedCmpPeep)
{
this->UnsignedCmpPeep(instr);
@@ -1137,9 +1152,9 @@ FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicB
// We have to update region info for blocks whose predecessors changed
if (assignRegionsBeforeGlobopt)
{
- UpdateRegionForBlockFromEHPred(dstPredBlock, true);
- UpdateRegionForBlockFromEHPred(blockStart, true);
- UpdateRegionForBlockFromEHPred(srcNextBlock, true);
+ UpdateRegionForBlock(dstPredBlock);
+ UpdateRegionForBlock(blockStart);
+ UpdateRegionForBlock(srcNextBlock);
}
}
@@ -1399,6 +1414,10 @@ FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *temp
{
loop->SetHasCall();
}
+ if (pred->loop->hasYield)
+ {
+ loop->SetHasYield();
+ }
loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
}
// Add pred to loop bit vector
@@ -1429,6 +1448,10 @@ FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
{
loop->SetHasCall();
}
+ if (block->hasYield)
+ {
+ loop->SetHasYield();
+ }
}
///----------------------------------------------------------------------------
@@ -1871,30 +1894,6 @@ FlowGraph::Destroy(void)
this->func->isFlowGraphValid = false;
}
-bool FlowGraph::IsEHTransitionInstr(IR::Instr *instr)
-{
- Js::OpCode op = instr->m_opcode;
- return (op == Js::OpCode::TryCatch || op == Js::OpCode::TryFinally || op == Js::OpCode::Leave || op == Js::OpCode::LeaveNull);
-}
-
-BasicBlock * FlowGraph::GetPredecessorForRegionPropagation(BasicBlock *block)
-{
- BasicBlock *ehPred = nullptr;
- FOREACH_PREDECESSOR_BLOCK(predBlock, block)
- {
- Region * predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
- if (IsEHTransitionInstr(predBlock->GetLastInstr()) && predRegion)
- {
- // MGTODO : change this to return, once you know there can exist only one eh transitioning pred
- Assert(ehPred == nullptr);
- ehPred = predBlock;
- }
- AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
- }
- NEXT_PREDECESSOR_BLOCK;
- return ehPred;
-}
-
// Propagate the region forward from the block's predecessor(s), tracking the effect
// of the flow transition. Record the region in the block-to-region map provided
// and on the label at the entry to the block (if any).
@@ -1958,7 +1957,6 @@ FlowGraph::UpdateRegionForBlock(BasicBlock * block)
}
}
- Assert(region || block->GetPredList()->Count() == 0);
if (region && !region->ehBailoutData)
{
region->AllocateEHBailoutData(this->func, tryInstr);
@@ -1997,106 +1995,6 @@ FlowGraph::UpdateRegionForBlock(BasicBlock * block)
}
}
-void
-FlowGraph::UpdateRegionForBlockFromEHPred(BasicBlock * block, bool reassign)
-{
- Region *region = nullptr;
- Region * predRegion = nullptr;
- IR::Instr * tryInstr = nullptr;
- IR::Instr * firstInstr = block->GetFirstInstr();
- if (!reassign && firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
- {
- Assert(this->func->HasTry() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)));
- return;
- }
- if (block->isDead || block->isDeleted)
- {
- // We can end up calling this function with such blocks, return doing nothing
- // See test5() in tryfinallytests.js
- return;
- }
-
- if (block == this->blockList)
- {
- // Head of the graph: create the root region.
- region = Region::New(RegionTypeRoot, nullptr, this->func);
- }
- else if (block->GetPredList()->Count() == 1)
- {
- BasicBlock *predBlock = block->GetPredList()->Head()->GetPred();
- AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
- predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
- Assert(predRegion);
- region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
- }
- else
- {
- // Propagate the region forward by finding a predecessor we've already processed.
- // Since we do break block remval after region propagation, we cannot pick the first predecessor which has an assigned region
- // If there is a eh transitioning pred, we pick that
- // There cannot be more than one eh transitioning pred (?)
- BasicBlock *ehPred = this->GetPredecessorForRegionPropagation(block);
- if (ehPred)
- {
- predRegion = ehPred->GetFirstInstr()->AsLabelInstr()->GetRegion();
- Assert(predRegion != nullptr);
- region = this->PropagateRegionFromPred(block, ehPred, predRegion, tryInstr);
- }
- else
- {
- FOREACH_PREDECESSOR_BLOCK(predBlock, block)
- {
- predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
- if (predRegion != nullptr)
- {
- if ((predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnException || predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnNoException) &&
- predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit)
- {
- Assert(predRegion->IsNonExceptingFinally());
- // BrOnException from finally region to early exit
- // Skip this edge
- continue;
- }
- if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::Br &&
- predBlock->GetLastInstr()->GetPrevRealInstr()->m_opcode == Js::OpCode::BrOnNoException)
- {
- Assert(predBlock->GetLastInstr()->GetPrevRealInstr()->AsBranchInstr()->m_brFinallyToEarlyExit);
- Assert(predRegion->IsNonExceptingFinally());
- // BrOnException from finally region to early exit changed to BrOnNoException and Br during break block removal
- continue;
- }
- region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
- break;
- }
- }
- NEXT_PREDECESSOR_BLOCK;
- }
- }
-
- Assert(region || block->GetPredList()->Count() == 0 || block->firstInstr->AsLabelInstr()->GetRegion());
-
- if (region)
- {
- if (!region->ehBailoutData)
- {
- region->AllocateEHBailoutData(this->func, tryInstr);
- }
-
- Assert(firstInstr->IsLabelInstr());
- if (firstInstr->IsLabelInstr())
- {
- // Record the region on the label and make sure it stays around as a region
- // marker if we're entering a region at this point.
- IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
- labelInstr->SetRegion(region);
- if (region != predRegion)
- {
- labelInstr->m_hasNonBranchRef = true;
- }
- }
- }
-}
-
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
@@ -2488,7 +2386,7 @@ FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoo
if (assignRegionsBeforeGlobopt)
{
- UpdateRegionForBlockFromEHPred(compBlock);
+ UpdateRegionForBlock(compBlock);
}
}
else
@@ -3399,6 +3297,16 @@ BasicBlock::CreateLoopTopBailOutInfo(GlobOpt * globOpt)
return bailOutInfo;
}
+BVSparse<JitArenaAllocator> *
+BasicBlock::EnsureTypeIDsWithFinalType(JitArenaAllocator *alloc)
+{
+ if (typeIDsWithFinalType == nullptr)
+ {
+ typeIDsWithFinalType = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
+ }
+ return typeIDsWithFinalType;
+}
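Aside (illustrative only, not part of the patch): the new bit-vector is allocated lazily, so a consumer in the backward pass would be expected to reach it only through this accessor. The call below is a hypothetical sketch; `tempAlloc` and `finalTypeSym` are placeholder names.

    // Hypothetical call site in the dead-store pass.
    block->EnsureTypeIDsWithFinalType(this->tempAlloc)->Set(finalTypeSym->m_id);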
+
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
@@ -3424,7 +3332,7 @@ FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
* - When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
*/
Js::OpCode opcode = instr->m_opcode;
- if (opcode == Js::OpCode::LdElemI_A && instr->DoStackArgsOpt(this->func) &&
+ if (opcode == Js::OpCode::LdElemI_A && instr->DoStackArgsOpt() &&
globOpt->CurrentBlockData()->IsArgumentsOpnd(instr->GetSrc1()) && instr->m_func->GetScopeObjSym())
{
IR::ByteCodeUsesInstr * byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
@@ -3443,7 +3351,7 @@ FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
if (opcode == Js::OpCode::Yield)
{
IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
- while (instrLabel->m_opcode != Js::OpCode::Label)
+ while (instrLabel->m_opcode != Js::OpCode::GeneratorBailInLabel)
{
instrLabel = instrLabel->m_next;
}
@@ -3624,6 +3532,29 @@ Loop::SetHasCall()
while (current != nullptr);
}
+void
+Loop::SetHasYield()
+{
+ Loop* current = this;
+ do
+ {
+ if (current->hasYield)
+ {
+#if DBG
+ current = current->parent;
+ while (current)
+ {
+ Assert(current->hasYield);
+ current = current->parent;
+ }
+#endif
+ break;
+ }
+ current->hasYield = true;
+ current = current->parent;
+ } while (current != nullptr);
+}
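Aside, not part of the patch: SetHasYield mirrors SetHasCall by walking the parent chain and stopping at the first loop that is already marked. A stripped-down model of that propagation (MiniLoop is a made-up type used only for illustration):

    struct MiniLoop
    {
        MiniLoop * parent = nullptr;
        bool hasYield = false;

        void SetHasYield()
        {
            // Once an ancestor is marked, all of its ancestors must already be marked too,
            // which is what the DBG-only assert in Loop::SetHasYield verifies.
            for (MiniLoop * current = this; current != nullptr && !current->hasYield; current = current->parent)
            {
                current->hasYield = true;
            }
        }
    };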
+
void
Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
{
@@ -3694,7 +3625,7 @@ Loop::CanHoistInvariants() const
return false;
}
- return true;
+ return !this->hasYield;
}
IR::LabelInstr *
@@ -3725,10 +3656,15 @@ Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
bool
Loop::IsSymAssignedToInSelfOrParents(StackSym * const sym) const
+{
+ return IsSymAssignedToInSelfOrParents(sym->m_id);
+}
+
+bool Loop::IsSymAssignedToInSelfOrParents(SymID id) const
{
for (const Loop* curLoop = this; curLoop != nullptr; curLoop = curLoop->parent)
{
- if (curLoop->symsAssignedToInLoop->Test(sym->m_id))
+ if (curLoop->symsAssignedToInLoop->Test(id))
{
return true;
}
@@ -4571,18 +4507,47 @@ Value * BasicBlock::FindValueInLocalThenGlobalValueTableAndUpdate(GlobOpt *globO
return srcVal;
}
-IR::LabelInstr* BasicBlock::CanProveConditionalBranch(IR::BranchInstr *branch, GlobOpt* globOpt, GlobHashTable * localSymToValueMap)
+Value* BasicBlock::GetValueForConditionalBranch(
+ IR::BranchInstr* branch,
+ IR::Opnd* opnd,
+ GlobOpt* globOpt,
+ GlobHashTable* localSymToValueMap)
{
- if (!branch->GetSrc1() || !branch->GetSrc1()->GetStackSym())
+ if (!opnd || !opnd->GetStackSym())
{
return nullptr;
}
+ StackSym* sym = opnd->GetStackSym();
+
+ Value* val = FindValueInLocalThenGlobalValueTableAndUpdate(
+ globOpt,
+ localSymToValueMap,
+ branch,
+ nullptr,
+ sym);
+
+ if (val != nullptr && this->loop)
+ {
+ // If this branch is within a loop, the stack sym is type specialized, and the associated
+ // var sym is written to within the loop, then we cannot prove the condition: additional
+ // assignments to the type specialized sym might be inserted in a later block.
+ SymID varSymID = globOpt->GetVarSymID(sym);
+ if (varSymID != sym->m_id && this->loop->IsSymAssignedToInSelfOrParents(varSymID))
+ {
+ return nullptr;
+ }
+ }
+
+ return val;
+}
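Aside (illustrative only): the loop check above guards against a pattern like the following, sketched in pseudo-IR with made-up sym names.

    // s1 = FromVar s0        // type-specialized copy of var sym s0, taken before the loop
    // $LoopTop:
    //   ...
    //   s0 = <new value>     // the var sym is assigned inside the loop
    //   BrTrue $Target, s1   // folding this branch from s1's pre-loop value would be unsound,
    //                        // since a later block may re-specialize s1 from the updated s0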
+
+IR::LabelInstr* BasicBlock::CanProveConditionalBranch(IR::BranchInstr *branch, GlobOpt *globOpt, GlobHashTable *localSymToValueMap)
+{
Value *src1Val = nullptr, *src2Val = nullptr;
Js::Var src1Var = nullptr, src2Var = nullptr;
- src1Val = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, branch, nullptr, branch->GetSrc1()->GetStackSym());
-
+ src1Val = GetValueForConditionalBranch(branch, branch->GetSrc1(), globOpt, localSymToValueMap);
if (!src1Val)
{
return nullptr;
@@ -4591,10 +4556,7 @@ IR::LabelInstr* BasicBlock::CanProveConditionalBranch(IR::BranchInstr *branch, G
if (branch->GetSrc2() != nullptr)
{
- if (branch->GetSrc2()->GetStackSym())
- {
- src2Val = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, branch, nullptr, branch->GetSrc2()->GetStackSym());
- }
+ src2Val = GetValueForConditionalBranch(branch, branch->GetSrc2(), globOpt, localSymToValueMap);
if (!src2Val)
{
return nullptr;
@@ -4613,115 +4575,96 @@ IR::LabelInstr* BasicBlock::CanProveConditionalBranch(IR::BranchInstr *branch, G
return newTarget;
}
-void
-BasicBlock::CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt)
+Value*
+BasicBlock::UpdateValueForCopyTypeInstr(GlobOpt* globOpt, GlobHashTable* localSymToValueMap, IR::Instr* instr)
{
- IR::LabelInstr * lastBranchTarget = nullptr;
- IR::Instr *currentInlineeEnd = nullptr, *unskippedInlineeEnd = nullptr;
- GlobHashTable * localSymToValueMap = nullptr;
- BVSparse<JitArenaAllocator> * currentPathDefines = nullptr;
+ Value* dstValue = nullptr;
+ if (instr->m_opcode == Js::OpCode::LdFld)
+ {
+ // Special handling for LdFld
+ Assert(instr->GetSrc1()->IsSymOpnd());
+ IR::SymOpnd* symOpnd = instr->GetSrc1()->AsSymOpnd();
- auto UpdateValueForCopyTypeInstr = [&](IR::Instr *instr) -> Value* {
- Value * dstValue = nullptr;
- if (instr->m_opcode == Js::OpCode::LdFld)
+ if (symOpnd->m_sym->IsPropertySym())
{
- // Special handling for LdFld
- Assert(instr->GetSrc1()->IsSymOpnd());
- IR::SymOpnd *symOpnd = instr->GetSrc1()->AsSymOpnd();
-
- if (symOpnd->m_sym->IsPropertySym())
+ PropertySym* originalPropertySym = symOpnd->m_sym->AsPropertySym();
+ Value* const objectValue = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, instr, nullptr, originalPropertySym->m_stackSym);
+ Sym* objSym = objectValue ? objectValue->GetValueInfo()->GetSymStore() : nullptr;
+ PropertySym* prop = PropertySym::Find(objSym ? objSym->m_id : originalPropertySym->m_stackSym->m_id, originalPropertySym->m_propertyId, globOpt->func);
+ if (prop)
{
- PropertySym * originalPropertySym = symOpnd->m_sym->AsPropertySym();
- Value *const objectValue = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, instr, nullptr, originalPropertySym->m_stackSym);
- Sym* objSym = objectValue ? objectValue->GetValueInfo()->GetSymStore() : nullptr;
- PropertySym *prop = PropertySym::Find(objSym ? objSym->m_id : originalPropertySym->m_stackSym->m_id, originalPropertySym->m_propertyId, globOpt->func);
- if (prop)
- {
- dstValue = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, instr, instr->GetDst()->GetStackSym(), prop);
- }
- else
- {
- Value ** localDstValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetStackSym());
- dstValue = *localDstValue = nullptr;
- }
- }
- }
- else if (instr->GetSrc1()->GetStackSym())
- {
- StackSym* src1Sym = instr->GetSrc1()->GetStackSym();
- dstValue = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, instr, instr->GetDst()->GetSym(), src1Sym);
- }
- else if (instr->GetSrc1()->IsIntConstOpnd())
- {
- Value **localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
- dstValue = *localValue = globOpt->GetIntConstantValue(instr->GetSrc1()->AsIntConstOpnd()->AsInt32(), instr);
- }
- else if (instr->GetSrc1()->IsInt64ConstOpnd())
- {
- Value **localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
- dstValue = *localValue = globOpt->GetIntConstantValue(instr->GetSrc1()->AsInt64ConstOpnd()->GetValue(), instr);
- }
- else
- {
- ValueType src1Value = instr->GetSrc1()->GetValueType();
- Value **localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
- if (src1Value.IsUndefined() || src1Value.IsBoolean())
- {
- dstValue = *localValue = globOpt->GetVarConstantValue(instr->GetSrc1()->AsAddrOpnd());
+ dstValue = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, instr, instr->GetDst()->GetStackSym(), prop);
}
else
{
- dstValue = *localValue = nullptr;
+ Value** localDstValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetStackSym());
+ dstValue = *localDstValue = nullptr;
}
}
- return dstValue;
- };
-
- FOREACH_INSTR_IN_BLOCK(instr, this)
+ }
+ else if (instr->GetSrc1()->GetStackSym())
{
- if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
+ StackSym* src1Sym = instr->GetSrc1()->GetStackSym();
+ dstValue = FindValueInLocalThenGlobalValueTableAndUpdate(globOpt, localSymToValueMap, instr, instr->GetDst()->GetSym(), src1Sym);
+ }
+ else if (instr->GetSrc1()->IsIntConstOpnd())
+ {
+ Value** localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
+ dstValue = *localValue = globOpt->GetIntConstantValue(instr->GetSrc1()->AsIntConstOpnd()->AsInt32(), instr);
+ }
+ else if (instr->GetSrc1()->IsInt64ConstOpnd())
+ {
+ Value** localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
+ dstValue = *localValue = globOpt->GetIntConstantValue(instr->GetSrc1()->AsInt64ConstOpnd()->GetValue(), instr);
+ }
+ else
+ {
+ ValueType src1Value = instr->GetSrc1()->GetValueType();
+ Value** localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
+ if (src1Value.IsUndefined() || src1Value.IsBoolean())
{
- return;
+ dstValue = *localValue = globOpt->GetVarConstantValue(instr->GetSrc1()->AsAddrOpnd());
}
- if (instr->m_opcode == Js::OpCode::InlineeEnd)
+ else
{
- unskippedInlineeEnd = currentInlineeEnd = instr;
+ dstValue = *localValue = nullptr;
}
- } NEXT_INSTR_IN_BLOCK;
-
- IR::Instr * instr = this->GetLastInstr();
-
- // We have to first check the legality and only then allocate expensive data structures on the tempArena, because most block will have instructions we cant skip
+ }
+ return dstValue;
+}
+bool
+BasicBlock::IsLegalForPathDepBranches(IR::Instr* instr)
+{
while (instr)
{
if (!instr->IsBranchInstr() && !instr->IsLabelInstr() && !IsLegalOpcodeForPathDepBrFold(instr))
{
- return;
+ return false;
}
if (instr->IsLabelInstr())
{
if (instr->AsLabelInstr()->m_isLoopTop)
{
// don't cross over to loops
- return;
+ return false;
}
}
if (instr->IsBranchInstr())
{
- IR::BranchInstr *branch = instr->AsBranchInstr();
+ IR::BranchInstr* branch = instr->AsBranchInstr();
if (branch->IsUnconditional())
{
if (!branch->GetTarget())
{
- return;
+ return false;
}
instr = branch->GetTarget();
}
else
{
// Found only legal instructions until a conditional branch, build expensive data structures and check provability
- break;
+ return true;
}
}
else
@@ -4730,7 +4673,38 @@ BasicBlock::CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt)
}
}
- instr = this->GetLastInstr();
+ Assert(UNREACHED);
+ return false;
+}
+
+void
+BasicBlock::CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt)
+{
+ IR::LabelInstr * lastBranchTarget = nullptr;
+ IR::Instr *currentInlineeEnd = nullptr, *unskippedInlineeEnd = nullptr;
+ GlobHashTable * localSymToValueMap = nullptr;
+ BVSparse<JitArenaAllocator> * currentPathDefines = nullptr;
+
+ FOREACH_INSTR_IN_BLOCK(instr, this)
+ {
+ if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
+ {
+ return;
+ }
+ if (instr->m_opcode == Js::OpCode::InlineeEnd)
+ {
+ unskippedInlineeEnd = currentInlineeEnd = instr;
+ }
+ } NEXT_INSTR_IN_BLOCK;
+
+ IR::Instr * instr = this->GetLastInstr();
+
+ // We have to check legality first and only then allocate the expensive data structures on the tempArena, because most blocks will have instructions we can't skip
+ if (!IsLegalForPathDepBranches(instr))
+ {
+ return;
+ }
+
// Allocate hefty structures, we will not free them because OptBlock does a Reset on the tempAlloc
localSymToValueMap = GlobHashTable::New(globOpt->tempAlloc, 8);
currentPathDefines = JitAnew(globOpt->tempAlloc, BVSparse<JitArenaAllocator>, globOpt->tempAlloc);
@@ -4778,7 +4752,7 @@ BasicBlock::CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt)
if (IsCopyTypeInstr(instr))
{
- Value *dstValue = UpdateValueForCopyTypeInstr(instr);
+ Value *dstValue = UpdateValueForCopyTypeInstr(globOpt, localSymToValueMap, instr);
if (instr->m_opcode == Js::OpCode::LdFld && !dstValue)
{
// We cannot skip a LdFld if we didnt find its valueInfo in the localValueTable
diff --git a/lib/Backend/FlowGraph.h b/lib/Backend/FlowGraph.h
index 0d516222075..1d9ba14bcb5 100644
--- a/lib/Backend/FlowGraph.h
+++ b/lib/Backend/FlowGraph.h
@@ -201,10 +201,7 @@ class FlowGraph
void BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop = nullptr);
void WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc);
void AddBlockToLoop(BasicBlock *block, Loop *loop);
- bool IsEHTransitionInstr(IR::Instr *instr);
- BasicBlock * GetPredecessorForRegionPropagation(BasicBlock *block);
void UpdateRegionForBlock(BasicBlock *block);
- void UpdateRegionForBlockFromEHPred(BasicBlock *block, bool reassign = false);
Region * PropagateRegionFromPred(BasicBlock *block, BasicBlock *predBlock, Region *predRegion, IR::Instr * &tryInstr);
IR::Instr * PeepCm(IR::Instr *instr);
IR::Instr * PeepTypedCm(IR::Instr *instr);
@@ -349,14 +346,19 @@ class BasicBlock
bool IsLandingPad();
BailOutInfo * CreateLoopTopBailOutInfo(GlobOpt * globOpt);
+ BVSparse<JitArenaAllocator> *EnsureTypeIDsWithFinalType(JitArenaAllocator *alloc);
+
// GlobOpt Stuff
public:
bool PathDepBranchFolding(GlobOpt* globOptState);
void MergePredBlocksValueMaps(GlobOpt* globOptState);
private:
void CleanUpValueMaps();
+ Value* UpdateValueForCopyTypeInstr(GlobOpt* globOpt, GlobHashTable* localSymToValueMap, IR::Instr* instr);
+ static bool IsLegalForPathDepBranches(IR::Instr* instr);
void CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt);
Value * FindValueInLocalThenGlobalValueTableAndUpdate(GlobOpt *globOpt, GlobHashTable * localSymToValueMap, IR::Instr *instr, Sym *dstSym, Sym *srcSym);
+ Value* GetValueForConditionalBranch(IR::BranchInstr* branch, IR::Opnd* opnd, GlobOpt* globOpt, GlobHashTable* localSymToValueMap);
IR::LabelInstr* CanProveConditionalBranch(IR::BranchInstr *branch, GlobOpt* globOpt, GlobHashTable * localSymToValueMap);
#if DBG_DUMP
@@ -374,6 +376,7 @@ class BasicBlock
uint8 isDead:1;
uint8 isLoopHeader:1;
uint8 hasCall:1;
+ uint8 hasYield:1;
uint8 isVisited:1;
uint8 isAirLockCompensationBlock:1;
uint8 beginsBailOnNoProfile:1;
@@ -386,6 +389,7 @@ class BasicBlock
#endif
// Deadstore data
+ BVSparse<JitArenaAllocator> * liveFixedFields;
BVSparse<JitArenaAllocator> * upwardExposedUses;
BVSparse<JitArenaAllocator> * upwardExposedFields;
BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout;
@@ -400,6 +404,7 @@ class BasicBlock
HashTable * stackSymToFinalType;
HashTable * stackSymToGuardedProperties; // Dead store pass only
HashTable * stackSymToWriteGuardsMap; // Backward pass only
+ BVSparse<JitArenaAllocator> * typeIDsWithFinalType;
BVSparse<JitArenaAllocator> * noImplicitCallUses;
BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses;
BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses;
@@ -431,6 +436,8 @@ class BasicBlock
isDead(false),
isLoopHeader(false),
hasCall(false),
+ hasYield(false),
+ liveFixedFields(nullptr),
upwardExposedUses(nullptr),
upwardExposedFields(nullptr),
typesNeedingKnownObjectLayout(nullptr),
@@ -443,6 +450,7 @@ class BasicBlock
stackSymToFinalType(nullptr),
stackSymToGuardedProperties(nullptr),
stackSymToWriteGuardsMap(nullptr),
+ typeIDsWithFinalType(nullptr),
noImplicitCallUses(nullptr),
noImplicitCallNoMissingValuesUses(nullptr),
noImplicitCallNativeArrayUses(nullptr),
@@ -615,6 +623,7 @@ class Loop
bool hasDeadStoreCollectionPass : 1;
bool hasDeadStorePrepass : 1;
bool hasCall : 1;
+ bool hasYield : 1;
bool hasHoistedFields : 1;
bool needImplicitCallBailoutChecksForJsArrayCheckHoist : 1;
bool allFieldsKilled : 1;
@@ -764,10 +773,12 @@ class Loop
bool CanHoistInvariants() const;
bool CanDoFieldCopyProp();
void SetHasCall();
+ void SetHasYield();
IR::LabelInstr * GetLoopTopInstr() const;
void SetLoopTopInstr(IR::LabelInstr * loopTop);
Func * GetFunc() const { return GetLoopTopInstr()->m_func; }
bool IsSymAssignedToInSelfOrParents(StackSym * const sym) const;
+ bool IsSymAssignedToInSelfOrParents(SymID id) const;
BasicBlock * GetAnyTailBlock() const;
#if DBG_DUMP
bool GetHasCall() const { return hasCall; }
diff --git a/lib/Backend/Func.cpp b/lib/Backend/Func.cpp
index 898d054fa7f..8e25640b935 100644
--- a/lib/Backend/Func.cpp
+++ b/lib/Backend/Func.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
@@ -51,20 +52,23 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
m_cloner(nullptr),
m_cloneMap(nullptr),
m_loopParamSym(nullptr),
- m_funcObjSym(nullptr),
m_localClosureSym(nullptr),
m_paramClosureSym(nullptr),
m_localFrameDisplaySym(nullptr),
m_bailoutReturnValueSym(nullptr),
m_hasBailedOutSym(nullptr),
m_inlineeFrameStartSym(nullptr),
+ inlineeStart(nullptr),
m_regsUsed(0),
m_fg(nullptr),
m_labelCount(0),
m_argSlotsForFunctionsCalled(0),
m_hasCalls(false),
m_hasInlineArgsOpt(false),
+ m_hasInlineOverheadRemoved(false),
m_canDoInlineArgsOpt(true),
+ unoptimizableArgumentsObjReference(0),
+ unoptimizableArgumentsObjReferenceInInlinees(0),
m_doFastPaths(false),
hasBailout(false),
firstIRTemp(0),
@@ -92,6 +96,7 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
hasInlinee(false),
thisOrParentInlinerHasArguments(false),
hasStackArgs(false),
+ hasArgLenAndConstOpt(false),
hasImplicitParamLoad(false),
hasThrow(false),
hasNonSimpleParams(false),
@@ -106,6 +111,7 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
loopCount(0),
callSiteIdInParentFunc(callSiteIdInParentFunc),
isGetterSetter(isGetterSetter),
+ cachedInlineeFrameInfo(nullptr),
frameInfo(nullptr),
isTJLoopBody(false),
m_nativeCodeDataSym(nullptr),
@@ -134,23 +140,25 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
, vtableMap(nullptr)
#endif
, m_yieldOffsetResumeLabelList(nullptr)
- , m_bailOutNoSaveLabel(nullptr)
+ , m_bailOutForElidedYieldInsertionPoint(nullptr)
, constantAddressRegOpnd(alloc)
, lastConstantAddressRegLoadInstr(nullptr)
, m_totalJumpTableSizeInBytesForSwitchStatements(0)
- , slotArrayCheckTable(nullptr)
, frameDisplayCheckTable(nullptr)
, stackArgWithFormalsTracker(nullptr)
, m_forInLoopBaseDepth(0)
, m_forInEnumeratorArrayOffset(-1)
, argInsCount(0)
, m_globalObjTypeSpecFldInfoArray(nullptr)
+ , m_generatorFrameSym(nullptr)
#if LOWER_SPLIT_INT64
, m_int64SymPairMap(nullptr)
#endif
#ifdef RECYCLER_WRITE_BARRIER_JIT
, m_lowerer(nullptr)
#endif
+ , m_lazyBailOutRecordSlot(nullptr)
+ , hasLazyBailOut(false)
{
Assert(this->IsInlined() == !!runtimeInfo);
@@ -301,8 +309,10 @@ Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT)
{
bool rejit;
+ int rejitCounter = 0;
do
{
+ Assert(rejitCounter < 25);
Func func(alloc, workItem, threadContextInfo,
scriptContextInfo, outputData, epInfo, runtimeInfo,
polymorphicInlineCacheInfo, codeGenAllocators,
@@ -334,6 +344,8 @@ Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
case RejitReason::DisableStackArgOpt:
outputData->disableStackArgOpt = TRUE;
break;
+ case RejitReason::DisableStackArgLenAndConstOpt:
+ break;
case RejitReason::DisableSwitchOptExpectingInteger:
case RejitReason::DisableSwitchOptExpectingString:
outputData->disableSwitchOpt = TRUE;
@@ -366,6 +378,7 @@ Func::Codegen(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
}
rejit = true;
+ rejitCounter++;
}
// Either the entry point has a reference to the number now, or we failed to code gen and we
// don't need to numbers, we can flush the completed page now.
@@ -858,13 +871,6 @@ Func::AjustLocalVarSlotOffset()
}
#endif
-bool
-Func::DoGlobOptsForGeneratorFunc() const
-{
- // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
- return !GetJITFunctionBody()->IsCoroutine();
-}
-
bool
Func::DoSimpleJitDynamicProfile() const
{
@@ -1025,29 +1031,6 @@ Func::GetLocalsPointer() const
#endif
-void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
-{
- if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
- {
- return;
- }
-
- Assert(IsTopFunc());
- if (this->slotArrayCheckTable == nullptr)
- {
- this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
- }
-
- PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
- uint32 slot = propertySym->m_propertyId;
- uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
-
- if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
- {
- *pSlotId = propertySym->m_propertyId;
- }
-}
-
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
@@ -1348,6 +1331,10 @@ Func::EndPhase(Js::Phase tag, bool dump)
{
Assert(!this->isPostLower);
this->isPostLower = true;
+#if !defined(_M_ARM) && !defined(_M_ARM64) // Need to verify ARM is clean.
+ DbCheckPostLower dbCheck(this);
+ dbCheck.CheckNestedHelperCalls();
+#endif
}
else if (tag == Js::RegAllocPhase)
{
@@ -1381,6 +1368,30 @@ Func::EndPhase(Js::Phase tag, bool dump)
#endif
}
+StackSym *
+Func::EnsureBailoutReturnValueSym()
+{
+ if (m_bailoutReturnValueSym == nullptr)
+ {
+ m_bailoutReturnValueSym = StackSym::New(TyVar, this);
+ StackAllocate(m_bailoutReturnValueSym, sizeof(Js::Var));
+ }
+
+ return m_bailoutReturnValueSym;
+}
+
+StackSym *
+Func::EnsureHasBailedOutSym()
+{
+ if (m_hasBailedOutSym == nullptr)
+ {
+ m_hasBailedOutSym = StackSym::New(TyUint32, this);
+ StackAllocate(m_hasBailedOutSym, MachRegInt);
+ }
+
+ return m_hasBailedOutSym;
+}
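Aside (hypothetical caller, not from the patch): with these bailout syms now created on demand, lowering code is expected to go through the Ensure*/Get* accessors rather than touching the raw members. A minimal sketch, assuming a caller that materializes the flag as an operand:

    IR::RegOpnd * LoadHasBailedOutFlag(Func * func)
    {
        // Allocates the TyUint32 stack sym on first use, then reuses it.
        StackSym * hasBailedOutSym = func->EnsureHasBailedOutSym();
        return IR::RegOpnd::New(hasBailedOutSym, TyUint32, func);
    }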
+
StackSym *
Func::EnsureLoopParamSym()
{
@@ -2077,6 +2088,60 @@ Func::GetForInEnumeratorArrayOffset() const
+ this->m_forInLoopBaseDepth * sizeof(Js::ForInObjectEnumerator);
}
+void
+Func::SetHasLazyBailOut()
+{
+ this->hasLazyBailOut = true;
+}
+
+bool
+Func::HasLazyBailOut() const
+{
+ AssertMsg(
+ this->isPostRegAlloc,
+ "We don't know whether a function has lazy bailout until after RegAlloc"
+ );
+ return this->hasLazyBailOut;
+}
+
+void
+Func::EnsureLazyBailOutRecordSlot()
+{
+ if (this->m_lazyBailOutRecordSlot == nullptr)
+ {
+ this->m_lazyBailOutRecordSlot = StackSym::New(TyMachPtr, this);
+ this->StackAllocate(this->m_lazyBailOutRecordSlot, MachPtr);
+ }
+}
+
+StackSym *
+Func::GetLazyBailOutRecordSlot() const
+{
+ Assert(this->m_lazyBailOutRecordSlot != nullptr);
+ return this->m_lazyBailOutRecordSlot;
+}
+
+bool
+Func::ShouldDoLazyBailOut() const
+{
+#if defined(_M_X64)
+ if (!PHASE_ON1(Js::LazyBailoutPhase) ||
+ this->GetJITFunctionBody()->IsAsmJsMode() || // don't have bailouts in asm.js
+ this->HasTry() || // lazy bailout in function with try/catch not supported for now
+ // `EHBailoutPatchUp` set a `hasBailedOut` bit to rethrow the exception in the interpreter
+ // if the instruction has ANY bailout. In the future, to implement lazy bailout with try/catch,
+ // we would need to change how this bit is generated.
+ this->IsLoopBody()) // don't do lazy bailout on jit'd loop body either
+ {
+ return false;
+ }
+
+ return true;
+#else
+ return false;
+#endif
+}
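Aside: a sketch of the assumed call sequence for the new lazy-bailout plumbing (the phase function name below is invented); it uses only the Func methods introduced in this change.

    void PlanLazyBailOutsForFunc(Func * func)
    {
        if (!func->ShouldDoLazyBailOut())      // x64 only; no asm.js, try/catch, or jitted loop bodies
        {
            return;
        }
        func->EnsureLazyBailOutRecordSlot();   // reserve the stack slot for the BailOutRecord pointer once
        StackSym * recordSlot = func->GetLazyBailOutRecordSlot();
        // ... emit stores of the BailOutRecord address into recordSlot at each lazy bailout point ...
        (void)recordSlot;
        func->SetHasLazyBailOut();             // HasLazyBailOut() may only be queried after RegAlloc
    }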
+
#if DBG_DUMP
///----------------------------------------------------------------------------
///
diff --git a/lib/Backend/Func.h b/lib/Backend/Func.h
index 03eedb5f448..f1ca1dff944 100644
--- a/lib/Backend/Func.h
+++ b/lib/Backend/Func.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
@@ -119,11 +120,11 @@ class Func
Js::RegSlot returnValueRegSlot = Js::Constants::NoRegister, const bool isInlinedConstructor = false,
Js::ProfileId callSiteIdInParentFunc = UINT16_MAX, bool isGetterSetter = false);
public:
- void * const GetCodeGenAllocators()
+ void * GetCodeGenAllocators()
{
return this->GetTopFunc()->m_codeGenAllocators;
}
- InProcCodeGenAllocators * const GetInProcCodeGenAllocators()
+ InProcCodeGenAllocators * GetInProcCodeGenAllocators()
{
Assert(!JITManager::GetJITManager()->IsJITServer());
return reinterpret_cast<InProcCodeGenAllocators *>(this->GetTopFunc()->m_codeGenAllocators);
@@ -205,7 +206,8 @@ class Func
return
!PHASE_OFF(Js::GlobOptPhase, this) && !IsSimpleJit() &&
(!GetTopFunc()->HasTry() || GetTopFunc()->CanOptimizeTryCatch()) &&
- (!GetTopFunc()->HasFinally() || GetTopFunc()->CanOptimizeTryFinally());
+ (!GetTopFunc()->HasFinally() || GetTopFunc()->CanOptimizeTryFinally()) &&
+ (!GetTopFunc()->GetJITFunctionBody()->IsCoroutine() || !PHASE_OFF(Js::GeneratorGlobOptPhase, this));
}
bool DoInline() const
@@ -274,7 +276,7 @@ class Func
return &m_output;
}
- const JITTimeFunctionBody * const GetJITFunctionBody() const
+ const JITTimeFunctionBody * GetJITFunctionBody() const
{
return m_workItem->GetJITFunctionBody();
}
@@ -328,8 +330,6 @@ class Func
void AjustLocalVarSlotOffset();
#endif
- bool DoGlobOptsForGeneratorFunc() const;
-
static int32 AdjustOffsetValue(int32 offset);
static inline uint32 GetDiagLocalSlotSize()
@@ -416,32 +416,9 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
return !GetHasCalls() && !GetHasImplicitCalls();
}
- StackSym *EnsureLoopParamSym();
-
void UpdateForInLoopMaxDepth(uint forInLoopMaxDepth);
int GetForInEnumeratorArrayOffset() const;
- StackSym *GetFuncObjSym() const { return m_funcObjSym; }
- void SetFuncObjSym(StackSym *sym) { m_funcObjSym = sym; }
-
- StackSym *GetJavascriptLibrarySym() const { return m_javascriptLibrarySym; }
- void SetJavascriptLibrarySym(StackSym *sym) { m_javascriptLibrarySym = sym; }
-
- StackSym *GetScriptContextSym() const { return m_scriptContextSym; }
- void SetScriptContextSym(StackSym *sym) { m_scriptContextSym = sym; }
-
- StackSym *GetFunctionBodySym() const { return m_functionBodySym; }
- void SetFunctionBodySym(StackSym *sym) { m_functionBodySym = sym; }
-
- StackSym *GetLocalClosureSym() const { return m_localClosureSym; }
- void SetLocalClosureSym(StackSym *sym) { m_localClosureSym = sym; }
-
- StackSym *GetParamClosureSym() const { return m_paramClosureSym; }
- void SetParamClosureSym(StackSym *sym) { m_paramClosureSym = sym; }
-
- StackSym *GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
- void SetLocalFrameDisplaySym(StackSym *sym) { m_localFrameDisplaySym = sym; }
-
intptr_t GetJittedLoopIterationsSinceLastBailoutAddress() const;
void EnsurePinnedTypeRefs();
void PinTypeRef(void* typeRef);
@@ -511,10 +488,15 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
return m_inlineeFrameStartSym != nullptr;
}
- void SetInlineeFrameStartSym(StackSym *sym)
+ void SetInlineeStart(IR::Instr *inlineeStartInstr)
{
- Assert(m_inlineeFrameStartSym == nullptr);
- m_inlineeFrameStartSym = sym;
+ Assert(inlineeStart == nullptr);
+ inlineeStart = inlineeStartInstr;
+ }
+
+ IR::Instr* GetInlineeStart()
+ {
+ return inlineeStart;
}
IR::SymOpnd *GetInlineeArgCountSlotOpnd()
@@ -655,7 +637,6 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
PropertyIdSet lazyBailoutProperties;
bool anyPropertyMayBeWrittenTo;
- SlotArrayCheckTable *slotArrayCheckTable;
FrameDisplayCheckTable *frameDisplayCheckTable;
IR::Instr * m_headInstr;
@@ -669,16 +650,7 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
#endif
SymTable * m_symTable;
- StackSym * m_loopParamSym;
- StackSym * m_funcObjSym;
- StackSym * m_javascriptLibrarySym;
- StackSym * m_scriptContextSym;
- StackSym * m_functionBodySym;
- StackSym * m_localClosureSym;
- StackSym * m_paramClosureSym;
- StackSym * m_localFrameDisplaySym;
- StackSym * m_bailoutReturnValueSym;
- StackSym * m_hasBailedOutSym;
+
uint m_forInLoopMaxDepth;
uint m_forInLoopBaseDepth;
int32 m_forInEnumeratorArrayOffset;
@@ -712,16 +684,19 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
FlowGraph * m_fg;
unsigned int m_labelCount;
BitVector m_regsUsed;
- StackSym * tempSymDouble;
- StackSym * tempSymBool;
uint32 loopCount;
+ uint32 unoptimizableArgumentsObjReference;
+ uint32 unoptimizableArgumentsObjReferenceInInlinees;
Js::ProfileId callSiteIdInParentFunc;
+ InlineeFrameInfo* cachedInlineeFrameInfo;
bool m_hasCalls: 1; // This is more accurate compared to m_isLeaf
bool m_hasInlineArgsOpt : 1;
+ bool m_hasInlineOverheadRemoved : 1;
bool m_doFastPaths : 1;
bool hasBailout: 1;
bool hasBailoutInEHRegion : 1;
bool hasStackArgs: 1;
+ bool hasArgLenAndConstOpt : 1;
bool hasImplicitParamLoad : 1; // True if there is a load of CallInfo, FunctionObject
bool hasThrow : 1;
bool hasUnoptimizedArgumentsAccess : 1; // True if there are any arguments access beyond the simple case of this.apply pattern
@@ -741,7 +716,6 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
bool isPostPeeps:1;
bool isPostLayout:1;
bool isPostFinalLower:1;
-
struct InstrByteCodeRegisterUses
{
Js::OpCode capturingOpCode;
@@ -843,6 +817,7 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
{
curFunc->m_canDoInlineArgsOpt = false;
curFunc->m_hasInlineArgsOpt = false;
+ curFunc->frameInfo = nullptr;
curFunc = curFunc->GetParentFunc();
}
}
@@ -981,7 +956,6 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
void MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv);
void DisableConstandAddressLoadHoist() { canHoistConstantAddressLoad = false; }
- void AddSlotArrayCheck(IR::SymOpnd *fieldOpnd);
void AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId = (uint32)-1);
void EnsureStackArgWithFormalsTracker();
@@ -993,8 +967,6 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
StackSym* GetStackSymForFormal(Js::ArgSlot formalsIndex);
bool HasStackSymForFormal(Js::ArgSlot formalsIndex);
- void SetScopeObjSym(StackSym * sym);
- StackSym * GetScopeObjSym();
bool IsTrackCompoundedIntOverflowDisabled() const;
bool IsMemOpDisabled() const;
bool IsArrayCheckHoistDisabled() const;
@@ -1026,12 +998,9 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
uint32 m_inlineeId;
- IR::LabelInstr * m_bailOutNoSaveLabel;
+ IR::Instr * m_bailOutForElidedYieldInsertionPoint;
- StackSym * GetNativeCodeDataSym() const;
- void SetNativeCodeDataSym(StackSym * sym);
private:
-
Js::EntryPointInfo* m_entryPointInfo; // for in-proc JIT only
JITOutput m_output;
@@ -1040,7 +1009,7 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
#endif
Func * const topFunc;
Func * const parentFunc;
- StackSym * m_inlineeFrameStartSym;
+ IR::Instr * inlineeStart;
uint maxInlineeArgOutSize;
const bool m_isBackgroundJIT;
bool hasInstrNumber;
@@ -1066,11 +1035,9 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
YieldOffsetResumeLabelList * m_yieldOffsetResumeLabelList;
StackArgWithFormalsTracker * stackArgWithFormalsTracker;
ObjTypeSpecFldInfo ** m_globalObjTypeSpecFldInfoArray;
- StackSym *CreateInlineeStackSym();
IR::SymOpnd *GetInlineeOpndAtOffset(int32 offset);
bool HasLocalVarSlotCreated() const { return m_localVarSlotsOffset != Js::Constants::InvalidOffset; }
void EnsureLocalVarSlots();
- StackSym * m_nativeCodeDataSym;
SList constantAddressRegOpnd;
IR::Instr * lastConstantAddressRegLoadInstr;
bool canHoistConstantAddressLoad;
@@ -1087,6 +1054,79 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
public:
Lowerer* m_lowerer;
#endif
+
+private:
+ StackSym* m_localClosureSym;
+ StackSym* m_paramClosureSym;
+ StackSym* m_localFrameDisplaySym;
+ StackSym* m_nativeCodeDataSym;
+ StackSym* m_inlineeFrameStartSym;
+ StackSym* m_loopParamSym;
+ StackSym* m_bailoutReturnValueSym;
+ StackSym* m_hasBailedOutSym;
+ StackSym* m_generatorFrameSym;
+
+public:
+ StackSym* tempSymDouble;
+ StackSym* tempSymBool;
+
+ void SetGeneratorFrameSym(StackSym* sym)
+ {
+ Assert(this->m_generatorFrameSym == nullptr);
+ this->m_generatorFrameSym = sym;
+ }
+
+ StackSym* GetGeneratorFrameSym() const
+ {
+ return this->m_generatorFrameSym;
+ }
+
+ // StackSyms' corresponding getters/setters
+ void SetInlineeFrameStartSym(StackSym* sym)
+ {
+ Assert(m_inlineeFrameStartSym == nullptr);
+ m_inlineeFrameStartSym = sym;
+ }
+
+ StackSym* EnsureHasBailedOutSym();
+ StackSym* GetHasBailedOutSym() const { return m_hasBailedOutSym; }
+
+ StackSym* EnsureBailoutReturnValueSym();
+ StackSym* GetBailoutReturnValueSym() const { return m_bailoutReturnValueSym; }
+
+ StackSym* EnsureLoopParamSym();
+ StackSym* GetLoopParamSym() const { return m_loopParamSym; }
+
+ StackSym* GetLocalClosureSym() const { return m_localClosureSym; }
+ void SetLocalClosureSym(StackSym* sym) { m_localClosureSym = sym; }
+
+ StackSym* GetParamClosureSym() const { return m_paramClosureSym; }
+ void SetParamClosureSym(StackSym* sym) { m_paramClosureSym = sym; }
+
+ StackSym* GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
+ void SetLocalFrameDisplaySym(StackSym* sym) { m_localFrameDisplaySym = sym; }
+
+ void SetScopeObjSym(StackSym* sym);
+ StackSym* GetScopeObjSym();
+
+ StackSym* GetNativeCodeDataSym() const;
+ void SetNativeCodeDataSym(StackSym* sym);
+
+ StackSym* CreateInlineeStackSym();
+
+ // Lazy bailout
+ // The stack sym is used to store the pointer to
+ // the BailOutRecord associated with the lazy bailout point
+private:
+ bool hasLazyBailOut : 1;
+ StackSym * m_lazyBailOutRecordSlot;
+
+public:
+ void EnsureLazyBailOutRecordSlot();
+ StackSym *GetLazyBailOutRecordSlot() const;
+ void SetHasLazyBailOut();
+ bool HasLazyBailOut() const;
+ bool ShouldDoLazyBailOut() const;
};
class AutoCodeGenPhase
diff --git a/lib/Backend/FunctionCodeGenJitTimeData.cpp b/lib/Backend/FunctionCodeGenJitTimeData.cpp
index 65760b69131..d1bc6b31bb8 100644
--- a/lib/Backend/FunctionCodeGenJitTimeData.cpp
+++ b/lib/Backend/FunctionCodeGenJitTimeData.cpp
@@ -16,6 +16,8 @@ namespace Js
#endif
next(nullptr),
ldFldInlinees(nullptr),
+ callbackInlinees(nullptr),
+ callApplyTargetInlinees(nullptr),
globalThisObject(globalThis),
profiledIterations(profiledIterations),
sharedPropertyGuards(nullptr),
@@ -110,6 +112,14 @@ namespace Js
return callbackInlinees ? callbackInlinees[profiledCallSiteId] : nullptr;
}
+ const FunctionCodeGenJitTimeData * FunctionCodeGenJitTimeData::GetCallApplyTargetInlinee(const ProfileId callApplyCallSiteId) const
+ {
+ Assert(GetFunctionBody());
+ Assert(callApplyCallSiteId < GetFunctionBody()->GetProfiledCallApplyCallSiteCount());
+
+ return callApplyTargetInlinees ? callApplyTargetInlinees[callApplyCallSiteId] : nullptr;
+ }
+
FunctionCodeGenJitTimeData *FunctionCodeGenJitTimeData::AddInlinee(
Recycler *const recycler,
const ProfileId profiledCallSiteId,
@@ -197,6 +207,32 @@ namespace Js
return inlineeData;
}
+ FunctionCodeGenJitTimeData * FunctionCodeGenJitTimeData::AddCallApplyTargetInlinee(
+ Recycler *const recycler,
+ const ProfileId profiledCallSiteId,
+ const ProfileId callApplyCallSiteId,
+ FunctionInfo *const inlinee)
+ {
+ Assert(recycler != nullptr);
+ FunctionBody * functionBody = GetFunctionBody();
+ Assert(functionBody != nullptr);
+ Assert(profiledCallSiteId < functionBody->GetProfiledCallSiteCount());
+ Assert(callApplyCallSiteId < functionBody->GetProfiledCallApplyCallSiteCount());
+ Assert(inlinee != nullptr);
+
+ if (!callApplyTargetInlinees)
+ {
+ callApplyTargetInlinees = RecyclerNewArrayZ(recycler, Field(FunctionCodeGenJitTimeData *), functionBody->GetProfiledCallApplyCallSiteCount());
+ }
+
+ // Polymorphic call/apply targets are not inlined.
+ Assert(callApplyTargetInlinees[callApplyCallSiteId] == nullptr);
+
+ FunctionCodeGenJitTimeData * inlineeData = FunctionCodeGenJitTimeData::New(recycler, inlinee, nullptr /* entryPoint */, true /*isInlined*/);
+ callApplyTargetInlinees[callApplyCallSiteId] = inlineeData;
+ return inlineeData;
+ }
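Aside (assumed usage; the identifiers below are placeholders, not code from the patch): the add/get pair keyed by callApplyCallSiteId is meant to be used roughly as follows by the inliner and by the JIT-time data builder.

    // Inlining decision time: remember the call/apply target for this call-apply call site.
    jitTimeData->AddCallApplyTargetInlinee(recycler, profiledCallSiteId, callApplyCallSiteId, targetFunctionInfo);

    // JIT-time data construction: read it back per call-apply call site.
    const Js::FunctionCodeGenJitTimeData * target = jitTimeData->GetCallApplyTargetInlinee(callApplyCallSiteId);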
+
uint FunctionCodeGenJitTimeData::InlineeCount() const
{
return inlineeCount;
diff --git a/lib/Backend/FunctionCodeGenJitTimeData.h b/lib/Backend/FunctionCodeGenJitTimeData.h
index f5d7a4b4f47..bf4ffb78d47 100644
--- a/lib/Backend/FunctionCodeGenJitTimeData.h
+++ b/lib/Backend/FunctionCodeGenJitTimeData.h
@@ -32,6 +32,8 @@ namespace Js
Field(Field(FunctionCodeGenJitTimeData*)*) inlinees;
Field(Field(FunctionCodeGenJitTimeData*)*) ldFldInlinees;
Field(Field(FunctionCodeGenJitTimeData*)*) callbackInlinees;
+ Field(Field(FunctionCodeGenJitTimeData*)*) callApplyTargetInlinees;
+
Field(RecyclerWeakReference<FunctionBody>*) weakFuncRef;
Field(PolymorphicInlineCacheInfoIDL*) inlineeInfo;
@@ -92,6 +94,7 @@ namespace Js
const FunctionCodeGenJitTimeData *GetInlinee(const ProfileId profiledCallSiteId) const;
const FunctionCodeGenJitTimeData *GetLdFldInlinee(const InlineCacheIndex inlineCacheIndex) const;
const FunctionCodeGenJitTimeData * GetCallbackInlinee(const ProfileId profiledCallSiteId) const;
+ const FunctionCodeGenJitTimeData * GetCallApplyTargetInlinee(const ProfileId callApplyCallSiteId) const;
FunctionCodeGenJitTimeData *AddInlinee(
Recycler *const recycler,
const ProfileId profiledCallSiteId,
@@ -125,6 +128,12 @@ namespace Js
const ProfileId profiledCallSiteId,
FunctionInfo *const inlinee);
+ FunctionCodeGenJitTimeData * AddCallApplyTargetInlinee(
+ Recycler *const recycler,
+ const ProfileId profiledCallSiteId,
+ const ProfileId callApplyCallSiteId,
+ FunctionInfo *const inlinee);
+
bool IsPolymorphicCallSite(const ProfileId profiledCallSiteId) const;
// This function walks all the chained jittimedata and returns the one which match the functionInfo.
// This can return null, if the functionInfo doesn't match.
diff --git a/lib/Backend/FunctionJITTimeInfo.cpp b/lib/Backend/FunctionJITTimeInfo.cpp
index a6c0fb897f6..700000d7222 100644
--- a/lib/Backend/FunctionJITTimeInfo.cpp
+++ b/lib/Backend/FunctionJITTimeInfo.cpp
@@ -94,7 +94,6 @@ FunctionJITTimeInfo::BuildJITTimeData(
}
}
- jitData->callbackInlineeCount = jitData->bodyData->profiledCallSiteCount;
jitData->callbackInlinees = AnewArrayZ(alloc, FunctionJITTimeDataIDL*, jitData->bodyData->profiledCallSiteCount);
for (Js::ProfileId i = 0; i < jitData->bodyData->profiledCallSiteCount; ++i)
@@ -111,6 +110,26 @@ FunctionJITTimeInfo::BuildJITTimeData(
BuildJITTimeData(alloc, inlineeJITData, inlineeRuntimeData, jitData->callbackInlinees[i], true, isForegroundJIT);
}
}
+
+ jitData->callApplyTargetInlineeCount = jitData->bodyData->profiledCallApplyCallSiteCount;
+ if (jitData->bodyData->profiledCallApplyCallSiteCount > 0)
+ {
+ jitData->callApplyTargetInlinees = AnewArrayZ(alloc, FunctionJITTimeDataIDL*, jitData->bodyData->profiledCallApplyCallSiteCount);
+ }
+ for (Js::ProfileId i = 0; i < jitData->bodyData->profiledCallApplyCallSiteCount; ++i)
+ {
+ const Js::FunctionCodeGenJitTimeData * inlineeJITData = codeGenData->GetCallApplyTargetInlinee(i);
+ if (inlineeJITData != nullptr)
+ {
+ const Js::FunctionCodeGenRuntimeData * inlineeRuntimeData = nullptr;
+ if (inlineeJITData->GetFunctionInfo()->HasBody())
+ {
+ inlineeRuntimeData = isInlinee ? targetRuntimeData->GetCallApplyTargetInlinee(i) : functionBody->GetCallApplyTargetInlineeCodeGenRuntimeData(i);
+ }
+ jitData->callApplyTargetInlinees[i] = AnewStructZ(alloc, FunctionJITTimeDataIDL);
+ BuildJITTimeData(alloc, inlineeJITData, inlineeRuntimeData, jitData->callApplyTargetInlinees[i], true, isForegroundJIT);
+ }
+ }
}
jitData->profiledRuntimeData = AnewStructZ(alloc, FunctionJITRuntimeIDL);
if (isInlinee && targetRuntimeData->ClonedInlineCaches()->HasInlineCaches())
@@ -293,6 +312,12 @@ FunctionJITTimeInfo::GetInlineeForCallbackInlineeRuntimeData(const Js::ProfileId
return inlineeData->GetRuntimeInfo();
}
+const FunctionJITRuntimeInfo *
+FunctionJITTimeInfo::GetCallApplyTargetInlineeRuntimeData(const Js::ProfileId callApplyCallSiteId) const
+{
+ return GetCallApplyTargetInlinee(callApplyCallSiteId) ? GetCallApplyTargetInlinee(callApplyCallSiteId)->GetRuntimeInfo() : nullptr;
+}
+
const FunctionJITRuntimeInfo *
FunctionJITTimeInfo::GetRuntimeInfo() const
{
@@ -353,11 +378,23 @@ FunctionJITTimeInfo::GetCallbackInlinee(Js::ProfileId profileId) const
{
return nullptr;
}
- AssertOrFailFast(profileId < m_data.callbackInlineeCount);
+ AssertOrFailFast(profileId < m_data.inlineeCount);
return reinterpret_cast<const FunctionJITTimeInfo *>(m_data.callbackInlinees[profileId]);
}
+const FunctionJITTimeInfo *
+FunctionJITTimeInfo::GetCallApplyTargetInlinee(Js::ProfileId callApplyCallSiteId) const
+{
+ if (!m_data.callApplyTargetInlinees)
+ {
+ return nullptr;
+ }
+ AssertOrFailFast(callApplyCallSiteId < m_data.bodyData->profiledCallApplyCallSiteCount);
+
+ return reinterpret_cast<const FunctionJITTimeInfo *>(m_data.callApplyTargetInlinees[callApplyCallSiteId]);
+}
+
const FunctionJITTimeInfo *
FunctionJITTimeInfo::GetLdFldInlinee(Js::InlineCacheIndex inlineCacheIndex) const
{
diff --git a/lib/Backend/FunctionJITTimeInfo.h b/lib/Backend/FunctionJITTimeInfo.h
index e204c1395a9..2f8c2952891 100644
--- a/lib/Backend/FunctionJITTimeInfo.h
+++ b/lib/Backend/FunctionJITTimeInfo.h
@@ -21,6 +21,8 @@ class FunctionJITTimeInfo
bool IsLdFldInlineePresent() const;
const FunctionJITTimeInfo * GetCallbackInlinee(Js::ProfileId profileId) const;
+ const FunctionJITTimeInfo * GetCallApplyTargetInlinee(Js::ProfileId profileId) const;
+ const Js::ProfileId GetCallApplyCallSiteIdForCallSiteId(Js::ProfileId profiledCallSiteId) const;
const FunctionJITTimeInfo * GetLdFldInlinee(Js::InlineCacheIndex inlineCacheIndex) const;
const FunctionJITTimeInfo * GetInlinee(Js::ProfileId profileId) const;
const FunctionJITTimeInfo * GetNext() const;
@@ -46,6 +48,7 @@ class FunctionJITTimeInfo
const FunctionJITRuntimeInfo *GetLdFldInlineeRuntimeData(const Js::InlineCacheIndex inlineCacheIndex) const;
const FunctionJITRuntimeInfo * GetCallbackInlineeRuntimeData(const Js::ProfileId profiledCallSiteId) const;
const FunctionJITRuntimeInfo * GetInlineeForCallbackInlineeRuntimeData(const Js::ProfileId profiledCallSiteId, intptr_t inlineeFuncBodyAddr) const;
+ const FunctionJITRuntimeInfo * GetCallApplyTargetInlineeRuntimeData(const Js::ProfileId callApplyCallSiteId) const;
bool ForceJITLoopBody() const;
bool HasSharedPropertyGuards() const;
bool HasSharedPropertyGuard(Js::PropertyId id) const;
diff --git a/lib/Backend/GlobHashTable.h b/lib/Backend/GlobHashTable.h
index 65418bcb931..7c390752970 100644
--- a/lib/Backend/GlobHashTable.h
+++ b/lib/Backend/GlobHashTable.h
@@ -31,18 +31,30 @@ class Key
static uint Get(ExprHash hash) { return static_cast(hash); }
};
-#define FOREACH_GLOBHASHTABLE_ENTRY(bucket, hashTable) \
+#define FOREACH_VALUEHASHTABLE_ENTRY(BucketType, bucket, hashTable) \
for (uint _iterHash = 0; _iterHash < (hashTable)->tableSize; _iterHash++) \
{ \
- FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &(hashTable)->table[_iterHash]) \
+ FOREACH_SLISTBASE_ENTRY(BucketType, bucket, &(hashTable)->table[_iterHash]) \
{
-#define NEXT_GLOBHASHTABLE_ENTRY \
+#define NEXT_VALUEHASHTABLE_ENTRY \
} \
NEXT_SLISTBASE_ENTRY; \
}
+#define FOREACH_VALUEHASHTABLE_ENTRY_EDITING(BucketType, bucket, hashTable, iter) \
+ for (uint _iterHash = 0; _iterHash < (hashTable)->tableSize; _iterHash++) \
+ { \
+ FOREACH_SLISTBASE_ENTRY_EDITING(BucketType, bucket, &(hashTable)->table[_iterHash], iter) \
+ {
+
+
+#define NEXT_VALUEHASHTABLE_ENTRY_EDITING \
+ } \
+ NEXT_SLISTBASE_ENTRY_EDITING; \
+ }
+
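Aside (usage sketch only; the table name, bucket predicate, and allocator are hypothetical, and the exact RemoveCurrent overload depends on the SListBase editing iterator): the _EDITING variant exists so entries can be removed while the table is being walked.

    FOREACH_VALUEHASHTABLE_ENTRY_EDITING(HashBucket, bucket, symToValueMap, iter)
    {
        if (bucket.element == nullptr)
        {
            iter.RemoveCurrent(alloc);   // removal mid-iteration is only safe with the editing iterator
        }
    }
    NEXT_VALUEHASHTABLE_ENTRY_EDITING;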
template <typename TData, typename TElement>
class ValueHashTable
{
@@ -390,7 +402,7 @@ class ValueHashTable
#if DBG_DUMP
void Dump()
{
- FOREACH_GLOBHASHTABLE_ENTRY(bucket, this)
+ FOREACH_VALUEHASHTABLE_ENTRY(HashBucket, bucket, this)
{
Output::Print(_u("%4d => "), bucket.value);
@@ -398,20 +410,20 @@ class ValueHashTable
Output::Print(_u("\n"));
Output::Print(_u("\n"));
}
- NEXT_GLOBHASHTABLE_ENTRY;
+ NEXT_VALUEHASHTABLE_ENTRY;
}
void Dump(void (*valueDump)(TData))
{
Output::Print(_u("\n-------------------------------------------------------------------------------------------------\n"));
- FOREACH_GLOBHASHTABLE_ENTRY(bucket, this)
+ FOREACH_VALUEHASHTABLE_ENTRY(HashBucket, bucket, this)
{
valueDump(bucket.value);
Output::Print(_u(" => "), bucket.value);
bucket.element->Dump();
Output::Print(_u("\n"));
}
- NEXT_GLOBHASHTABLE_ENTRY;
+ NEXT_VALUEHASHTABLE_ENTRY;
}
#endif
diff --git a/lib/Backend/GlobOpt.cpp b/lib/Backend/GlobOpt.cpp
index 7e6bea97f02..578b009f907 100644
--- a/lib/Backend/GlobOpt.cpp
+++ b/lib/Backend/GlobOpt.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
@@ -87,6 +88,7 @@ GlobOpt::GlobOpt(Func * func)
updateInductionVariableValueNumber(false),
isPerformingLoopBackEdgeCompensation(false),
currentRegion(nullptr),
+ auxSlotPtrSyms(nullptr),
changedSymsAfterIncBailoutCandidate(nullptr),
doTypeSpec(
!IsTypeSpecPhaseOff(func)),
@@ -164,7 +166,13 @@ void
GlobOpt::Optimize()
{
this->objectTypeSyms = nullptr;
- this->func->argInsCount = this->func->GetInParamsCount() - 1; //Don't include "this" pointer in the count.
+
+ this->func->argInsCount = this->func->GetInParamsCount();
+ if (!func->GetJITFunctionBody()->IsAsmJsMode())
+ {
+ // Don't include "this" pointer in the count when not in AsmJs mode (AsmJS does not have "this").
+ this->func->argInsCount--;
+ }
if (!func->DoGlobOpt())
{
@@ -174,7 +182,7 @@ GlobOpt::Optimize()
// Still need to run the dead store phase to calculate the live reg on back edge
this->BackwardPass(Js::DeadStorePhase);
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(nullptr);
return;
}
@@ -353,6 +361,8 @@ GlobOpt::ForwardPass()
// changedSymsAfterIncBailoutCandidate helps track building incremental bailout in ForwardPass
this->changedSymsAfterIncBailoutCandidate = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
+ this->auxSlotPtrSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
+
#if DBG
this->byteCodeUsesBeforeOpt = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) && this->DoFunctionFieldCopyProp())
@@ -434,6 +444,7 @@ GlobOpt::ForwardPass()
// this->alloc will be freed right after return, no need to free it here
this->changedSymsAfterIncBailoutCandidate = nullptr;
+ this->auxSlotPtrSyms = nullptr;
END_CODEGEN_PHASE(this->func, Js::ForwardPhase);
}
@@ -457,7 +468,7 @@ GlobOpt::OptBlock(BasicBlock *block)
{
loop->fieldPRESymStores->Or(loop->parent->fieldPRESymStores);
}
-
+
if (!this->IsLoopPrePass() && DoFieldPRE(loop))
{
// Note: !IsLoopPrePass means this was a root loop pre-pass. FieldPre() is called once per loop.
@@ -486,7 +497,7 @@ GlobOpt::OptBlock(BasicBlock *block)
{
this->KillAllFields(CurrentBlockData()->liveFields);
}
-
+
this->tempAlloc->Reset();
if(loop && block->isLoopHeader)
@@ -828,14 +839,19 @@ GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
}
void
-GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
+GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IR::Instr* insertBeforeInstr /* = nullptr */)
{
FOREACH_BITSET_IN_SPARSEBV(id, bv)
{
StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
- IR::Instr *lastInstr = block->GetLastInstr();
- if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
+ IR::Instr* lastInstr = block->GetLastInstr();
+
+ if (insertBeforeInstr != nullptr)
+ {
+ this->ToVar(insertBeforeInstr, newOpnd, block, nullptr, false);
+ }
+ else if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
{
// If branch is using this symbol, hoist the operand as the ToVar load will get
// inserted right before the branch.
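Aside (hypothetical call; the argument names are invented): the new optional parameter lets a caller pin the ToVar loads to a specific insertion point, for instance a generator bail-in label, instead of the end of the block.

    globOpt->ToVar(symsToBoxAsVar, block, bailInLabelInstr /* insertBeforeInstr */);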
@@ -891,7 +907,7 @@ GlobOpt::ToTypeSpec(BVSparse *bv, BasicBlock *block, IRType t
// instruction itself should disable arguments object optimization.
if(block->globOptData.argObjSyms && block->globOptData.IsArgumentsSymID(id))
{
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(nullptr);
}
if (block->globOptData.liveVarSyms->Test(id))
@@ -972,7 +988,7 @@ BOOL GlobOpt::PRE::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
// We'll have to add a def instruction for the object sym in the landing pad, and then we can continue
// pre-loading the current PRE candidate.
// Case in point:
- // $L1
+ // $L1
// value|symStore
// t1 = o.x (v1|t3)
// t2 = t1.y (v2|t4) <-- t1 is not live in the loop landing pad
@@ -1032,15 +1048,15 @@ BOOL GlobOpt::PRE::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
ldInstr->SetDst(IR::RegOpnd::New(symStore->AsStackSym(), TyVar, this->globOpt->func));
loop->fieldPRESymStores->Set(symStore->m_id);
landingPad->globOptData.liveVarSyms->Set(symStore->m_id);
-
+
Value * objPtrValue = landingPad->globOptData.FindValue(objPtrSym);
objPtrCopyPropSym = objPtrCopyPropSym ? objPtrCopyPropSym : objPtrValue ? landingPad->globOptData.GetCopyPropSym(objPtrSym, objPtrValue) : nullptr;
if (objPtrCopyPropSym)
{
- // If we inserted T4 = T1.y, and T3 is the copy prop sym for T1 in the landing pad, we need T3.y
- // to be live on back edges to have the merge produce a value for T3.y. Having a value for T1.y
- // produced from the merge is not enough as the T1.y in the loop will get obj-ptr-copy-propped to
+ // If we inserted T4 = T1.y, and T3 is the copy prop sym for T1 in the landing pad, we need T3.y
+ // to be live on back edges to have the merge produce a value for T3.y. Having a value for T1.y
+ // produced from the merge is not enough as the T1.y in the loop will get obj-ptr-copy-propped to
// T3.y
// T3.y
@@ -1521,7 +1537,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
if (instr->m_func->GetJITFunctionBody()->GetInParamsCount() != 1 && !instr->m_func->IsStackArgsEnabled())
{
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
}
else
{
@@ -1536,7 +1552,18 @@ GlobOpt::OptArguments(IR::Instr *instr)
// In the debug mode, we don't want to optimize away the aliases. Since we may have to show them on the inspection.
if (((!AreFromSameBytecodeFunc(src1->AsRegOpnd(), dst->AsRegOpnd()) || this->currentBlock->loop) && instr->m_opcode != Js::OpCode::BytecodeArgOutCapture) || this->func->IsJitInDebugMode())
{
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
+ return;
+ }
+
+ // Disable stack args if we are aliasing arguments inside a try block to a write-through symbol.
+ // We don't track these symbols precisely, so the bailout wouldn't know whether it needs to restore the arguments object after an exception.
+ Region* tryRegion = this->currentRegion ? this->currentRegion->GetSelfOrFirstTryAncestor() : nullptr;
+ if (tryRegion && tryRegion->GetType() == RegionTypeTry &&
+ tryRegion->writeThroughSymbolsSet &&
+ tryRegion->writeThroughSymbolsSet->Test(dst->AsRegOpnd()->m_sym->m_id))
+ {
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
return;
}
if(!dst->AsRegOpnd()->GetStackSym()->m_nonEscapingArgObjAlias)
@@ -1570,7 +1597,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
if (indexOpnd && CurrentBlockData()->IsArgumentsSymID(indexOpnd->m_sym->m_id))
{
// Pathological test cases such as a[arguments]
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
return;
}
@@ -1597,6 +1624,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
if (CurrentBlockData()->IsArgumentsOpnd(src1))
{
instr->usesStackArgumentsObject = true;
+ instr->m_func->unoptimizableArgumentsObjReference++;
}
if (CurrentBlockData()->IsArgumentsOpnd(src1) &&
@@ -1616,6 +1644,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
if (builtinFunction == Js::BuiltinFunction::JavascriptFunction_Apply)
{
CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
+ instr->m_func->unoptimizableArgumentsObjReference--;
}
}
else if (builtinOpnd->IsRegOpnd())
@@ -1623,6 +1652,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
if (builtinOpnd->AsRegOpnd()->m_sym->m_builtInIndex == Js::BuiltinFunction::JavascriptFunction_Apply)
{
CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
+ instr->m_func->unoptimizableArgumentsObjReference--;
}
}
}
@@ -1659,7 +1689,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
}
#endif
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
return;
}
}
@@ -1677,7 +1707,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
}
#endif
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
return;
}
}
@@ -1696,7 +1726,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
}
#endif
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
return;
}
}
@@ -1710,7 +1740,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
}
#endif
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(instr->m_func);
return;
}
CurrentBlockData()->ClearArgumentsSym(dst->AsRegOpnd());
@@ -2241,10 +2271,20 @@ GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Va
if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
{
loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
+ if (sym->m_id != inductionSymID)
+ {
+ // The backward pass uses this bit-vector to look up upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
+ loop->memOpInfo->inductionVariableChangeInfoMap->Add(sym->m_id, inductionVariableChangeInfo);
+ }
}
else
{
loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
+ if (sym->m_id != inductionSymID)
+ {
+ // The backward pass uses this bit-vector to look up upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
+ loop->memOpInfo->inductionVariableChangeInfoMap->Item(sym->m_id, inductionVariableChangeInfo);
+ }
}
}
else
@@ -2253,12 +2293,17 @@ GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Va
{
Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 1, isIncr };
loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
+ if (sym->m_id != inductionSymID)
+ {
+ // The backward pass uses this bit-vector to look up upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
+ loop->memOpInfo->inductionVariableChangeInfoMap->Add(sym->m_id, inductionVariableChangeInfo);
+ }
}
else
{
Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(inductionSymID, inductionVariableChangeInfo);
-
+
// If inductionVariableChangeInfo.unroll has been invalidated, do
// not modify the Js::Constants::InvalidLoopUnrollFactor value
if (inductionVariableChangeInfo.unroll != Js::Constants::InvalidLoopUnrollFactor)
@@ -2267,6 +2312,11 @@ GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Va
}
inductionVariableChangeInfo.isIncremental = isIncr;
loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
+ if (sym->m_id != inductionSymID)
+ {
+ // The backward pass uses this bit-vector to look up upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
+ loop->memOpInfo->inductionVariableChangeInfoMap->Item(sym->m_id, inductionVariableChangeInfo);
+ }
}
}
break;
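A minimal, self-contained sketch of the bookkeeping the comments above describe: the induction-variable change info is recorded under both the type-specialized sym id and the original var sym id, so a later lookup by either id (as the backward pass does) sees the same entry. The map and struct below are illustrative stand-ins for loop->memOpInfo->inductionVariableChangeInfoMap, not ChakraCore types.

#include <cstdint>
#include <unordered_map>

using SymID = uint32_t;

struct InductionVariableChangeInfo
{
    uint8_t unroll;       // accumulated per-iteration change
    bool isIncremental;   // direction of the change
};

// Record the change under both ids so a lookup by either the var sym or the
// int-specialized sym finds the same information.
void RecordChange(std::unordered_map<SymID, InductionVariableChangeInfo>& changeMap,
                  SymID varSymId, SymID inductionSymId,
                  InductionVariableChangeInfo info)
{
    changeMap[inductionSymId] = info;
    if (varSymId != inductionSymId)
    {
        changeMap[varSymId] = info;   // keep both entries in sync
    }
}

int main()
{
    std::unordered_map<SymID, InductionVariableChangeInfo> changeMap;
    RecordChange(changeMap, /*varSymId*/ 12, /*inductionSymId*/ 47, { 1, true });
    return (changeMap.count(12) == 1 && changeMap.count(47) == 1) ? 0 : 1;
}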
@@ -2315,7 +2365,10 @@ GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Va
// Line #2: s3(s1) = Ld_A s4(s2)
// do not consider line #2 as a violating instr
(instr->m_opcode == Js::OpCode::Ld_I4 &&
- prevInstr && (prevInstr->m_opcode == Js::OpCode::Add_I4 || prevInstr->m_opcode == Js::OpCode::Sub_I4) &&
+ // Note: Ld_A covers the case where the add was by 0
+ prevInstr && (prevInstr->m_opcode == Js::OpCode::Add_I4 ||
+ prevInstr->m_opcode == Js::OpCode::Sub_I4 ||
+ prevInstr->m_opcode == Js::OpCode::Ld_A ) &&
instr->GetSrc1()->IsRegOpnd() &&
instr->GetDst()->IsRegOpnd() &&
prevInstr->GetDst()->IsRegOpnd() &&
@@ -2436,15 +2489,15 @@ GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
return instrNext;
}
- if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
+ if (instr->m_opcode == Js::OpCode::Yield)
{
- return instrNext;
+ // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
+ this->ProcessKills(instr);
}
- if (instr->m_opcode == Js::OpCode::Yield)
+ if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
{
- // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
- CurrentBlockData()->KillStateForGeneratorYield();
+ return instrNext;
}
if (!IsLoopPrePass())
@@ -2467,7 +2520,7 @@ GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
//StackArguments Optimization - We bail out if the index is out of range of actuals.
if ((instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
- instr->DoStackArgsOpt(this->func) && !this->IsLoopPrePass())
+ instr->DoStackArgsOpt() && !this->IsLoopPrePass())
{
GenerateBailAtOperation(&instr, IR::BailOnStackArgsOutOfActualsRange);
}
@@ -2509,6 +2562,7 @@ GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
OptimizeChecks(instr);
OptArraySrc(&instr, &src1Val, &src2Val);
OptNewScObject(&instr, src1Val);
+ OptStackArgLenAndConst(instr, &src1Val);
instr = this->OptPeep(instr, src1Val, src2Val);
@@ -2736,6 +2790,11 @@ GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
}
}
+ if (this->IsLazyBailOutCurrentlyNeeded(instr, src1Val, src2Val, isHoisted))
+ {
+ this->GenerateLazyBailOut(instr);
+ }
+
if (CurrentBlockData()->capturedValuesCandidate && !this->IsLoopPrePass())
{
this->CommitCapturedValuesCandidate();
@@ -2754,7 +2813,7 @@ GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
}
bool
-GlobOpt::IsNonNumericRegOpnd(IR::RegOpnd *opnd, bool inGlobOpt, bool *isSafeToTransferInPrepass /*=nullptr*/) const
+GlobOpt::IsNonNumericRegOpnd(IR::RegOpnd* opnd, bool inGlobOpt, bool* isSafeToTransferInPrepass /*=nullptr*/) const
{
if (opnd == nullptr)
{
@@ -3047,13 +3106,11 @@ GlobOpt::OptDst(
else if (dstVal)
{
opnd->SetValueType(dstVal->GetValueInfo()->Type());
-
- if(currentBlock->loop &&
+ if (currentBlock->loop &&
!IsLoopPrePass() &&
(instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
instr->GetSrc1()->IsRegOpnd() &&
- !func->IsJitInDebugMode() &&
- func->DoGlobOptsForGeneratorFunc())
+ !func->IsJitInDebugMode())
{
// Look for the following patterns:
//
@@ -3176,7 +3233,7 @@ GlobOpt::SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *pro
Value *landingPadObjPtrVal, *currentObjPtrVal;
landingPadObjPtrVal = loop->landingPad->globOptData.FindValue(objectSym);
currentObjPtrVal = CurrentBlockData()->FindValue(objectSym);
-
+
auto CanSetInitialValue = [&]() -> bool {
if (!currentObjPtrVal)
{
@@ -3385,7 +3442,7 @@ GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, I
opnd->AsSymOpnd()->SetPropertyOwnerValueType(
objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
-
+
sym = this->CopyPropPropertySymObj(opnd->AsSymOpnd(), instr);
if (!DoFieldCopyProp())
@@ -3407,7 +3464,9 @@ GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, I
case Js::OpCode::ScopedDeleteFldStrict:
case Js::OpCode::LdMethodFromFlags:
case Js::OpCode::BrOnNoProperty:
+ case Js::OpCode::BrOnNoLocalProperty:
case Js::OpCode::BrOnHasProperty:
+ case Js::OpCode::BrOnHasLocalProperty:
case Js::OpCode::LdMethodFldPolyInlineMiss:
case Js::OpCode::StSlotChkUndecl:
case Js::OpCode::ScopedLdInst:
@@ -3435,7 +3494,7 @@ GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, I
}
}
}
- break;
+ break;
}
case IR::OpndKindReg:
// Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
@@ -3571,7 +3630,7 @@ GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, I
if (profiledArrayType.IsLikelyObject())
{
// Ideally we want to use the most specialized type seen by this path, but when that causes bailouts use the least specialized type instead.
- if (useAggressiveSpecialization &&
+ if (useAggressiveSpecialization &&
profiledArrayType.GetObjectType() == valueType.GetObjectType() &&
!valueType.IsLikelyNativeIntArray() &&
(
@@ -3582,7 +3641,7 @@ GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, I
valueType = profiledArrayType.SetHasNoMissingValues(valueType.HasNoMissingValues());
ChangeValueType(this->currentBlock, CurrentBlockData()->FindValue(opnd->AsRegOpnd()->m_sym), valueType, false);
}
- else if (!useAggressiveSpecialization &&
+ else if (!useAggressiveSpecialization &&
(profiledArrayType.GetObjectType() != valueType.GetObjectType() ||
(
valueType.IsLikelyNativeArray() &&
@@ -3671,15 +3730,6 @@ GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *p
return opnd;
}
- if (!this->func->DoGlobOptsForGeneratorFunc())
- {
- // Don't copy prop in generator functions because non-bytecode temps that span a yield
- // cannot be saved and restored by the current bail-out mechanics utilized by generator
- // yield/resume.
- // TODO[generators][ianhall]: Enable copy-prop at least for in between yields.
- return opnd;
- }
-
if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
{
// Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
@@ -4718,6 +4768,14 @@ GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
case Js::OpCode::Coerce_Str:
AssertMsg(instr->GetDst()->GetValueType().IsString(),
"Creator of this instruction should have set the type");
+
+ // Due to fall through and the fact that Ld_A only takes one source,
+ // free the other source here.
+ if (instr->GetSrc2() && !(this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString()))
+ {
+ instr->FreeSrc2();
+ }
+
// fall-through
case Js::OpCode::Coerce_StrOrRegex:
// We don't set the ValueType of src1 for Coerce_StrOrRegex, hence skip the ASSERT
@@ -5217,6 +5275,18 @@ GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
case Js::OpCode::IsInst:
case Js::OpCode::LdTrue:
case Js::OpCode::LdFalse:
+ case Js::OpCode::CmEq_A:
+ case Js::OpCode::CmSrEq_A:
+ case Js::OpCode::CmNeq_A:
+ case Js::OpCode::CmSrNeq_A:
+ case Js::OpCode::CmLe_A:
+ case Js::OpCode::CmUnLe_A:
+ case Js::OpCode::CmLt_A:
+ case Js::OpCode::CmUnLt_A:
+ case Js::OpCode::CmGe_A:
+ case Js::OpCode::CmUnGe_A:
+ case Js::OpCode::CmGt_A:
+ case Js::OpCode::CmUnGt_A:
return this->NewGenericValue(ValueType::Boolean, dst);
case Js::OpCode::LdUndef:
@@ -5292,7 +5362,7 @@ GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
IR::IndirOpnd *src = instr->GetSrc1()->AsIndirOpnd();
const ValueType baseValueType(src->GetBaseOpnd()->GetValueType());
- if (instr->DoStackArgsOpt(this->func) ||
+ if (instr->DoStackArgsOpt() ||
!(
baseValueType.IsLikelyOptimizedTypedArray() ||
(baseValueType.IsLikelyNativeArray() && instr->IsProfiledInstr()) // Specialized native array lowering for LdElem requires that it is profiled.
@@ -5316,7 +5386,7 @@ GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
this->func->GetDebugNumberSet(debugStringBuffer),
Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
baseValueTypeStr,
- instr->DoStackArgsOpt(this->func) ? _u("instruction uses the arguments object") :
+ instr->DoStackArgsOpt() ? _u("instruction uses the arguments object") :
baseValueType.IsLikelyOptimizedTypedArray() ? _u("index is negative or likely not int") : _u("of array type"));
Output::Flush();
}
@@ -5671,7 +5741,7 @@ GlobOpt::SafeToCopyPropInPrepass(StackSym * const originalSym, StackSym * const
Assert(this->currentBlock->globOptData.GetCopyPropSym(originalSym, value) == copySym);
// In the following example, to copy-prop s2 into s1, it is not enough to check if s1 and s2 are safe to transfer.
- // In fact, both s1 and s2 are safe to transfer, but it is not legal to copy prop s2 into s1.
+ // In fact, both s1 and s2 are safe to transfer, but it is not legal to copy prop s2 into s1.
//
// s1 = s2
// $Loop:
@@ -5682,7 +5752,7 @@ GlobOpt::SafeToCopyPropInPrepass(StackSym * const originalSym, StackSym * const
// In general, requirements for copy-propping in prepass are more restricted than those for transferring values.
// For copy prop in prepass, if the original sym is live on back-edge, then the copy-prop sym should not be written to
// in the loop (or its parents)
-
+
ValueInfo* const valueInfo = value->GetValueInfo();
return IsSafeToTransferInPrepass(originalSym, valueInfo) &&
IsSafeToTransferInPrepass(copySym, valueInfo) &&
@@ -5869,7 +5939,7 @@ GlobOpt::ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const sr
// for aggressive int type spec.
bool isSafeToTransferInPrepass = false;
isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Val, nullptr, &isSafeToTransferInPrepass);
-
+
const ValueType valueType(GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, isValueInfoPrecise, isSafeToTransferInPrepass));
if(isValueInfoPrecise || isSafeToTransferInPrepass)
{
@@ -6587,6 +6657,12 @@ GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
return Js::TaggedInt::ToVarUnchecked(opnd->AsIntConstOpnd()->AsInt32());
}
}
+#if FLOATVAR
+ else if (opnd->IsFloatConstOpnd())
+ {
+ return Js::JavascriptNumber::ToVar(opnd->AsFloatConstOpnd()->m_value);
+ }
+#endif
else if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->IsSingleDef())
{
if (valueInfo->IsBoolean())
@@ -6608,19 +6684,110 @@ GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
{
return (Js::Var)this->func->GetScriptContextInfo()->GetNullAddr();
}
+#if FLOATVAR
+ else if (valueInfo->IsFloat())
+ {
+ IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
+ if ((defInstr->m_opcode == Js::OpCode::LdC_F8_R8 || defInstr->m_opcode == Js::OpCode::LdC_A_R8) && defInstr->GetSrc1()->IsFloatConstOpnd())
+ {
+ return Js::JavascriptNumber::ToVar(defInstr->GetSrc1()->AsFloatConstOpnd()->m_value);
+ }
+ }
+#endif
}
return nullptr;
}
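The FLOATVAR-only branches above can fold a float constant to a compile-time Var because, in FLOATVAR builds, the double is encoded directly in the 64-bit Var bits instead of being boxed on the heap. A minimal sketch of that idea, using an XOR-bias encoding chosen purely for illustration; the real layout used by Js::JavascriptNumber::ToVar differs.

#include <cassert>
#include <cstdint>
#include <cstring>

using Var = uint64_t;

// Illustrative bias: moves the double's bit pattern into a range that cannot
// collide with the values used for pointers or tagged integers.
constexpr uint64_t kFloatVarBias = 0xFFFC000000000000ull;

Var ToFloatVar(double value)
{
    uint64_t bits;
    std::memcpy(&bits, &value, sizeof(bits));
    return bits ^ kFloatVarBias;
}

double FromFloatVar(Var var)
{
    uint64_t bits = var ^ kFloatVarBias;
    double value;
    std::memcpy(&value, &bits, sizeof(value));
    return value;
}

int main()
{
    Var folded = ToFloatVar(3.25);           // what the JIT can bake into the generated code
    assert(FromFloatVar(folded) == 3.25);    // the constant round-trips exactly
    return 0;
}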
-bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
+namespace
{
- ValueInfo *src1ValInfo = src1Val->GetValueInfo();
- ValueInfo *src2ValInfo = src2Val->GetValueInfo();
- return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
- (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
-}
+ bool TryCompIntAndFloat(bool * result, Js::Var left, Js::Var right)
+ {
+ if (Js::TaggedInt::Is(left))
+ {
+ // If both are tagged ints we should not get here.
+ Assert(!Js::TaggedInt::Is(right));
+ if (Js::JavascriptNumber::Is_NoTaggedIntCheck(right))
+ {
+ double value = Js::JavascriptNumber::GetValue(right);
+ *result = (Js::TaggedInt::ToInt32(left) == value);
+ return true;
+ }
+ }
+ return false;
+ }
+
+ bool Op_JitEq(bool * result, Value * src1Val, Value * src2Val, Js::Var src1Var, Js::Var src2Var, Func * func, bool isStrict)
+ {
+ Assert(src1Val != nullptr && src2Val != nullptr);
+ Assert(src1Var != nullptr && src2Var != nullptr);
+
+ if (src1Var == src2Var)
+ {
+ if (Js::TaggedInt::Is(src1Var))
+ {
+ *result = true;
+ return true;
+ }
+ if (!isStrict && src1Val->GetValueInfo()->IsNotFloat())
+ {
+ // If the vars are equal and they are not NaN, non-strict equal returns true. Not float guarantees not NaN.
+ *result = true;
+ return true;
+ }
+
+#if FLOATVAR
+ if (Js::JavascriptNumber::Is_NoTaggedIntCheck(src1Var))
+ {
+ *result = !Js::JavascriptNumber::IsNan(Js::JavascriptNumber::GetValue(src1Var));
+ return true;
+ }
+#endif
+
+ if (src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetTrueAddr()) ||
+ src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetFalseAddr()) ||
+ src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetNullAddr()) ||
+ src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetUndefinedAddr()))
+ {
+ *result = true;
+ return true;
+ }
+
+ // Other var comparisons must be left for the runtime to decide.
+ return false;
+ }
+
+#if FLOATVAR
+ if (TryCompIntAndFloat(result, src1Var, src2Var) || TryCompIntAndFloat(result, src2Var, src1Var))
+ {
+ return true;
+ }
+
+#endif
+
+ return false;
+ }
+
+ bool Op_JitNeq(bool * result, Value * src1Val, Value * src2Val, Js::Var src1Var, Js::Var src2Var, Func * func, bool isStrict)
+ {
+ if (Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, func, isStrict))
+ {
+ *result = !*result;
+ return true;
+ }
+
+ return false;
+ }
+
+ bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
+ {
+ ValueInfo *src1ValInfo = src1Val->GetValueInfo();
+ ValueInfo *src2ValInfo = src2Val->GetValueInfo();
+ return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
+ (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
+ }
+}
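Op_JitEq above only folds a comparison of bitwise-identical vars after ruling out NaN, since a NaN-valued var is not equal to itself. A compact model of that rule over plain doubles, where an unknown operand (std::nullopt) forces the branch to be left to the runtime; this is an illustration of the folding rule, not the helper's actual signature.

#include <cassert>
#include <cmath>
#include <optional>

// A value that may or may not be a known compile-time double.
using MaybeConst = std::optional<double>;

// Returns the folded comparison when both operands are known constants,
// or std::nullopt when the branch must be left to the runtime.
std::optional<bool> FoldStrictEquals(MaybeConst left, MaybeConst right)
{
    if (!left || !right)
    {
        return std::nullopt;                      // nothing can be proven
    }
    if (std::isnan(*left) || std::isnan(*right))
    {
        return false;                             // NaN is not equal to anything, itself included
    }
    return *left == *right;
}

int main()
{
    assert(FoldStrictEquals(1.5, 1.5) == std::optional<bool>(true));
    assert(FoldStrictEquals(NAN, NAN) == std::optional<bool>(false));
    assert(FoldStrictEquals(std::nullopt, 1.5) == std::nullopt);
    return 0;
}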
bool
GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2Val, Js::Var src1Var, Js::Var src2Var, bool *result)
@@ -6634,7 +6801,8 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
{
return undefinedCmp;
}
- return val1->GetValueInfo()->IsPrimitive() && val1->GetValueInfo()->IsNotFloat();
+ ValueInfo * valInfo = val1->GetValueInfo();
+ return !valInfo->HasBeenUndefined() && valInfo->IsPrimitive() && valInfo->IsNotFloat();
}
return false;
};
@@ -6748,12 +6916,10 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
}
else
{
- if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
+ if (!Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, this->func, false /* isStrict */))
{
- // TODO: OOP JIT, const folding
return false;
}
- *result = Js::JavascriptOperators::Equal(src1Var, src2Var, this->func->GetScriptContext());
}
break;
case Js::OpCode::BrNeq_A:
@@ -6780,12 +6946,10 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
}
else
{
- if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
+ if (!Op_JitNeq(result, src1Val, src2Val, src1Var, src2Var, this->func, false /* isStrict */))
{
- // TODO: OOP JIT, const folding
return false;
}
- *result = Js::JavascriptOperators::NotEqual(src1Var, src2Var, this->func->GetScriptContext());
}
break;
case Js::OpCode::BrSrEq_A:
@@ -6821,12 +6985,10 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
}
else
{
- if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
+ if (!Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, this->func, true /* isStrict */))
{
- // TODO: OOP JIT, const folding
return false;
}
- *result = Js::JavascriptOperators::StrictEqual(src1Var, src2Var, this->func->GetScriptContext());
}
break;
@@ -6863,12 +7025,10 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
}
else
{
- if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
+ if (!Op_JitNeq(result, src1Val, src2Val, src1Var, src2Var, this->func, true /* isStrict */))
{
- // TODO: OOP JIT, const folding
return false;
}
- *result = Js::JavascriptOperators::NotStrictEqual(src1Var, src2Var, this->func->GetScriptContext());
}
break;
@@ -6888,16 +7048,36 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
break;
}
- if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
+ if (!src1Var)
{
- // TODO: OOP JIT, const folding
return false;
}
- if (!src1Var)
+
+ // Set *result to whether src1 evaluates to true, and negate it below for BrFalse
+ if (src1Var == reinterpret_cast<Js::Var>(this->func->GetScriptContextInfo()->GetTrueAddr()))
+ {
+ *result = true;
+ }
+ else if (src1Var == reinterpret_cast<Js::Var>(this->func->GetScriptContextInfo()->GetFalseAddr()))
+ {
+ *result = false;
+ }
+ else if (Js::TaggedInt::Is(src1Var))
+ {
+ *result = (src1Var != reinterpret_cast<Js::Var>(Js::AtomTag_IntPtr));
+ }
+#if FLOATVAR
+ else if (Js::JavascriptNumber::Is_NoTaggedIntCheck(src1Var))
+ {
+ double value = Js::JavascriptNumber::GetValue(src1Var);
+ *result = (!Js::JavascriptNumber::IsNan(value)) && (!Js::JavascriptNumber::IsZero(value));
+ }
+#endif
+ else
{
return false;
}
- *result = Js::JavascriptConversion::ToBoolean(src1Var, this->func->GetScriptContext());
+
if (instr->m_opcode == Js::OpCode::BrFalse_A)
{
*result = !(*result);
@@ -6922,7 +7102,20 @@ GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2
{
return false;
}
- *result = !src1ValueInfo->IsPrimitive();
+
+ if (src1ValueInfo->IsPrimitive())
+ {
+ *result = false;
+ }
+ else
+ {
+ if (src1ValueInfo->HasBeenPrimitive())
+ {
+ return false;
+ }
+ *result = true;
+ }
+
break;
}
default:
@@ -9346,7 +9539,7 @@ GlobOpt::TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc
bool isConservativeMulInt = !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec();
// Be conservative about predicting Mul overflow in prepass.
- // Operands that are live on back edge may be denied lossless-conversion to int32 and
+ // Operands that are live on back edge may be denied lossless-conversion to int32 and
// trigger rejit with AggressiveIntTypeSpec off.
// Besides multiplying a variable in a loop can overflow in just a few iterations even in simple cases like v *= 2
// So, make sure we definitely know the source max/min values, otherwise assume the full range.
@@ -10582,6 +10775,7 @@ GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2
bool skipSrc1 = false;
bool skipSrc2 = false;
bool skipDst = false;
+ bool convertDstToBool = false;
if (!this->DoFloatTypeSpec())
{
@@ -10653,6 +10847,36 @@ GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2
skipDst = true;
break;
+ case Js::OpCode::CmEq_A:
+ case Js::OpCode::CmSrEq_A:
+ case Js::OpCode::CmNeq_A:
+ case Js::OpCode::CmSrNeq_A:
+ {
+ if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
+ {
+ return false;
+ }
+
+ allowUndefinedOrNullSrc1 = false;
+ allowUndefinedOrNullSrc2 = false;
+ convertDstToBool = true;
+ break;
+ }
+
+ case Js::OpCode::CmLe_A:
+ case Js::OpCode::CmLt_A:
+ case Js::OpCode::CmGe_A:
+ case Js::OpCode::CmGt_A:
+ {
+ if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
+ {
+ return false;
+ }
+
+ convertDstToBool = true;
+ break;
+ }
+
default:
return false;
}
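The Cm* cases added above route number-valued comparisons through the float-specialized path and mark the destination as a boolean. A small sketch of what that specialized compare computes once both operands have been converted to float64; the enum and function names are illustrative, not ChakraCore IR.

#include <cassert>

enum class CmpOp { Eq, Neq, Lt, Le, Gt, Ge };

// The float-specialized compare: both inputs are already float64 and the
// result is a raw boolean destination instead of a tagged var.
bool FloatCompare(CmpOp op, double a, double b)
{
    switch (op)
    {
    case CmpOp::Eq:  return a == b;
    case CmpOp::Neq: return a != b;
    case CmpOp::Lt:  return a < b;
    case CmpOp::Le:  return a <= b;
    case CmpOp::Gt:  return a > b;
    case CmpOp::Ge:  return a >= b;
    }
    return false;
}

int main()
{
    assert(FloatCompare(CmpOp::Lt, 1.0, 2.0));
    assert(!FloatCompare(CmpOp::Ge, 1.0, 2.0));
    return 0;
}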
@@ -10698,13 +10922,19 @@ GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2
if (!skipDst)
{
dst = instr->GetDst();
-
if (dst)
{
- *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
-
- AssertMsg(dst->IsRegOpnd(), "What else?");
- this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
+ if (convertDstToBool)
+ {
+ *pDstVal = CreateDstUntransferredValue(ValueType::Boolean, instr, src1Val, src2Val);
+ ToVarRegOpnd(dst->AsRegOpnd(), currentBlock);
+ }
+ else
+ {
+ *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
+ AssertMsg(dst->IsRegOpnd(), "What else?");
+ this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
+ }
}
}
@@ -10728,7 +10958,7 @@ GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstV
IR::RegOpnd *baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
ValueType baseValueType(baseOpnd->GetValueType());
- if (instr->DoStackArgsOpt(this->func) ||
+ if (instr->DoStackArgsOpt() ||
(!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
(!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
!(baseValueType.IsLikelyOptimizedTypedArray() || baseValueType.IsLikelyNativeArray()))
@@ -10744,7 +10974,7 @@ GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstV
this->func->GetDebugNumberSet(debugStringBuffer),
Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
baseValueTypeStr,
- instr->DoStackArgsOpt(this->func) ?
+ instr->DoStackArgsOpt() ?
_u("instruction uses the arguments object") :
_u("typed array type specialization is disabled, or base is not an optimized typed array"));
Output::Flush();
@@ -11069,7 +11299,7 @@ GlobOpt::ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val)
return instr;
}
-IR::Instr *
+IR::Instr *
GlobOpt::ToTypeSpecIndex(IR::Instr * instr, IR::RegOpnd * indexOpnd, IR::IndirOpnd * indirOpnd)
{
Assert(indirOpnd != nullptr || indexOpnd == instr->GetSrc1());
@@ -11941,7 +12171,17 @@ GlobOpt::ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Valu
const FloatConstType floatValue = valueInfo->AsFloatConstant()->FloatValue();
if(toType == TyInt32)
{
- Assert(lossy);
+ // In some loop scenarios, a sym can be specialized to int32 on loop entry
+ // during the prepass and then subsequently specialized to float within
+ // the loop, leading to an attempted lossy conversion from float64 to int32
+ // on the backedge. For these cases, disable aggressive int type specialization
+ // and try again.
+ if (!lossy)
+ {
+ AssertOrFailFast(DoAggressiveIntTypeSpec());
+ throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
+ }
+
constOpnd =
IR::IntConstOpnd::New(
Js::JavascriptMath::ToInt32(floatValue),
@@ -13243,6 +13483,98 @@ GlobOpt::ProcessNoImplicitCallArrayUses(IR::RegOpnd * baseOpnd, IR::ArrayRegOpnd
}
}
+void
+GlobOpt::OptStackArgLenAndConst(IR::Instr* instr, Value** src1Val)
+{
+ if (!PHASE_OFF(Js::StackArgLenConstOptPhase, instr->m_func) && instr->m_func->IsStackArgsEnabled() && instr->usesStackArgumentsObject && instr->IsInlined())
+ {
+ IR::Opnd* src1 = instr->GetSrc1();
+ auto replaceInstr = [&](IR::Opnd* newopnd, Js::OpCode opcode)
+ {
+ if (PHASE_TESTTRACE(Js::StackArgLenConstOptPhase, instr->m_func))
+ {
+ Output::Print(_u("Inlined function %s has replaced opcode %s with opcode %s for stack arg optimization.\n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(),
+ Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), Js::OpCodeUtil::GetOpCodeName(opcode));
+ Output::Flush();
+ }
+ this->CaptureByteCodeSymUses(instr);
+ instr->m_opcode = opcode;
+ instr->ReplaceSrc1(newopnd);
+ if (instr->HasBailOutInfo())
+ {
+ instr->ClearBailOutInfo();
+ }
+ if (instr->IsProfiledInstr())
+ {
+ Assert(opcode == Js::OpCode::Ld_A || opcode == Js::OpCode::Typeof);
+ instr->AsProfiledInstr()->u.FldInfo().valueType = ValueType::Uninitialized;
+ }
+ *src1Val = this->OptSrc(instr->GetSrc1(), &instr);
+ instr->m_func->hasArgLenAndConstOpt = true;
+ };
+ Assert(CurrentBlockData()->IsArgumentsOpnd(src1));
+ switch(instr->m_opcode)
+ {
+ case Js::OpCode::LdLen_A:
+ {
+ IR::AddrOpnd* newopnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(instr->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, instr->m_func);
+ replaceInstr(newopnd, Js::OpCode::Ld_A);
+ break;
+ }
+ case Js::OpCode::LdElemI_A:
+ case Js::OpCode::TypeofElem:
+ {
+ IR::IndirOpnd* indirOpndSrc1 = src1->AsIndirOpnd();
+ if (!indirOpndSrc1->GetIndexOpnd())
+ {
+ int argIndex = indirOpndSrc1->GetOffset() + 1;
+ IR::Instr* defInstr = nullptr;
+ if (argIndex > 0)
+ {
+ IR::Instr* inlineeStart = instr->m_func->GetInlineeStart();
+ inlineeStart->IterateArgInstrs([&](IR::Instr* argInstr) {
+ StackSym *argSym = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
+ if (argSym->GetArgSlotNum() - 1 == argIndex)
+ {
+ defInstr = argInstr;
+ return true;
+ }
+ return false;
+ });
+ }
+
+ Js::OpCode replacementOpcode;
+ if (instr->m_opcode == Js::OpCode::TypeofElem)
+ {
+ replacementOpcode = Js::OpCode::Typeof;
+ }
+ else
+ {
+ replacementOpcode = Js::OpCode::Ld_A;
+ }
+
+ // If we cannot find the right instruction, e.g. when calling arguments[2] and no arguments were passed to the func, load undefined instead.
+ if (defInstr == nullptr)
+ {
+ IR::Opnd * undefined = IR::AddrOpnd::New(instr->m_func->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
+ undefined->SetValueType(ValueType::Undefined);
+ replaceInstr(undefined, replacementOpcode);
+ }
+ else
+ {
+ replaceInstr(defInstr->GetSrc1(), replacementOpcode);
+ }
+ }
+ else
+ {
+ instr->m_func->unoptimizableArgumentsObjReference++;
+ }
+ break;
+ }
+ }
+ }
+}
+
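OptStackArgLenAndConst above rewrites arguments.length and constant-index arguments[i] loads inside an inlinee into plain loads of values the JIT already knows, falling back to undefined when the index is past the actual count. A toy model of that folding, assuming the inliner has the actual argument values at hand; the names here are illustrative, not ChakraCore APIs.

#include <cassert>
#include <optional>
#include <string>
#include <vector>

struct FoldedLoad
{
    bool isLength;                    // true when the access was arguments.length
    std::optional<std::string> value; // folded element; nullopt stands for undefined
};

// Fold an access against the known actuals of an inlined call site.
FoldedLoad FoldArgumentsAccess(const std::vector<std::string>& actuals,
                               bool isLengthAccess, int constantIndex)
{
    if (isLengthAccess)
    {
        return { true, std::to_string(actuals.size()) };
    }
    if (constantIndex >= 0 && constantIndex < static_cast<int>(actuals.size()))
    {
        return { false, actuals[constantIndex] };
    }
    return { false, std::nullopt };   // out of range, like arguments[5] with two actuals
}

int main()
{
    std::vector<std::string> actuals = { "a", "b" };
    assert(FoldArgumentsAccess(actuals, true, 0).value == std::string("2"));
    assert(FoldArgumentsAccess(actuals, false, 1).value == std::string("b"));
    assert(!FoldArgumentsAccess(actuals, false, 5).value.has_value());
    return 0;
}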
void
GlobOpt::CaptureNoImplicitCallUses(
IR::Opnd *opnd,
@@ -13419,6 +13751,7 @@ GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
case Js::OpCode::StFld:
case Js::OpCode::StFldStrict:
case Js::OpCode::StSuperFld:
+ case Js::OpCode::StSuperFldStrict:
{
Assert(instr->GetDst());
@@ -13564,7 +13897,6 @@ GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
case IR::HelperArray_Splice:
case IR::HelperArray_Unshift:
case IR::HelperArray_Concat:
- case IR::HelperArray_Slice:
kills.SetKillsArrayHeadSegments();
kills.SetKillsArrayHeadSegmentLengths();
break;
@@ -13618,23 +13950,16 @@ GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
kills.SetKillsNativeArrays();
}
break;
- }
+ }
- case Js::OpCode::InitClass:
+ case Js::OpCode::NewClassProto:
Assert(instr->GetSrc1());
- if (instr->GetSrc2() == nullptr)
+ if (IR::AddrOpnd::IsEqualAddr(instr->GetSrc1(), (void*)func->GetScriptContextInfo()->GetObjectPrototypeAddr()))
{
- // No extends operand, so the InitClass will not make something into a prototype
+ // No extends operand, the proto parent is the Object prototype
break;
}
-
- if(doNativeArrayTypeSpec)
- {
- // Class/object construction can make something a prototype
- kills.SetKillsNativeArrays();
- }
- break;
-
+ // Fall through
case Js::OpCode::NewScObjectNoCtor:
case Js::OpCode::NewScObjectNoCtorFull:
if(doNativeArrayTypeSpec)
@@ -14287,10 +14612,6 @@ GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2V
case Js::OpCode::NewConcatStrMulti:
case Js::OpCode::NewConcatStrMultiBE:
case Js::OpCode::ExtendArg_A:
-#ifdef ENABLE_DOM_FAST_PATH
- case Js::OpCode::DOMFastPathGetter:
- case Js::OpCode::DOMFastPathSetter:
-#endif
case Js::OpCode::NewScopeSlots:
case Js::OpCode::NewScopeSlotsWithoutPropIds:
case Js::OpCode::NewStackScopeSlots:
@@ -14329,6 +14650,10 @@ GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2V
void
GlobOpt::ProcessKills(IR::Instr *instr)
{
+ if (instr->m_opcode == Js::OpCode::Yield)
+ {
+ this->CurrentBlockData()->KillStateForGeneratorYield(instr);
+ }
this->ProcessFieldKills(instr);
this->ProcessValueKills(instr);
this->ProcessArrayValueKills(instr);
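Handling Yield in ProcessKills means the optimizer drops its tracked state at every yield point, since anything it learned about values may no longer hold once the generator resumes. A toy value table showing that effect; the table and sym ids are stand-ins, not GlobOptBlockData.

#include <cassert>
#include <cstdint>
#include <unordered_map>

using SymID = uint32_t;

struct ValueTable
{
    std::unordered_map<SymID, int> knownValues;

    void Learn(SymID sym, int value) { knownValues[sym] = value; }

    // At a yield, cached knowledge about values may be stale after resume,
    // so drop it instead of optimizing across the yield.
    void KillStateForYield() { knownValues.clear(); }

    bool Knows(SymID sym) const { return knownValues.count(sym) != 0; }
};

int main()
{
    ValueTable table;
    table.Learn(7, 42);
    assert(table.Knows(7));
    table.KillStateForYield();
    assert(!table.Knows(7));
    return 0;
}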
@@ -15469,7 +15794,7 @@ GlobOpt::DoConstFold() const
bool
GlobOpt::IsTypeSpecPhaseOff(Func const *func)
{
- return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode() || !func->DoGlobOptsForGeneratorFunc();
+ return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode();
}
bool
@@ -15576,8 +15901,7 @@ GlobOpt::DoArrayCheckHoist(Func const * const func)
return
!PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
!func->IsArrayCheckHoistDisabled() &&
- !func->IsJitInDebugMode() && // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
- func->DoGlobOptsForGeneratorFunc();
+ !func->IsJitInDebugMode(); // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
}
bool
@@ -15589,7 +15913,7 @@ GlobOpt::DoArrayCheckHoist() const
bool
GlobOpt::DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr) const
{
- if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
+ if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt()))
{
return false;
}
@@ -15723,7 +16047,7 @@ GlobOpt::DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType)
if(PHASE_OFF(Js::LdLenIntSpecPhase, func) ||
IsTypeSpecPhaseOff(func) ||
(func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsLdLenIntSpecDisabled()) ||
- (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
+ (instr && !IsLoopPrePass() && instr->DoStackArgsOpt()))
{
return false;
}
@@ -15791,7 +16115,7 @@ GlobOpt::TrackArgumentsObject()
{
if (PHASE_OFF(Js::StackArgOptPhase, this->func))
{
- this->CannotAllocateArgumentsObjectOnStack();
+ this->CannotAllocateArgumentsObjectOnStack(nullptr);
return false;
}
@@ -15799,8 +16123,15 @@ GlobOpt::TrackArgumentsObject()
}
void
-GlobOpt::CannotAllocateArgumentsObjectOnStack()
+GlobOpt::CannotAllocateArgumentsObjectOnStack(Func * curFunc)
{
+ if (curFunc != nullptr && curFunc->hasArgLenAndConstOpt)
+ {
+ Assert(!curFunc->GetJITOutput()->GetOutputData()->disableStackArgOpt);
+ curFunc->GetJITOutput()->GetOutputData()->disableStackArgOpt = true;
+ throw Js::RejitException(RejitReason::DisableStackArgLenAndConstOpt);
+ }
+
func->SetHasStackArgs(false);
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
@@ -16778,13 +17109,12 @@ GlobOpt::GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::In
IR::Opnd *unrollOpnd = IR::IntConstOpnd::New(unroll, type, localFunc);
- IR::Instr* inductionChangeMultiplier = IR::Instr::New(
+ IR::Instr *inductionChangeMultiplier = IR::Instr::New(
Js::OpCode::Mul_I4, sizeOpnd, loopCountOpnd, unrollOpnd, localFunc);
InsertInstr(inductionChangeMultiplier);
inductionChangeMultiplier->ConvertToBailOutInstr(loop->bailOutInfo, IR::BailOutOnOverflow);
-
}
}
else
@@ -17155,7 +17485,7 @@ GlobOpt::EmitMemop(Loop * loop, LoopCount *loopCount, const MemOpEmitData* emitD
RemoveMemOpSrcInstr(memopInstr, ldElemInstr, emitData->block);
}
InsertNoImplicitCallUses(memopInstr);
- noImplicitCallUsesToInsert->Clear();
+ noImplicitCallUsesToInsert->Clear();
}
bool
@@ -17441,7 +17771,7 @@ GlobOpt::PRE::InsertSymDefinitionInLandingPad(StackSym * sym, Loop * loop, Sym *
BasicBlock* loopTail = loop->GetAnyTailBlock();
Value * valueOnBackEdge = loopTail->globOptData.FindValue(propSym);
-
+
// If o.x is not invariant in the loop, we can't use the preloaded value of o.x.y in the landing pad
Value * valueInLandingPad = loop->landingPad->globOptData.FindValue(propSym);
if (valueOnBackEdge->GetValueNumber() != valueInLandingPad->GetValueNumber())
@@ -17464,7 +17794,7 @@ GlobOpt::PRE::InsertSymDefinitionInLandingPad(StackSym * sym, Loop * loop, Sym *
Assert(loop->landingPad->globOptData.IsLive(valueOnBackEdge->GetValueInfo()->GetSymStore()));
// Inserted T3 = o.x
- // Now, we want to
+ // Now, we want to
// 1. Insert T1 = o.x
// 2. Insert T4 = T1.y
// 3. Identify T3 as the objptr copy prop sym for T1, and make T3.y live on the back-edges
@@ -17634,8 +17964,8 @@ void GlobOpt::PRE::RemoveOverlyOptimisticInitialValues(Loop * loop)
{
BasicBlock * landingPad = loop->landingPad;
- // For a property sym whose obj ptr sym wasn't live in the landing pad, we can optmistically (if the obj ptr sym was
- // single def) insert an initial value in the landing pad, with the hope that PRE could make the obj ptr sym live.
+ // For a property sym whose obj ptr sym wasn't live in the landing pad, we can optimistically (if the obj ptr sym was
+ // single def) insert an initial value in the landing pad, with the hope that PRE could make the obj ptr sym live.
// But, if PRE couldn't make the obj ptr sym live, we need to clear the value for the property sym from the landing pad
for (auto it = loop->initialValueFieldMap.GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
diff --git a/lib/Backend/GlobOpt.h b/lib/Backend/GlobOpt.h
index 725ac0ea975..8bae2a93bb8 100644
--- a/lib/Backend/GlobOpt.h
+++ b/lib/Backend/GlobOpt.h
@@ -288,18 +288,14 @@ typedef JsUtil::BaseDictionary Valu
namespace JsUtil
{
template <>
- class ValueEntry<StackLiteralInitFldData> : public BaseValueEntry<StackLiteralInitFldData>
+ inline void ClearValue<StackLiteralInitFldData>::Clear(StackLiteralInitFldData* value)
{
- public:
- void Clear()
- {
#if DBG
- this->value.propIds = nullptr;
- this->value.currentInitFldCount = (uint)-1;
+ value->propIds = nullptr;
+ value->currentInitFldCount = (uint)-1;
#endif
- }
- };
-};
+ }
+}
typedef JsUtil::BaseDictionary IntConstantToStackSymMap;
typedef JsUtil::BaseDictionary IntConstantToValueMap;
@@ -468,6 +464,8 @@ class GlobOpt
BVSparse<JitArenaAllocator> * changedSymsAfterIncBailoutCandidate;
+ BVSparse<JitArenaAllocator> * auxSlotPtrSyms;
+
JitArenaAllocator * alloc;
JitArenaAllocator * tempAlloc;
@@ -697,6 +695,7 @@ class GlobOpt
IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func* func);
IR::Instr* AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset);
void OptArraySrc(IR::Instr **const instrRef, Value ** src1Val, Value ** src2Val);
+ void OptStackArgLenAndConst(IR::Instr* instr, Value** src1Val);
private:
void TrackIntSpecializedAddSubConstant(IR::Instr *const instr, const AddSubConstantInfo *const addSubConstantInfo, Value *const dstValue, const bool updateSourceBounds);
@@ -745,7 +744,7 @@ class GlobOpt
void InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
void InsertValueCompensation(BasicBlock *const predecessor, BasicBlock *const successor, const SymToValueInfoMap *symsRequiringCompensationToMergedValueInfoMap);
IR::Instr * ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val);
- void ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
+ void ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IR::Instr* insertBeforeInstr = nullptr);
IR::Instr * ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *val, bool needsUpdate);
void ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr = nullptr);
void ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
@@ -873,6 +872,7 @@ class GlobOpt
void ProcessInlineeEnd(IR::Instr * instr);
void TrackCalls(IR::Instr * instr);
void RecordInlineeFrameInfo(IR::Instr* instr);
+ void ClearInlineeFrameInfo(IR::Instr* instr);
void EndTrackCall(IR::Instr * instr);
void EndTrackingOfArgObjSymsForInlinee();
void FillBailOutInfo(BasicBlock *block, BailOutInfo *bailOutInfo);
@@ -881,8 +881,14 @@ class GlobOpt
static void MarkNonByteCodeUsed(IR::Instr * instr);
static void MarkNonByteCodeUsed(IR::Opnd * opnd);
+ void GenerateLazyBailOut(IR::Instr *& instr);
+ bool IsLazyBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, bool isHoisted) const;
+
bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const;
- bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const;
+ bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val,
+ BasicBlock const * block, bool hasLiveFields,
+ bool mayNeedImplicitCallBailOut, bool isForwardPass, bool mayNeedLazyBailOut = false) const;
+
static bool IsTypeCheckProtected(const IR::Instr * instr);
static bool MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val);
static bool MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val);
@@ -923,7 +929,7 @@ class GlobOpt
void UpdateObjPtrValueType(IR::Opnd * opnd, IR::Instr * instr);
bool TrackArgumentsObject();
- void CannotAllocateArgumentsObjectOnStack();
+ void CannotAllocateArgumentsObjectOnStack(Func * curFunc);
#if DBG
bool IsPropertySymId(SymID symId) const;
@@ -945,6 +951,8 @@ class GlobOpt
bool CheckIfInstrInTypeCheckSeqEmitsTypeCheck(IR::Instr* instr, IR::PropertySymOpnd *opnd);
template
bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd, BasicBlock* block, bool updateExistingValue, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr, bool *isObjTypeChecked = nullptr);
+ StackSym * EnsureAuxSlotPtrSym(IR::PropertySymOpnd *opnd);
+ void KillAuxSlotPtrSyms(IR::PropertySymOpnd *opnd, BasicBlock *block, bool isObjTypeSpecialized);
template <typename Fn>
bool MapObjectHeaderInlinedTypeSymsUntil(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId, Fn fn);
void KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid);
diff --git a/lib/Backend/GlobOptArrays.cpp b/lib/Backend/GlobOptArrays.cpp
index 132df67069e..54ba70ac919 100644
--- a/lib/Backend/GlobOptArrays.cpp
+++ b/lib/Backend/GlobOptArrays.cpp
@@ -151,6 +151,16 @@ bool GlobOpt::ArraySrcOpt::CheckOpCode()
return false;
}
+ if (instr->GetSrc1()->IsAddrOpnd())
+ {
+ const Js::Var val = instr->GetSrc1()->AsAddrOpnd()->m_address;
+ if (Js::TaggedInt::Is(val))
+ {
+ originalIndexOpnd = instr->UnlinkSrc1();
+ instr->SetSrc1(IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(val), TyInt32, instr->m_func));
+ }
+ }
+
if (!instr->GetSrc1()->IsRegOpnd() && !instr->GetSrc1()->IsIntConstOpnd())
{
return false;
@@ -199,7 +209,7 @@ void GlobOpt::ArraySrcOpt::TypeSpecIndex()
{
// If the optimization is unable to eliminate the bounds checks, we need to restore the original var sym.
Assert(originalIndexOpnd == nullptr);
- originalIndexOpnd = instr->GetSrc1()->Copy(func)->AsRegOpnd();
+ originalIndexOpnd = instr->GetSrc1()->Copy(func);
globOpt->ToTypeSpecIndex(instr, instr->GetSrc1()->AsRegOpnd(), nullptr);
}
}
@@ -2016,7 +2026,8 @@ void GlobOpt::ArraySrcOpt::Optimize()
const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
Assert(
!(bailOutKind & ~IR::BailOutKindBits) ||
- (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
+ (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp ||
+ (bailOutKind & ~IR::BailOutKindBits) == IR::LazyBailOut);
instr->SetBailOutKind(bailOutKind & IR::BailOutKindBits | IR::BailOutOnArrayAccessHelperCall);
}
else
diff --git a/lib/Backend/GlobOptArrays.h b/lib/Backend/GlobOptArrays.h
index d6e0b2c7d09..ed99dd78b0d 100644
--- a/lib/Backend/GlobOptArrays.h
+++ b/lib/Backend/GlobOptArrays.h
@@ -52,7 +52,7 @@ class GlobOpt::ArraySrcOpt
IR::IndirOpnd * baseOwnerIndir = nullptr;
IR::RegOpnd * baseOpnd = nullptr;
IR::Opnd * indexOpnd = nullptr;
- IR::RegOpnd * originalIndexOpnd = nullptr;
+ IR::Opnd * originalIndexOpnd = nullptr;
bool isProfilableLdElem = false;
bool isProfilableStElem = false;
bool isLoad = false;
diff --git a/lib/Backend/GlobOptBailOut.cpp b/lib/Backend/GlobOptBailOut.cpp
index 21b0c7753b6..bec5339fa59 100644
--- a/lib/Backend/GlobOptBailOut.cpp
+++ b/lib/Backend/GlobOptBailOut.cpp
@@ -31,7 +31,7 @@ GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
block->globOptData.changedSyms->ClearAll();
- FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
+ FOREACH_VALUEHASHTABLE_ENTRY(GlobHashBucket, bucket, block->globOptData.symToValueMap)
{
value = bucket.element;
valueInfo = value->GetValueInfo();
@@ -48,7 +48,7 @@ GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
}
block->globOptData.changedSyms->Set(sym->m_id);
}
- NEXT_GLOBHASHTABLE_ENTRY;
+ NEXT_VALUEHASHTABLE_ENTRY;
if (argsToCapture)
{
@@ -239,14 +239,6 @@ GlobOpt::CaptureValuesIncremental(BasicBlock * block,
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture)
{
- if (!this->func->DoGlobOptsForGeneratorFunc())
- {
- // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
- // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
- // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
- return;
- }
-
CapturedValues capturedValues;
SListBase::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
SListBase::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);
@@ -482,6 +474,32 @@ GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
void
GlobOpt::ProcessInlineeEnd(IR::Instr* instr)
{
+ if (!PHASE_OFF(Js::StackArgLenConstOptPhase, instr->m_func) &&
+ !IsLoopPrePass() &&
+ (!instr->m_func->GetJITFunctionBody()->UsesArgumentsObject() || instr->m_func->IsStackArgsEnabled()))
+ {
+ if (instr->m_func->unoptimizableArgumentsObjReference == 0 && instr->m_func->unoptimizableArgumentsObjReferenceInInlinees == 0)
+ {
+ instr->m_func->hasUnoptimizedArgumentsAccess = false;
+ if (!instr->m_func->m_hasInlineArgsOpt && DoInlineArgsOpt(instr->m_func))
+ {
+ instr->m_func->m_hasInlineArgsOpt = true;
+ Assert(instr->m_func->cachedInlineeFrameInfo);
+ instr->m_func->frameInfo = instr->m_func->cachedInlineeFrameInfo;
+ }
+ }
+ else
+ {
+ instr->m_func->hasUnoptimizedArgumentsAccess = true;
+
+ if (instr->m_func->m_hasInlineArgsOpt && instr->m_func->cachedInlineeFrameInfo)
+ {
+ instr->m_func->m_hasInlineArgsOpt = false;
+ ClearInlineeFrameInfo(instr);
+ }
+ }
+ }
+
if (instr->m_func->m_hasInlineArgsOpt)
{
RecordInlineeFrameInfo(instr);
@@ -490,6 +508,8 @@ GlobOpt::ProcessInlineeEnd(IR::Instr* instr)
Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);
+
+ instr->m_func->GetParentFunc()->unoptimizableArgumentsObjReferenceInInlinees += instr->m_func->unoptimizableArgumentsObjReference;
}
void
@@ -506,7 +526,6 @@ GlobOpt::TrackCalls(IR::Instr * instr)
if (this->currentBlock->globOptData.callSequence == nullptr)
{
this->currentBlock->globOptData.callSequence = JitAnew(this->alloc, SListBase);
- this->currentBlock->globOptData.callSequence = this->currentBlock->globOptData.callSequence;
}
this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
@@ -571,6 +590,7 @@ GlobOpt::TrackCalls(IR::Instr * instr)
}
case Js::OpCode::InlineeStart:
+ {
Assert(instr->m_func->GetParentFunc() == this->currentBlock->globOptData.curFunc);
Assert(instr->m_func->GetParentFunc());
this->currentBlock->globOptData.curFunc = instr->m_func;
@@ -578,18 +598,24 @@ GlobOpt::TrackCalls(IR::Instr * instr)
this->func->UpdateMaxInlineeArgOutSize(this->currentBlock->globOptData.inlinedArgOutSize);
this->EndTrackCall(instr);
+ InlineeFrameInfo* inlineeFrameInfo = InlineeFrameInfo::New(instr->m_func->m_alloc);
+ inlineeFrameInfo->functionSymStartValue = instr->GetSrc1()->GetSym() ?
+ CurrentBlockData()->FindValue(instr->GetSrc1()->GetSym()) : nullptr;
+ inlineeFrameInfo->floatSyms = CurrentBlockData()->liveFloat64Syms->CopyNew(this->alloc);
+ inlineeFrameInfo->intSyms = CurrentBlockData()->liveInt32Syms->MinusNew(CurrentBlockData()->liveLossyInt32Syms, this->alloc);
+ inlineeFrameInfo->varSyms = CurrentBlockData()->liveVarSyms->CopyNew(this->alloc);
+
if (DoInlineArgsOpt(instr->m_func))
{
instr->m_func->m_hasInlineArgsOpt = true;
- InlineeFrameInfo* frameInfo = InlineeFrameInfo::New(func->m_alloc);
- instr->m_func->frameInfo = frameInfo;
- frameInfo->functionSymStartValue = instr->GetSrc1()->GetSym() ?
- CurrentBlockData()->FindValue(instr->GetSrc1()->GetSym()) : nullptr;
- frameInfo->floatSyms = CurrentBlockData()->liveFloat64Syms->CopyNew(this->alloc);
- frameInfo->intSyms = CurrentBlockData()->liveInt32Syms->MinusNew(CurrentBlockData()->liveLossyInt32Syms, this->alloc);
- frameInfo->varSyms = CurrentBlockData()->liveVarSyms->CopyNew(this->alloc);
+ instr->m_func->frameInfo = inlineeFrameInfo;
+ }
+ else
+ {
+ instr->m_func->cachedInlineeFrameInfo = inlineeFrameInfo;
}
break;
+ }
case Js::OpCode::EndCallForPolymorphicInlinee:
// Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
@@ -713,7 +739,14 @@ GlobOpt::TrackCalls(IR::Instr * instr)
if (OpCodeAttr::CallInstr(instr->m_opcode))
{
this->EndTrackCall(instr);
- if (this->inInlinedBuiltIn && instr->m_opcode == Js::OpCode::CallDirect)
+ // With `InlineeBuiltInStart` and `InlineeBuiltInEnd` surrounding CallI/CallIDirect/CallIDynamic/CallIFixed,
+ // we are not popping the call sequence correctly. That makes the bailout code think that we need to restore
+ // argouts of the remaining call even though we shouldn't.
+ // Also see Inline::InlineApplyWithArgumentsObject, Inline::InlineApplyWithoutArrayArgument, Inline::InlineCall
+ // in which we set the end tag instruction's opcode to InlineNonTrackingBuiltInEnd
+ if (this->inInlinedBuiltIn &&
+ (instr->m_opcode == Js::OpCode::CallDirect || instr->m_opcode == Js::OpCode::CallI ||
+ instr->m_opcode == Js::OpCode::CallIDynamic || instr->m_opcode == Js::OpCode::CallIFixed))
{
// We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
//
@@ -739,6 +772,24 @@ GlobOpt::TrackCalls(IR::Instr * instr)
}
}
+void GlobOpt::ClearInlineeFrameInfo(IR::Instr* inlineeEnd)
+{
+ if (this->IsLoopPrePass())
+ {
+ return;
+ }
+
+ InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
+ inlineeEnd->m_func->frameInfo = nullptr;
+
+ if (!frameInfo || !frameInfo->isRecorded)
+ {
+ return;
+ }
+ frameInfo->function = InlineFrameInfoValue();
+ frameInfo->arguments->Clear();
+}
+
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
if (this->IsLoopPrePass())
@@ -860,7 +911,7 @@ void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
// This means there are arguments object symbols in the current function which are not in the current block.
// This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
// Rare case, abort stack arguments optimization in this case.
- CannotAllocateArgumentsObjectOnStack();
+ CannotAllocateArgumentsObjectOnStack(this->currentBlock->globOptData.curFunc);
}
else
{
@@ -1186,26 +1237,159 @@ GlobOpt::MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val)
};
}
+bool
+GlobOpt::IsLazyBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, bool isHoisted) const
+{
+#ifdef _M_X64
+
+ if (!this->func->ShouldDoLazyBailOut() ||
+ this->IsLoopPrePass() ||
+ isHoisted
+ )
+ {
+ return false;
+ }
+
+ if (this->currentBlock->IsLandingPad())
+ {
+ Assert(!instr->HasAnyImplicitCalls() || this->currentBlock->GetNext()->loop->endDisableImplicitCall != nullptr);
+ return false;
+ }
+
+ // These opcodes can change the value of a field regardless of whether the
+ // instruction has any implicit call
+ if (OpCodeAttr::CallInstr(instr->m_opcode) || instr->IsStElemVariant() || instr->IsStFldVariant())
+ {
+ return true;
+ }
+
+ // Now onto those that might change values of fixed fields through implicit calls.
+ // There are certain bailouts that are already attached to this instruction that
+ // prevent implicit calls from happening, so we won't need lazy bailout for those.
+
+ // If a type check fails, we will bail out and therefore no need for lazy bailout
+ if (instr->HasTypeCheckBailOut())
+ {
+ return false;
+ }
+
+ // We decided to do the StackArgs optimization, which means that this instruction
+ // can only be LdElemI_A or TypeofElem, and that it does not have
+ // an implicit call. So there is no need for lazy bailout.
+ if (instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOnStackArgsOutOfActualsRange)
+ {
+ Assert(instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem);
+ return false;
+ }
+
+ // If all operands are type specialized, we won't generate helper path;
+ // therefore no need for lazy bailout
+ if (instr->AreAllOpndsTypeSpecialized())
+ {
+ return false;
+ }
+
+ // The instruction might have other bailouts that prevent
+ // implicit calls from happening. That is captured in
+ // GlobOpt::MayNeedBailOnImplicitCall. So we only
+ // need lazy bailout if we think there might be implicit calls
+ // or if there aren't any bailouts that prevent them from happening.
+ return this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val);
+
+#else // _M_X64
+
+ return false;
+
+#endif
+}
+
+void
+GlobOpt::GenerateLazyBailOut(IR::Instr *&instr)
+{
+ // LazyBailOut:
+ // + For all StFld variants (o.x), in the forward pass, we set LazyBailOutBit in the instruction.
+ // In DeadStore, we will remove the bit if the field that the instruction stores to is not fixed
+ // downstream.
+ // + For StElem variants (o[x]), we do not need LazyBailOut if the `x` operand is a number because
+ // we currently only "fix" a field if the property name is non-numeric.
+ // + For all other cases (instructions that may have implicit calls), we will just add on the bit anyway and figure
+ // out later whether we need LazyBailOut during DeadStore.
+ //
+ // Note that for StFld and StElem instructions which can change fixed fields whether or not implicit calls will happen,
+ // if such instructions already have a preop bailout, they should both have BailOnImplicitCallPreOp and LazyBailOut attached.
+ // This is to cover two cases:
+ // + if the operation turns out to be an implicit call, we do a preop bailout
+ // + if the operation isn't an implicit call, but if it invalidates our fixed field's PropertyGuard, then LazyBailOut preop
+ // is triggered. LazyBailOut preop means that we will perform the StFld/StElem again in the interpreter, but that is fine
+ // since we are simply overwriting the value again.
+ if (instr->forcePreOpBailOutIfNeeded)
+ {
+ // `forcePreOpBailOutIfNeeded` indicates that when we need to bail on implicit calls,
+ // the bailout should be preop because these instructions are lowered to multiple helper calls.
+ // In such cases, simply adding a postop lazy bailout to the instruction wouldn't be correct,
+ // so we must generate a bailout on implicit calls preop in place of lazy bailout.
+ if (instr->HasBailOutInfo())
+ {
+ Assert(instr->GetBailOutKind() == IR::BailOutOnImplicitCallsPreOp);
+ instr->SetBailOutKind(BailOutInfo::WithLazyBailOut(instr->GetBailOutKind()));
+ }
+ else
+ {
+ this->GenerateBailAtOperation(&instr, BailOutInfo::WithLazyBailOut(IR::BailOutOnImplicitCallsPreOp));
+ }
+ }
+ else if (!instr->IsStElemVariant() || this->IsNonNumericRegOpnd(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd(), true /* inGlobOpt */))
+ {
+ if (instr->HasBailOutInfo())
+ {
+ instr->SetBailOutKind(BailOutInfo::WithLazyBailOut(instr->GetBailOutKind()));
+ }
+ else
+ {
+ this->GenerateBailAfterOperation(&instr, IR::LazyBailOut);
+ }
+ }
+}
+
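GenerateLazyBailOut above never replaces an existing bailout: it layers the lazy kind on top of whatever the instruction already carries, or attaches a fresh one otherwise. A minimal sketch of that composition, treating bailout kinds as a bit set; the flag values are assumptions, and only the or-ing pattern mirrors how BailOutInfo::WithLazyBailOut is used here.

#include <cassert>
#include <cstdint>

// Illustrative bailout-kind bits; the real IR::BailOutKind enumeration is far richer.
enum BailOutKind : uint32_t
{
    BailOutInvalid              = 0,
    BailOutOnImplicitCallsPreOp = 1u << 0,
    LazyBailOut                 = 1u << 1,
};

inline BailOutKind WithLazyBailOut(BailOutKind kind)
{
    return static_cast<BailOutKind>(kind | LazyBailOut);
}

int main()
{
    // An instruction that already bails out pre-op on implicit calls keeps that
    // kind and additionally becomes a lazy bailout point.
    BailOutKind kind = WithLazyBailOut(BailOutOnImplicitCallsPreOp);
    assert(kind & BailOutOnImplicitCallsPreOp);
    assert(kind & LazyBailOut);
    return 0;
}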
bool
GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const
{
Assert(!this->IsLoopPrePass());
- return this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val, this->currentBlock,
- (!this->currentBlock->globOptData.liveFields->IsEmpty()), !this->currentBlock->IsLandingPad(), true);
+ return this->IsImplicitCallBailOutCurrentlyNeeded(
+ instr, src1Val, src2Val, this->currentBlock,
+ (!this->currentBlock->globOptData.liveFields->IsEmpty()) /* hasLiveFields */,
+ !this->currentBlock->IsLandingPad() /* mayNeedImplicitCallBailOut */,
+ true /* isForwardPass */
+ );
}
bool
-GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const
+GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block,
+ bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass, bool mayNeedLazyBailOut) const
{
+ // We use BailOnImplicitCallPreOp for fixed field optimization in place of LazyBailOut when
+ // an instruction already has a preop bailout. This function is called both from the forward
+ // and backward passes to check whether a bailout on implicit calls is needed, and the result is used to
+ // insert or remove the bailout. In the backward pass, we want to override a decision of "no implicit
+ // call bailout needed" to true when we need lazy bailout so that the bailout isn't removed.
+ // In the forward pass, however, we don't want to influence the result. So make sure that
+ // mayNeedLazyBailOut is false when we are in the forward pass.
+ Assert(!isForwardPass || !mayNeedLazyBailOut);
+
if (mayNeedImplicitCallBailOut &&
- !instr->CallsAccessor() &&
+
+ // If we know that we are calling an accessor, don't insert bailout on implicit calls
+ // because we will bail out anyway. However, with fixed field optimization we still
+ // want the bailout to prevent any side effects from happening.
+ (!instr->CallsAccessor() || mayNeedLazyBailOut) &&
(
NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
- NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass) ||
- (instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutMarkTempObject) != 0)
+ NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass) ||
+ (instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutMarkTempObject) != 0) ||
+ mayNeedLazyBailOut
) &&
(!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
{
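
A minimal model of how the new mayNeedLazyBailOut parameter folds into this predicate, assuming simplified boolean inputs in place of the real GlobOpt queries:

```cpp
#include <cassert>

// anyOtherReasonToBail stands for the disjunction in the real code: live values,
// CSE, field hoisting, array check hoisting, or a MarkTempObject bailout.
bool NeedsImplicitCallBailOut(bool isForwardPass,
                              bool mayNeedLazyBailOut, // only ever true in the backward pass
                              bool callsAccessor,
                              bool anyOtherReasonToBail)
{
    // The forward pass must not let lazy bailout influence the decision.
    assert(!isForwardPass || !mayNeedLazyBailOut);

    // An accessor call normally makes the implicit-call bailout redundant, but the
    // fixed-field (lazy) bailout still wants it in order to block side effects.
    const bool accessorOk = !callsAccessor || mayNeedLazyBailOut;

    return accessorOk && (anyOtherReasonToBail || mayNeedLazyBailOut);
}
```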
diff --git a/lib/Backend/GlobOptBlockData.cpp b/lib/Backend/GlobOptBlockData.cpp
index 46e10460844..bf5c3a4b557 100644
--- a/lib/Backend/GlobOptBlockData.cpp
+++ b/lib/Backend/GlobOptBlockData.cpp
@@ -660,7 +660,7 @@ GlobOptBlockData::MergeBlockData(
{
if (!this->argObjSyms->Equal(fromData->argObjSyms))
{
- this->globOpt->CannotAllocateArgumentsObjectOnStack();
+ this->globOpt->CannotAllocateArgumentsObjectOnStack(nullptr);
}
}
@@ -767,12 +767,23 @@ GlobOptBlockData::MergeValueMaps(
if (iter2.IsValid() && bucket.value->m_id == iter2.Data().value->m_id)
{
+ // Syms that are assigned to within the loop must have unique
+ // value numbers in the loop header after merging; a single
+ // prepass is not adequate to determine that sym values are
+ // equivalent through all possible loop paths.
+ bool forceUniqueValue =
+ isLoopBackEdge &&
+ !this->globOpt->IsLoopPrePass() &&
+ loop &&
+ loop->symsAssignedToInLoop->Test(bucket.value->m_id);
+
newValue =
this->MergeValues(
bucket.element,
iter2.Data().element,
iter2.Data().value,
isLoopBackEdge,
+ forceUniqueValue,
symsRequiringCompensation,
symsCreatedForMerge);
}
@@ -847,6 +858,7 @@ GlobOptBlockData::MergeValues(
Value *fromDataValue,
Sym *fromDataSym,
bool isLoopBackEdge,
+ bool forceUniqueValue,
BVSparse *const symsRequiringCompensation,
BVSparse *const symsCreatedForMerge)
{
@@ -879,22 +891,30 @@ GlobOptBlockData::MergeValues(
return toDataValue;
}
- // There may be other syms in toData that haven't been merged yet, referring to the current toData value for this sym. If
- // the merge produced a new value info, don't corrupt the value info for the other sym by changing the same value. Instead,
- // create one value per source value number pair per merge and reuse that for new value infos.
- Value *newValue = this->globOpt->valuesCreatedForMerge->Lookup(sourceValueNumberPair, nullptr);
- if(newValue)
+ Value *newValue = nullptr;
+ if (forceUniqueValue)
{
- Assert(sameValueNumber == (newValue->GetValueNumber() == toDataValue->GetValueNumber()));
-
- // This is an exception where Value::SetValueInfo is called directly instead of GlobOpt::ChangeValueInfo, because we're
- // actually generating new value info through merges.
- newValue->SetValueInfo(newValueInfo);
+ newValue = this->globOpt->NewValue(newValueInfo);
}
else
{
- newValue = this->globOpt->NewValue(sameValueNumber ? sourceValueNumberPair.First() : this->globOpt->NewValueNumber(), newValueInfo);
- this->globOpt->valuesCreatedForMerge->Add(sourceValueNumberPair, newValue);
+ // There may be other syms in toData that haven't been merged yet, referring to the current toData value for this sym. If
+ // the merge produced a new value info, don't corrupt the value info for the other sym by changing the same value. Instead,
+ // create one value per source value number pair per merge and reuse that for new value infos.
+ newValue = this->globOpt->valuesCreatedForMerge->Lookup(sourceValueNumberPair, nullptr);
+ if (newValue)
+ {
+ Assert(sameValueNumber == (newValue->GetValueNumber() == toDataValue->GetValueNumber()));
+
+ // This is an exception where Value::SetValueInfo is called directly instead of GlobOpt::ChangeValueInfo, because we're
+ // actually generating new value info through merges.
+ newValue->SetValueInfo(newValueInfo);
+ }
+ else
+ {
+ newValue = this->globOpt->NewValue(sameValueNumber ? sourceValueNumberPair.First() : this->globOpt->NewValueNumber(), newValueInfo);
+ this->globOpt->valuesCreatedForMerge->Add(sourceValueNumberPair, newValue);
+ }
}
// Set symStore if same on both paths.
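
The forceUniqueValue path added above changes how value numbers are assigned during a back-edge merge. A rough model of that policy, with hypothetical types standing in for the real Value/ValueInfo machinery:

```cpp
#include <cstdint>
#include <map>
#include <utility>

using ValueNumber = uint32_t;

struct MergeContext
{
    ValueNumber nextValueNumber = 1;
    // One merged value per source value-number pair, reused so that other syms
    // still pointing at the old toData value are not corrupted.
    std::map<std::pair<ValueNumber, ValueNumber>, ValueNumber> valuesCreatedForMerge;

    ValueNumber NewValueNumber() { return nextValueNumber++; }

    ValueNumber Merge(ValueNumber toNum, ValueNumber fromNum, bool forceUniqueValue)
    {
        if (forceUniqueValue)
        {
            // Loop back edge, sym assigned inside the loop: a single prepass cannot
            // prove the paths agree, so always hand out a fresh value number.
            return NewValueNumber();
        }

        auto key = std::make_pair(toNum, fromNum);
        auto it = valuesCreatedForMerge.find(key);
        if (it != valuesCreatedForMerge.end())
        {
            return it->second; // reuse the value created earlier in this merge
        }

        ValueNumber merged = (toNum == fromNum) ? toNum : NewValueNumber();
        valuesCreatedForMerge.emplace(key, merged);
        return merged;
    }
};
```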
@@ -1654,60 +1674,82 @@ GlobOptBlockData::IsFloat64TypeSpecialized(Sym const * sym) const
}
void
-GlobOptBlockData::KillStateForGeneratorYield()
+GlobOptBlockData::KillStateForGeneratorYield(IR::Instr* yieldInstr)
{
- /*
- TODO[generators][ianhall]: Do a ToVar on any typespec'd syms before the bailout so that we can enable typespec in generators without bailin having to restore typespec'd values
- FOREACH_BITSET_IN_SPARSEBV(symId, this->liveInt32Syms)
- {
- this->ToVar(instr, , this->globOpt->currentBlock, , );
- }
- NEXT_BITSET_IN_SPARSEBV;
-
- FOREACH_BITSET_IN_SPARSEBV(symId, this->liveInt32Syms)
- {
- this->ToVar(instr, , this->globOpt->currentBlock, , );
- }
- NEXT_BITSET_IN_SPARSEBV;
- */
+ this->liveInt32Syms->Minus(this->liveVarSyms);
+ this->globOpt->ToVar(liveInt32Syms, this->globOpt->currentBlock, yieldInstr /* insertBeforeInstr */);
+ this->liveInt32Syms->ClearAll();
- FOREACH_GLOBHASHTABLE_ENTRY(bucket, this->symToValueMap)
- {
- ValueType type = bucket.element->GetValueInfo()->Type().ToLikely();
- bucket.element = this->globOpt->NewGenericValue(type);
- }
- NEXT_GLOBHASHTABLE_ENTRY;
+ this->liveFloat64Syms->Minus(this->liveVarSyms);
+ this->globOpt->ToVar(liveFloat64Syms, this->globOpt->currentBlock, yieldInstr /* insertBeforeInstr */);
+ this->liveFloat64Syms->ClearAll();
- this->exprToValueMap->ClearAll();
- this->liveFields->ClearAll();
- this->liveArrayValues->ClearAll();
- if (this->maybeWrittenTypeSyms)
- {
- this->maybeWrittenTypeSyms->ClearAll();
- }
- this->isTempSrc->ClearAll();
- this->liveInt32Syms->ClearAll();
this->liveLossyInt32Syms->ClearAll();
- this->liveFloat64Syms->ClearAll();
// Keep this->liveVarSyms as is
// Keep this->argObjSyms as is
- // MarkTemp should be disabled for generator functions for now
- Assert(this->maybeTempObjectSyms == nullptr || this->maybeTempObjectSyms->IsEmpty());
- Assert(this->canStoreTempObjectSyms == nullptr || this->canStoreTempObjectSyms->IsEmpty());
+ this->hasCSECandidates = false;
- this->valuesToKillOnCalls->Clear();
- if (this->inductionVariables)
- {
- this->inductionVariables->Clear();
+ // No need to clear `isTempSrc` (used for in-place string concat)
+
+ this->exprToValueMap->ClearAll();
+
+ this->KillSymToValueMapForGeneratorYield();
+}
+
+void
+GlobOptBlockData::KillSymToValueMapForGeneratorYield()
+{
+ // Remove illegal symToValueMap entries whose sym stores don't have bytecode registers.
+ // Hash table bucket key-value visualization: { bucket.value: bucket.element }
+ //
+ // Idea:
+ // Multiple symbols can map to the same value, which has a single sym store
+ // (multiple keys map to the same value).
+ // Since that sym store might not have a bytecode register, our first pass
+ // through the map attempts to use the symbol (key) as the sym store for that value.
+ // This allows us to retain such entries.
+ // After the first pass, any symToValueMap entries whose sym stores still don't have
+ // bytecode registers will be cleared.
+ FOREACH_VALUEHASHTABLE_ENTRY(GlobHashBucket, bucket, this->symToValueMap)
+ {
+ if (bucket.element == nullptr)
+ {
+ continue;
+ }
+
+ Sym* symStore = bucket.element->GetValueInfo()->GetSymStore();
+ if (symStore != nullptr && symStore->IsStackSym() && symStore->AsStackSym()->HasByteCodeRegSlot())
+ {
+ continue;
+ }
+
+ Sym* sym = bucket.value;
+ if (sym != nullptr && sym->IsStackSym() && sym->AsStackSym()->HasByteCodeRegSlot())
+ {
+ bucket.element->GetValueInfo()->SetSymStore(sym);
+ }
}
- if (this->availableIntBoundChecks)
+ NEXT_VALUEHASHTABLE_ENTRY;
+
+ // Remove illegal entries
+ FOREACH_VALUEHASHTABLE_ENTRY_EDITING(GlobHashBucket, bucket, this->symToValueMap, iter)
{
- this->availableIntBoundChecks->Clear();
+ Value* value = bucket.element;
+ if (value == nullptr)
+ {
+ iter.RemoveCurrent(this->symToValueMap->alloc);
+ }
+ else
+ {
+ Sym* symStore = value->GetValueInfo()->GetSymStore();
+ if (symStore == nullptr || !symStore->IsStackSym() || !symStore->AsStackSym()->HasByteCodeRegSlot())
+ {
+ iter.RemoveCurrent(this->symToValueMap->alloc);
+ }
+ }
}
-
- // Keep bailout data as is
- this->hasCSECandidates = false;
+ NEXT_VALUEHASHTABLE_ENTRY_EDITING;
}
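
A compact sketch of the two-pass cleanup described in the comment, using std::unordered_map and a callback in place of the real GlobHashTable and StackSym APIs:

```cpp
#include <unordered_map>
#include <functional>

struct Val { int symStore; };   // -1 means "no sym store"
using SymId = int;

void KillMapForYield(std::unordered_map<SymId, Val*>& symToValue,
                     const std::function<bool(int)>& hasByteCodeRegSlot)
{
    // Pass 1: if a value's sym store has no bytecode register, try to adopt one
    // of the keys (syms) mapping to it as the new sym store.
    for (auto& [sym, val] : symToValue)
    {
        if (val == nullptr) continue;
        if (val->symStore >= 0 && hasByteCodeRegSlot(val->symStore)) continue;
        if (hasByteCodeRegSlot(sym))
        {
            val->symStore = sym;
        }
    }

    // Pass 2: drop every entry that still has no restorable sym store.
    for (auto it = symToValue.begin(); it != symToValue.end(); )
    {
        Val* val = it->second;
        if (val == nullptr || val->symStore < 0 || !hasByteCodeRegSlot(val->symStore))
        {
            it = symToValue.erase(it);
        }
        else
        {
            ++it;
        }
    }
}
```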
#if DBG_DUMP
diff --git a/lib/Backend/GlobOptBlockData.h b/lib/Backend/GlobOptBlockData.h
index 541c7603411..27a23ef0ea0 100644
--- a/lib/Backend/GlobOptBlockData.h
+++ b/lib/Backend/GlobOptBlockData.h
@@ -20,7 +20,7 @@ class ExprAttributes
}
private:
- static const uint32 BitMask(const uint index)
+ static uint32 BitMask(const uint index)
{
return 1u << index;
}
@@ -261,7 +261,7 @@ class GlobOptBlockData
template
void MergeCapturedValues(SListBase* toList, SListBase * fromList, CapturedItemsAreEqual itemsAreEqual);
void MergeValueMaps(BasicBlock *toBlock, BasicBlock *fromBlock, BVSparse *const symsRequiringCompensation, BVSparse *const symsCreatedForMerge);
- Value * MergeValues(Value *toDataValue, Value *fromDataValue, Sym *fromDataSym, bool isLoopBackEdge, BVSparse *const symsRequiringCompensation, BVSparse *const symsCreatedForMerge);
+ Value * MergeValues(Value *toDataValue, Value *fromDataValue, Sym *fromDataSym, bool isLoopBackEdge, bool forceUniqueValue, BVSparse *const symsRequiringCompensation, BVSparse *const symsCreatedForMerge);
ValueInfo * MergeValueInfo(Value *toDataVal, Value *fromDataVal, Sym *fromDataSym, bool isLoopBackEdge, bool sameValueNumber, BVSparse *const symsRequiringCompensation, BVSparse *const symsCreatedForMerge);
JsTypeValueInfo * MergeJsTypeValueInfo(JsTypeValueInfo * toValueInfo, JsTypeValueInfo * fromValueInfo, bool isLoopBackEdge, bool sameValueNumber);
ValueInfo * MergeArrayValueInfo(const ValueType mergedValueType, const ArrayValueInfo *const toDataValueInfo, const ArrayValueInfo *const fromDataValueInfo, Sym *const arraySym, BVSparse *const symsRequiringCompensation, BVSparse *const symsCreatedForMerge, bool isLoopBackEdge);
@@ -320,8 +320,9 @@ class GlobOptBlockData
private:
// Other
+ void KillSymToValueMapForGeneratorYield();
public:
- void KillStateForGeneratorYield();
+ void KillStateForGeneratorYield(IR::Instr *yieldInstr);
// Debug
public:
diff --git a/lib/Backend/GlobOptExpr.cpp b/lib/Backend/GlobOptExpr.cpp
index d95b63779ce..14c6ac03e25 100644
--- a/lib/Backend/GlobOptExpr.cpp
+++ b/lib/Backend/GlobOptExpr.cpp
@@ -828,6 +828,7 @@ GlobOpt::ProcessArrayValueKills(IR::Instr *instr)
case Js::OpCode::StFldStrict:
case Js::OpCode::StRootFldStrict:
case Js::OpCode::StSuperFld:
+ case Js::OpCode::StSuperFldStrict:
case Js::OpCode::StSlot:
case Js::OpCode::StSlotChkUndecl:
case Js::OpCode::DeleteFld:
diff --git a/lib/Backend/GlobOptFields.cpp b/lib/Backend/GlobOptFields.cpp
index d095f85801e..dca85bd2119 100644
--- a/lib/Backend/GlobOptFields.cpp
+++ b/lib/Backend/GlobOptFields.cpp
@@ -211,7 +211,6 @@ void
GlobOpt::KillLiveElems(IR::IndirOpnd * indirOpnd, IR::Opnd * valueOpnd, BVSparse * bv, bool inGlobOpt, Func *func)
{
IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
-
// obj.x = 10;
// obj["x"] = ...; // This needs to kill obj.x... We need to kill all fields...
//
@@ -392,6 +391,7 @@ GlobOpt::ProcessFieldKills(IR::Instr *instr, BVSparse *bv, bo
case Js::OpCode::InitSetFld:
case Js::OpCode::InitGetFld:
+ case Js::OpCode::InitClassMember:
case Js::OpCode::InitClassMemberGet:
case Js::OpCode::InitClassMemberSet:
sym = instr->GetDst()->AsSymOpnd()->m_sym;
@@ -440,6 +440,7 @@ GlobOpt::ProcessFieldKills(IR::Instr *instr, BVSparse *bv, bo
case Js::OpCode::StSlot:
case Js::OpCode::StSlotChkUndecl:
case Js::OpCode::StSuperFld:
+ case Js::OpCode::StSuperFldStrict:
Assert(dstOpnd != nullptr);
sym = dstOpnd->AsSymOpnd()->m_sym;
if (inGlobOpt)
@@ -481,7 +482,7 @@ GlobOpt::ProcessFieldKills(IR::Instr *instr, BVSparse *bv, bo
case Js::OpCode::InlineeEnd:
Assert(!instr->UsesAllFields());
- // Kill all live 'arguments' and 'caller' fields, as 'inlineeFunction.arguments' and 'inlineeFunction.caller'
+ // Kill all live 'arguments' and 'caller' fields, as 'inlineeFunction.arguments' and 'inlineeFunction.caller'
// cannot be copy-propped across different instances of the same inlined function.
KillLiveFields(argumentsEquivBv, bv);
KillLiveFields(callerEquivBv, bv);
@@ -564,7 +565,14 @@ GlobOpt::ProcessFieldKills(IR::Instr *instr, BVSparse *bv, bo
}
break;
- case Js::OpCode::InitClass:
+ case Js::OpCode::NewClassProto:
+ Assert(instr->GetSrc1());
+ if (IR::AddrOpnd::IsEqualAddr(instr->GetSrc1(), (void*)func->GetScriptContextInfo()->GetObjectPrototypeAddr()))
+ {
+ // No extends operand, the proto parent is the Object prototype
+ break;
+ }
+ // Fall through
case Js::OpCode::InitProto:
case Js::OpCode::NewScObjectNoCtor:
case Js::OpCode::NewScObjectNoCtorFull:
@@ -635,7 +643,7 @@ GlobOpt::CreateFieldSrcValue(PropertySym * sym, PropertySym * originalSym, IR::O
}
Assert((*ppOpnd)->AsSymOpnd()->m_sym == sym || this->IsLoopPrePass());
-
+
// We don't use the sym store to do copy prop on hoisted fields, but create a value
// in case it can be copy prop out of the loop.
return this->NewGenericValue(ValueType::Uninitialized, *ppOpnd);
@@ -929,6 +937,7 @@ GlobOpt::FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock
if (!isObjTypeSpecialized || opnd->ChangesObjectLayout())
{
this->KillObjectHeaderInlinedTypeSyms(block, isObjTypeSpecialized, opndId);
+ this->KillAuxSlotPtrSyms(opnd, block, isObjTypeSpecialized);
}
else if (!isObjTypeChecked && this->HasLiveObjectHeaderInlinedTypeSym(block, true, opndId))
{
@@ -939,6 +948,37 @@ GlobOpt::FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock
return isObjTypeSpecialized;
}
+StackSym *
+GlobOpt::EnsureAuxSlotPtrSym(IR::PropertySymOpnd *opnd)
+{
+ StackSym *auxSlotPtrSym = opnd->EnsureAuxSlotPtrSym(this->func);
+ this->auxSlotPtrSyms->Set(auxSlotPtrSym->m_id);
+ return auxSlotPtrSym;
+}
+
+void
+GlobOpt::KillAuxSlotPtrSyms(IR::PropertySymOpnd *opnd, BasicBlock *block, bool isObjTypeSpecialized)
+{
+ StackSym *auxSlotPtrSym = nullptr;
+ if (isObjTypeSpecialized)
+ {
+ // Kill all aux slot syms other than this one
+ auxSlotPtrSym = opnd->GetAuxSlotPtrSym();
+ if (auxSlotPtrSym)
+ {
+ Assert(this->auxSlotPtrSyms && this->auxSlotPtrSyms->Test(auxSlotPtrSym->m_id));
+ this->auxSlotPtrSyms->Clear(auxSlotPtrSym->m_id);
+ }
+ }
+
+ block->globOptData.liveFields->Minus(this->auxSlotPtrSyms);
+
+ if (auxSlotPtrSym)
+ {
+ this->auxSlotPtrSyms->Set(auxSlotPtrSym->m_id);
+ }
+}
+
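
The bit-vector dance in KillAuxSlotPtrSyms (clear the surviving sym from the kill set, subtract, then restore it) can be illustrated with ordinary containers; this is a sketch under those assumptions, not the JIT's sparse bit vectors:

```cpp
#include <set>
#include <optional>

using SymId = unsigned;

void KillAuxSlotPtrSymsSketch(std::set<SymId>& liveFields,
                              std::set<SymId>& auxSlotPtrSyms,
                              std::optional<SymId> currentAuxSlotPtrSym)
{
    // Temporarily remove the surviving sym from the kill set...
    if (currentAuxSlotPtrSym)
    {
        auxSlotPtrSyms.erase(*currentAuxSlotPtrSym);
    }

    // ...kill everything else (liveFields -= auxSlotPtrSyms)...
    for (SymId sym : auxSlotPtrSyms)
    {
        liveFields.erase(sym);
    }

    // ...then put it back so the tracking set stays complete.
    if (currentAuxSlotPtrSym)
    {
        auxSlotPtrSyms.insert(*currentAuxSlotPtrSym);
    }
}
```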
void
GlobOpt::KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId)
{
@@ -1347,12 +1387,6 @@ GlobOpt::ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd
{
// Indicates we can optimize, as all upstream types are equivalent here.
- opnd->SetSlotIndex(slotIndex);
- opnd->SetUsesAuxSlot(auxSlot);
-
- opnd->GetObjTypeSpecInfo()->SetSlotIndex(slotIndex);
- opnd->GetObjTypeSpecInfo()->SetUsesAuxSlot(auxSlot);
-
isSpecialized = true;
if (isTypeCheckedOut)
{
@@ -1361,10 +1395,17 @@ GlobOpt::ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd
if (consumeType)
{
opnd->SetTypeChecked(true);
- }
- if (checkedTypeSetIndex != (uint16)-1)
- {
- opnd->SetCheckedTypeSetIndex(checkedTypeSetIndex);
+
+ opnd->SetSlotIndex(slotIndex);
+ opnd->SetUsesAuxSlot(auxSlot);
+
+ opnd->GetObjTypeSpecInfo()->SetSlotIndex(slotIndex);
+ opnd->GetObjTypeSpecInfo()->SetUsesAuxSlot(auxSlot);
+
+ if (checkedTypeSetIndex != (uint16)-1)
+ {
+ opnd->SetCheckedTypeSetIndex(checkedTypeSetIndex);
+ }
}
}
}
@@ -1407,8 +1448,8 @@ GlobOpt::ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd
}
}
else if (valueInfo->GetJsTypeSet() &&
- (opnd->IsMono() ?
- valueInfo->GetJsTypeSet()->Contains(opnd->GetFirstEquivalentType()) :
+ (opnd->IsMono() ?
+ valueInfo->GetJsTypeSet()->Contains(opnd->GetFirstEquivalentType()) :
IsSubsetOf(opndTypeSet, valueInfo->GetJsTypeSet())
)
)
@@ -1541,6 +1582,43 @@ GlobOpt::ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd
*changesTypeValueOut = isSpecialized && (emitsTypeCheck || addsProperty);
}
+ if (makeChanges)
+ {
+ // Track liveness of aux slot ptr syms.
+ if (!PHASE_OFF(Js::ReuseAuxSlotPtrPhase, this->func) && isSpecialized)
+ {
+ if (opnd->UsesAuxSlot() && !opnd->IsLoadedFromProto())
+ {
+ // Optimized ld/st that loads/uses an aux slot ptr.
+ // Aux slot sym is live forward.
+ StackSym *auxSlotPtrSym = this->EnsureAuxSlotPtrSym(opnd);
+ if (!this->IsLoopPrePass() && opnd->IsTypeChecked())
+ {
+ if (block->globOptData.liveFields->TestAndSet(auxSlotPtrSym->m_id))
+ {
+ // Aux slot sym is available here. Tell lowerer to use it.
+ opnd->SetAuxSlotPtrSymAvailable(true);
+ }
+ }
+ else
+ {
+ block->globOptData.liveFields->Set(auxSlotPtrSym->m_id);
+ }
+ }
+ else if (!opnd->IsTypeChecked())
+ {
+ // Type sym is not available here (i.e., object shape is not known) and we're not loading the aux slots.
+ // May get here with aux slot sym still in live set if type sym is not in the value table.
+ // Clear the aux slot sym out of the live set.
+ StackSym *auxSlotPtrSym = opnd->GetAuxSlotPtrSym();
+ if (auxSlotPtrSym)
+ {
+ block->globOptData.liveFields->Clear(auxSlotPtrSym->m_id);
+ }
+ }
+ }
+ }
+
return isSpecialized;
}
@@ -1559,7 +1637,7 @@ GlobOpt::OptNewScObject(IR::Instr** instrPtr, Value* srcVal)
instr->m_func->GetConstructorCache(static_cast(instr->AsProfiledInstr()->u.profileId)) : nullptr;
// TODO: OOP JIT, enable assert
- //Assert(ctorCache == nullptr || srcVal->GetValueInfo()->IsVarConstant() && Js::JavascriptFunction::Is(srcVal->GetValueInfo()->AsVarConstant()->VarValue()));
+ //Assert(ctorCache == nullptr || srcVal->GetValueInfo()->IsVarConstant() && Js::VarIs(srcVal->GetValueInfo()->AsVarConstant()->VarValue()));
Assert(ctorCache == nullptr || !ctorCache->IsTypeFinal() || ctorCache->CtorHasNoExplicitReturnValue());
if (ctorCache != nullptr && !ctorCache->SkipNewScObject() && (isCtorInlined || ctorCache->IsTypeFinal()))
@@ -1854,6 +1932,11 @@ GlobOpt::KillObjectType(StackSym* objectSym, BVSparse* liveFi
}
liveFields->Clear(objectSym->GetObjectTypeSym()->m_id);
+ StackSym *auxSlotPtrSym = objectSym->GetAuxSlotPtrSym();
+ if (auxSlotPtrSym)
+ {
+ liveFields->Clear(auxSlotPtrSym->m_id);
+ }
}
void
@@ -1862,6 +1945,7 @@ GlobOpt::KillAllObjectTypes(BVSparse* liveFields)
if (this->objectTypeSyms && liveFields)
{
liveFields->Minus(this->objectTypeSyms);
+ liveFields->Minus(this->auxSlotPtrSyms);
}
}
@@ -1931,6 +2015,8 @@ GlobOpt::CopyPropPropertySymObj(IR::SymOpnd *symOpnd, IR::Instr *instr)
bool shouldOptimize = CompareCurrentTypesWithExpectedTypes(newValueInfo, propertySymOpnd);
if (!shouldOptimize)
{
+ // We would just like to force a new type check here and keep optimizing, but the downstream
+ // objtypespecfldinfo may have slot indices based on the old type.
propertySymOpnd->SetTypeCheckSeqCandidate(false);
}
}
@@ -2047,6 +2133,12 @@ GlobOpt::UpdateObjPtrValueType(IR::Opnd * opnd, IR::Instr * instr)
AnalysisAssert(type != nullptr);
Js::TypeId typeId = type->GetTypeId();
+ if (Js::TypedArrayBase::Is(typeId))
+ {
+ // Type ID does not allow us to distinguish between virtual and non-virtual typed array.
+ return;
+ }
+
// Passing false for useVirtual as we would never have a virtual typed array hitting this code path
ValueType newValueType = ValueType::FromTypeId(typeId, false);
diff --git a/lib/Backend/IR.cpp b/lib/Backend/IR.cpp
index 1e3a4c1074b..71f47941671 100644
--- a/lib/Backend/IR.cpp
+++ b/lib/Backend/IR.cpp
@@ -68,7 +68,7 @@ Instr::IsPlainInstr() const
}
bool
-Instr::DoStackArgsOpt(Func *topFunc) const
+Instr::DoStackArgsOpt() const
{
return this->usesStackArgumentsObject && m_func->IsStackArgsEnabled();
}
@@ -1029,6 +1029,91 @@ bool Instr::CanAggregateByteCodeUsesAcrossInstr(Instr * instr)
(instr->GetByteCodeOffset() == this->GetByteCodeOffset()));
}
+bool IR::Instr::IsStFldVariant() const
+{
+ return this->m_opcode == Js::OpCode::StFld ||
+ this->m_opcode == Js::OpCode::StFldStrict ||
+ this->m_opcode == Js::OpCode::StLocalFld ||
+ this->m_opcode == Js::OpCode::StRootFld ||
+ this->m_opcode == Js::OpCode::StRootFldStrict ||
+ this->m_opcode == Js::OpCode::StSuperFld ||
+ this->m_opcode == Js::OpCode::StSuperFldStrict;
+}
+
+bool IR::Instr::IsStElemVariant() const
+{
+ return this->m_opcode == Js::OpCode::StElemI_A ||
+ this->m_opcode == Js::OpCode::StElemI_A_Strict ||
+ this->m_opcode == Js::OpCode::StElemC;
+}
+
+bool IR::Instr::DontHoistBailOnNoProfileAboveInGeneratorFunction() const
+{
+ return this->m_opcode == Js::OpCode::GeneratorResumeYield ||
+ this->m_opcode == Js::OpCode::GeneratorCreateInterpreterStackFrame;
+}
+
+bool IR::Instr::CanChangeFieldValueWithoutImplicitCall() const
+{
+ // TODO: Why is InitFld necessary?
+ return this->IsStFldVariant() || this->IsStElemVariant();
+}
+
+// If LazyBailOut is the only BailOutKind on the instruction, the BailOutInfo is cleared.
+// Otherwise, we remove the LazyBailOut kind from the instruction and still keep the BailOutInfo.
+void IR::Instr::ClearLazyBailOut()
+{
+ if (!this->HasBailOutInfo())
+ {
+ return;
+ }
+
+ if (this->OnlyHasLazyBailOut())
+ {
+ this->ClearBailOutInfo();
+ }
+ else
+ {
+ this->GetBailOutInfo()->RestoreUseOfDst();
+ this->SetBailOutKind(BailOutInfo::WithoutLazyBailOut(this->GetBailOutKind()));
+ }
+
+ Assert(!this->HasLazyBailOut());
+}
+
+int IR::Instr::GetOpndCount() const
+{
+ return (this->m_src1 ? 1 : 0) + (this->m_src2 ? 1 : 0) + (this->m_dst ? 1 : 0);
+}
+
+bool IR::Instr::AreAllOpndsTypeSpecialized() const
+{
+ bool src1TypeSpec = !this->m_src1 || (this->m_src1->GetStackSym() && this->m_src1->GetStackSym()->IsTypeSpec());
+ bool src2TypeSpec = !this->m_src2 || (this->m_src2->GetStackSym() && this->m_src2->GetStackSym()->IsTypeSpec());
+ bool dstTypeSpec = !this->m_dst || (this->m_dst->GetStackSym() && this->m_dst->GetStackSym()->IsTypeSpec());
+ return src1TypeSpec && src2TypeSpec && dstTypeSpec && this->GetOpndCount() > 0;
+}
+
+bool IR::Instr::OnlyHasLazyBailOut() const
+{
+ return this->HasBailOutInfo() && BailOutInfo::OnlyHasLazyBailOut(this->GetBailOutKind());
+}
+
+bool IR::Instr::HasLazyBailOut() const
+{
+ return this->HasBailOutInfo() && BailOutInfo::HasLazyBailOut(this->GetBailOutKind());
+}
+
+bool IR::Instr::HasPreOpBailOut() const
+{
+ return this->HasBailOutInfo() && this->GetBailOutInfo()->bailOutOffset == this->GetByteCodeOffset();
+}
+
+bool IR::Instr::HasPostOpBailOut() const
+{
+ return this->HasBailOutInfo() && this->GetBailOutInfo()->bailOutOffset > this->GetByteCodeOffset();
+}
+
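
The pre-op/post-op helpers above rely solely on comparing the bailout offset with the instruction's bytecode offset. A toy, self-contained illustration of that invariant (ToyInstr and ToyBailOutInfo are hypothetical):

```cpp
#include <cstdint>
#include <cassert>

struct ToyBailOutInfo { uint32_t bailOutOffset; };

struct ToyInstr
{
    uint32_t byteCodeOffset;
    const ToyBailOutInfo* bailOutInfo; // nullptr when the instr has no bailout

    bool HasPreOpBailOut() const
    {
        // Restarts at the same bytecode, i.e. before the operation's effects.
        return bailOutInfo && bailOutInfo->bailOutOffset == byteCodeOffset;
    }

    bool HasPostOpBailOut() const
    {
        // Resumes at a later bytecode, i.e. after the operation has completed.
        return bailOutInfo && bailOutInfo->bailOutOffset > byteCodeOffset;
    }
};

int main()
{
    ToyBailOutInfo preOp{ 10 }, postOp{ 14 };
    ToyInstr a{ 10, &preOp }, b{ 10, &postOp }, c{ 10, nullptr };
    assert(a.HasPreOpBailOut() && !a.HasPostOpBailOut());
    assert(!b.HasPreOpBailOut() && b.HasPostOpBailOut());
    assert(!c.HasPreOpBailOut() && !c.HasPostOpBailOut());
    return 0;
}
```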
BailOutInfo *
Instr::GetBailOutInfo() const
{
@@ -1203,7 +1288,7 @@ Instr::ReplaceBailOutInfo(BailOutInfo *newBailOutInfo)
__assume(UNREACHED);
}
- if (oldBailOutInfo->bailOutInstr == this)
+ if (oldBailOutInfo->bailOutInstr == this && !oldBailOutInfo->sharedBailOutKind)
{
Assert(!oldBailOutInfo->wasCloned && !oldBailOutInfo->wasCopied);
JitArenaAllocator * alloc = this->m_func->m_alloc;
@@ -1568,9 +1653,6 @@ BranchInstr::New(Js::OpCode opcode, LabelInstr * branchTarget, Func *func)
branchInstr->m_src1 = nullptr;
branchInstr->m_src2 = nullptr;
branchInstr->m_byteCodeReg = Js::Constants::NoRegister;
-#if DBG
- branchInstr->m_isHelperToNonHelperBranch = false;
-#endif
return branchInstr;
}
@@ -1823,6 +1905,14 @@ BranchInstr::Invert()
this->m_opcode = Js::OpCode::BrOnNoProperty;
break;
+ case Js::OpCode::BrOnHasLocalProperty:
+ this->m_opcode = Js::OpCode::BrOnNoLocalProperty;
+ break;
+
+ case Js::OpCode::BrOnNoLocalProperty:
+ this->m_opcode = Js::OpCode::BrOnHasLocalProperty;
+ break;
+
case Js::OpCode::BrOnNoProperty:
this->m_opcode = Js::OpCode::BrOnHasProperty;
break;
@@ -2017,6 +2107,22 @@ Instr::New(Js::OpCode opcode, Func *func)
return instr;
}
+///----------------------------------------------------------------------------
+///
+/// Instr::New
+///
+/// Create an Instr with a byte code offset.
+///
+///----------------------------------------------------------------------------
+
+Instr *
+Instr::New(Js::OpCode opcode, Func *func, IR::Instr * bytecodeOffsetInstr)
+{
+ Instr * instr = Instr::New(opcode, func);
+ instr->SetByteCodeOffset(bytecodeOffsetInstr);
+ return instr;
+}
+
///----------------------------------------------------------------------------
///
/// Instr::New
@@ -2664,6 +2770,21 @@ Instr::GetNextRealInstr() const
return instr;
}
+#if DBG
+IR::LabelInstr *
+Instr::GetNextNonEmptyLabel() const
+{
+ IR::Instr *instr = const_cast<IR::Instr *>(this);
+
+ while (instr != nullptr && (!instr->IsLabelInstr() || instr->m_next->IsLabelInstr()))
+ {
+ instr = instr->m_next;
+ }
+
+ return instr->AsLabelInstr();
+}
+#endif
+
///----------------------------------------------------------------------------
///
/// Instr::GetNextRealInstrOrLabel
@@ -2842,7 +2963,7 @@ Instr::IsByteCodeUsesInstrFor(IR::Instr * instr) const
IR::LabelInstr *
Instr::GetOrCreateContinueLabel(const bool isHelper)
{
- if(m_next && m_next->IsLabelInstr() && m_next->AsLabelInstr()->isOpHelper == isHelper)
+ if (m_next && m_next->IsLabelInstr() && m_next->AsLabelInstr()->isOpHelper == isHelper)
{
return m_next->AsLabelInstr();
}
@@ -3067,6 +3188,8 @@ Instr::TransferTo(Instr * instr)
instr->dstIsAlwaysConvertedToInt32 = this->dstIsAlwaysConvertedToInt32;
instr->dstIsAlwaysConvertedToNumber = this->dstIsAlwaysConvertedToNumber;
instr->dataWidth = this->dataWidth;
+ instr->isCtorCall = this->isCtorCall;
+ instr->forcePreOpBailOutIfNeeded = this->forcePreOpBailOutIfNeeded;
IR::Opnd * dst = this->m_dst;
if (dst)
@@ -3089,6 +3212,16 @@ Instr::TransferTo(Instr * instr)
this->m_src2 = nullptr;
}
+// Convert an instruction to a bailout instruction and perform a shallow copy of the input instruction's BailOutInfo.
+// Can optionally change the BailOutKind, otherwise the input instruction's BailOutKind will be used instead.
+IR::Instr *
+Instr::ConvertToBailOutInstrWithBailOutInfoCopy(BailOutInfo *bailOutInfo, IR::BailOutKind bailOutKind)
+{
+ BailOutInfo *bailOutInfoCopy = JitAnew(this->m_func->m_alloc, BailOutInfo, bailOutInfo->bailOutOffset, this->m_func);
+ bailOutInfo->PartialDeepCopyTo(bailOutInfoCopy);
+ return this->ConvertToBailOutInstr(bailOutInfoCopy, bailOutKind);
+}
+
IR::Instr *
Instr::ConvertToBailOutInstr(IR::Instr * bailOutTarget, IR::BailOutKind kind, uint32 bailOutOffset)
{
@@ -3350,6 +3483,11 @@ IR::Instr* Instr::GetBytecodeArgOutCapture()
this->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn);
Assert(this->m_dst->GetStackSym()->m_isArgCaptured);
IR::Instr* instr = this->GetSrc1()->GetStackSym()->m_instrDef;
+ while (instr->m_opcode != Js::OpCode::BytecodeArgOutCapture)
+ {
+ Assert(instr->GetSrc1() && instr->GetSrc1()->GetStackSym() && instr->GetSrc1()->GetStackSym()->IsSingleDef());
+ instr = instr->GetSrc1()->GetStackSym()->m_instrDef;
+ }
Assert(instr->m_opcode == Js::OpCode::BytecodeArgOutCapture);
return instr;
}
@@ -4116,6 +4254,14 @@ bool Instr::UnaryCalculator(IntConstType src1Const, IntConstType *pResult, IRTyp
return true;
}
+GeneratorBailInInstr*
+GeneratorBailInInstr::New(IR::Instr* yieldInstr, Func* func)
+{
+ GeneratorBailInInstr* labelInstr = JitAnew(func->m_alloc, IR::GeneratorBailInInstr, func->m_alloc, yieldInstr);
+ labelInstr->Init(Js::OpCode::GeneratorBailInLabel, InstrKindLabel, func, false /* isOpHelper */);
+ return labelInstr;
+}
+
#if ENABLE_DEBUG_CONFIG_OPTIONS
///----------------------------------------------------------------------------
///
diff --git a/lib/Backend/IR.h b/lib/Backend/IR.h
index 90e4a25f60f..0867f289acb 100644
--- a/lib/Backend/IR.h
+++ b/lib/Backend/IR.h
@@ -15,6 +15,9 @@ class IRBuilderAsmJs;
class FlowGraph;
class GlobOpt;
class BailOutInfo;
+class GeneratorBailInInfo;
+class SCCLiveness;
+
struct LazyBailOutRecord;
typedef JsUtil::KeyValuePair ConstantStackSymValue;
@@ -50,6 +53,30 @@ struct CapturedValues
Assert(refCount > 0);
refCount++;
}
+
+ void CopyTo(JitArenaAllocator *allocator, CapturedValues *other) const
+ {
+ Assert(other != nullptr);
+ this->constantValues.CopyTo(allocator, other->constantValues);
+ this->copyPropSyms.CopyTo(allocator, other->copyPropSyms);
+
+ if (other->argObjSyms != nullptr)
+ {
+ other->argObjSyms->ClearAll();
+ JitAdelete(allocator, other->argObjSyms);
+ }
+
+ if (this->argObjSyms != nullptr)
+ {
+ other->argObjSyms = this->argObjSyms->CopyNew(allocator);
+ }
+ else
+ {
+ other->argObjSyms = nullptr;
+ }
+
+ // Ignore refCount because other objects might still reference it
+ }
};
class LoweredBasicBlock;
@@ -91,6 +118,7 @@ class ProfiledLabelInstr;
class MultiBranchInstr;
class PragmaInstr;
class ByteCodeUsesInstr;
+class GeneratorBailInInstr;
class Opnd;
class RegOpnd;
@@ -128,8 +156,8 @@ const int32 InvalidInstrLayout = -1;
/// ExitInstr
/// PragmaInstr
/// BailoutInstr
-/// ByteCoteUsesInstr
-///
+/// ByteCodeUsesInstr
+/// GeneratorBailInInstr
///---------------------------------------------------------------------------
class Instr
@@ -172,11 +200,13 @@ class Instr
isSafeToSpeculate(false)
#if DBG
, highlight(0)
+ , m_noLazyHelperAssert(false)
#endif
{
}
public:
static Instr * New(Js::OpCode opcode, Func *func);
+ static Instr * New(Js::OpCode opcode, Func *func, IR::Instr * bytecodeOffsetInstr);
static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Func *func);
static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Func *func);
static Instr * New(Js::OpCode opcode, Opnd *dstOpnd, Opnd *src1Opnd, Opnd *src2Opnd, Func *func);
@@ -192,6 +222,9 @@ class Instr
BranchInstr * AsBranchInstr();
bool IsLabelInstr() const;
LabelInstr * AsLabelInstr();
+ bool IsGeneratorBailInInstr() const;
+ GeneratorBailInInstr * AsGeneratorBailInInstr();
+
bool IsJitProfilingInstr() const;
JitProfilingInstr * AsJitProfilingInstr();
bool IsProfiledInstr() const;
@@ -214,7 +247,7 @@ class Instr
bool StartsBasicBlock() const;
bool EndsBasicBlock() const;
bool HasFallThrough() const;
- bool DoStackArgsOpt(Func *topFunc) const;
+ bool DoStackArgsOpt() const;
bool HasAnyLoadHeapArgsOpCode();
bool IsEqual(IR::Instr *instr) const;
@@ -283,8 +316,12 @@ class Instr
IR::Instr * Copy(bool copyDst = true);
IR::Instr * CopyWithoutDst();
IR::Instr * Clone();
- IR::Instr * ConvertToBailOutInstr(IR::Instr * bailOutTarget, BailOutKind kind, uint32 bailOutOffset = Js::Constants::NoByteCodeOffset);
- IR::Instr * ConvertToBailOutInstr(BailOutInfo * bailOutInfo, BailOutKind kind, bool useAuxBailout = false);
+ IR::Instr * ConvertToBailOutInstr(IR::Instr *bailOutTarget, BailOutKind kind, uint32 bailOutOffset = Js::Constants::NoByteCodeOffset);
+ IR::Instr * ConvertToBailOutInstr(BailOutInfo *bailOutInfo, BailOutKind kind, bool useAuxBailout = false);
+ IR::Instr * ConvertToBailOutInstrWithBailOutInfoCopy(BailOutInfo *bailOutInfo, IR::BailOutKind bailOutKind);
+#if DBG
+ IR::LabelInstr *GetNextNonEmptyLabel() const;
+#endif
IR::Instr * GetNextRealInstr() const;
IR::Instr * GetNextRealInstrOrLabel() const;
IR::Instr * GetNextBranchOrLabel() const;
@@ -304,6 +341,22 @@ class Instr
static Instr* FindSingleDefInstr(Js::OpCode opCode, Opnd* src);
bool CanAggregateByteCodeUsesAcrossInstr(IR::Instr * instr);
+ bool DontHoistBailOnNoProfileAboveInGeneratorFunction() const;
+
+ // LazyBailOut
+ bool AreAllOpndsTypeSpecialized() const;
+ bool IsStFldVariant() const;
+ bool IsStElemVariant() const;
+ bool CanChangeFieldValueWithoutImplicitCall() const;
+ void ClearLazyBailOut();
+ bool OnlyHasLazyBailOut() const;
+ bool HasLazyBailOut() const;
+ bool HasPreOpBailOut() const;
+ bool HasPostOpBailOut() const;
+#if DBG
+ bool m_noLazyHelperAssert;
+#endif
+
BranchInstr * ChangeCmCCToBranchInstr(LabelInstr *targetInstr);
static void MoveRangeAfter(Instr * instrStart, Instr * instrLast, Instr * instrAfter);
static IR::Instr * CloneRange(Instr * instrStart, Instr * instrLast, Instr * instrInsert, Lowerer *lowerer, JitArenaAllocator *alloc, bool (*fMapTest)(IR::Instr*), bool clonedInstrGetOrigArgSlot);
@@ -475,11 +528,13 @@ class Instr
void MoveArgs(bool generateByteCodeCapture = false);
void Move(IR::Instr* insertInstr);
private:
+ int GetOpndCount() const;
void ClearNumber() { this->m_number = 0; }
void SetNumber(uint32 number);
friend class ::Func;
friend class ::Lowerer;
friend class IR::ByteCodeUsesInstr;
+ friend class ::SCCLiveness;
void SetByteCodeOffset(uint32 number);
friend class ::IRBuilder;
@@ -750,6 +805,7 @@ class LabelInstr : public Instr
inline void SetRegion(Region *);
inline Region * GetRegion(void) const;
inline BOOL IsUnreferenced(void) const;
+ inline BOOL IsGeneratorEpilogueLabel(void) const;
LabelInstr * CloneLabel(BOOL fCreate);
@@ -820,6 +876,7 @@ class BranchInstr : public Instr
{
#if DBG
m_isMultiBranch = false;
+ m_isHelperToNonHelperBranch = false;
m_leaveConvToBr = false;
#endif
}
@@ -1057,6 +1114,25 @@ class PragmaInstr : public Instr
PragmaInstr * CopyPragma();
};
+class GeneratorBailInInstr : public LabelInstr
+{
+private:
+ GeneratorBailInInstr(JitArenaAllocator* allocator, IR::Instr* yieldInstr) :
+ LabelInstr(allocator),
+ yieldInstr(yieldInstr),
+ upwardExposedUses(allocator)
+ {
+ Assert(yieldInstr != nullptr && yieldInstr->m_opcode == Js::OpCode::Yield);
+ }
+
+public:
+ IR::Instr* yieldInstr;
+ CapturedValues capturedValues;
+ BVSparse<JitArenaAllocator> upwardExposedUses;
+
+ static GeneratorBailInInstr* New(IR::Instr* yieldInstr, Func* func);
+};
+
template
class BailOutInstrTemplate : public InstrType
{
diff --git a/lib/Backend/IR.inl b/lib/Backend/IR.inl
index e3d6466ce11..a5f6d113f8e 100644
--- a/lib/Backend/IR.inl
+++ b/lib/Backend/IR.inl
@@ -124,6 +124,34 @@ Instr::AsLabelInstr()
return reinterpret_cast<LabelInstr *>(this);
}
+///----------------------------------------------------------------------------
+///
+/// Instr::IsGeneratorBailInInstr
+///
+///----------------------------------------------------------------------------
+
+__forceinline bool
+Instr::IsGeneratorBailInInstr() const
+{
+ return this->m_opcode == Js::OpCode::GeneratorBailInLabel;
+}
+
+///----------------------------------------------------------------------------
+///
+/// Instr::AsGeneratorBailInInstr
+///
+/// Return this as a GeneratorBailInInstr *
+///
+///----------------------------------------------------------------------------
+
+inline GeneratorBailInInstr*
+Instr::AsGeneratorBailInInstr()
+{
+ AssertMsg(this->IsGeneratorBailInInstr(), "Bad call to AsGeneratorBailInInstr()");
+
+ return reinterpret_cast<GeneratorBailInInstr *>(this);
+}
+
///----------------------------------------------------------------------------
///
/// Instr::AsMultiBrInstr
@@ -256,6 +284,7 @@ Instr::EndsBasicBlock() const
return
this->IsBranchInstr() ||
this->IsExitInstr() ||
+ this->m_opcode == Js::OpCode::Yield ||
this->m_opcode == Js::OpCode::Ret ||
this->m_opcode == Js::OpCode::Throw ||
this->m_opcode == Js::OpCode::RuntimeTypeError ||
@@ -728,6 +757,13 @@ LabelInstr::IsUnreferenced(void) const
return labelRefs.Empty() && !m_hasNonBranchRef;
}
+inline BOOL
+LabelInstr::IsGeneratorEpilogueLabel(void) const
+{
+ return this->m_opcode == Js::OpCode::GeneratorEpilogueNoFrameNullOutLabel ||
+ this->m_opcode == Js::OpCode::GeneratorEpilogueFrameNullOutLabel;
+}
+
inline void
LabelInstr::SetRegion(Region * region)
{
diff --git a/lib/Backend/IRBuilder.cpp b/lib/Backend/IRBuilder.cpp
index 75ee5c7f245..05c3ff31d33 100644
--- a/lib/Backend/IRBuilder.cpp
+++ b/lib/Backend/IRBuilder.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
@@ -49,7 +50,7 @@ IRBuilder::AddStatementBoundary(uint statementIndex, uint offset)
}
}
}
- else if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryLineFlag))
+ else if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryLineFlag))
{
this->InjectBailOut(offset);
}
@@ -117,6 +118,11 @@ IRBuilder::DoBailOnNoProfile()
return false;
}
+ if (m_func->GetTopFunc()->GetJITFunctionBody()->IsCoroutine() && !m_func->IsLoopBody())
+ {
+ return false;
+ }
+
return true;
}
@@ -314,56 +320,6 @@ IRBuilder::AddEnvOpndForInnerFrameDisplay(IR::Instr *instr, uint offset)
}
}
-bool
-IRBuilder::DoSlotArrayCheck(IR::SymOpnd *fieldOpnd, bool doDynamicCheck)
-{
- if (PHASE_OFF(Js::ClosureRangeCheckPhase, m_func))
- {
- return true;
- }
-
- PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
- IR::Instr *instrDef = propertySym->m_stackSym->m_instrDef;
- IR::Opnd *allocOpnd = nullptr;
-
- if (instrDef == nullptr)
- {
- if (doDynamicCheck)
- {
- return false;
- }
- Js::Throw::FatalInternalError();
- }
- switch(instrDef->m_opcode)
- {
- case Js::OpCode::NewScopeSlots:
- case Js::OpCode::NewStackScopeSlots:
- case Js::OpCode::NewScopeSlotsWithoutPropIds:
- allocOpnd = instrDef->GetSrc1();
- break;
-
- case Js::OpCode::LdSlot:
- case Js::OpCode::LdSlotArr:
- if (doDynamicCheck)
- {
- return false;
- }
- // fall through
- default:
- Js::Throw::FatalInternalError();
- }
-
- uint32 allocCount = allocOpnd->AsIntConstOpnd()->AsUint32();
- uint32 slotId = (uint32)propertySym->m_propertyId;
-
- if (slotId >= allocCount)
- {
- Js::Throw::FatalInternalError();
- }
-
- return true;
-}
-
///----------------------------------------------------------------------------
///
/// IRBuilder::Build
@@ -403,12 +359,10 @@ IRBuilder::Build()
if (tempCount > 0)
{
this->tempMap = AnewArrayZ(m_tempAlloc, SymID, tempCount);
- this->fbvTempUsed = BVFixed::New(tempCount, m_tempAlloc);
}
else
{
this->tempMap = nullptr;
- this->fbvTempUsed = nullptr;
}
m_func->m_headInstr = IR::EntryInstr::New(Js::OpCode::FunctionEntry, m_func);
@@ -472,7 +426,6 @@ IRBuilder::Build()
this->LoadNativeCodeData();
this->BuildConstantLoads();
- this->BuildGeneratorPreamble();
if (!this->IsLoopBody() && m_func->GetJITFunctionBody()->HasImplicitArgIns())
{
@@ -484,9 +437,12 @@ IRBuilder::Build()
this->BuildArgInRest();
}
- if (m_func->IsJitInDebugMode())
+ // This is the first bailout in the function; the locals on the stack have not been initialized to undefined, so do not restore them.
+ // Note that for generators, we insert the bailout after the jump table to allow
+ // the generator's execution to proceed before bailing out. Otherwise, we would always
+ // bail to the beginning of the function in the interpreter, creating an infinite loop.
+ if (m_func->IsJitInDebugMode() && (!this->m_func->GetJITFunctionBody()->IsCoroutine() || this->IsLoopBody()))
{
- // This is first bailout in the function, the locals at stack have not initialized to undefined, so do not restore them.
this->InsertBailOutForDebugger(m_functionStartOffset, IR::BailOutForceByFlag | IR::BailOutBreakPointInFunction | IR::BailOutStep, nullptr);
}
@@ -532,6 +488,34 @@ IRBuilder::Build()
this->AddInstr(instr, offset);
}
+ // The point at which we insert the generator resume jump table is important.
+ // We want to insert it right *after* the environment and constants have
+ // been loaded and *before* we create any other important objects
+ // (e.g: FrameDisplay, LocalClosure) which will be passed on to the interpreter
+ // frame when we bail out. Those values, if used when we resume, will be restored
+ // by the bail-in code, therefore we don't want to unnecessarily create those new
+ // objects every time we "resume" a generator
+ //
+ // Note: We need to make sure that all the values below are allocated on the heap.
+ // so that they don't go away once this jit'd frame is popped off.
+
+#ifdef BAILOUT_INJECTION
+ lastInstr = this->m_generatorJumpTable.BuildJumpTable();
+#else
+ this->m_generatorJumpTable.BuildJumpTable();
+#endif
+
+ // When debugging generators, insert bail-out after the jump table so that we can
+ // get to the right point before going back to the interpreter.
+ // This bailout is equivalent to the one inserted above for non-generator functions.
+ // Additionally, we also need to insert bailouts at each resume point, right
+ // after the bail-in code, since this bailout only covers the very first time
+ // we are in the generator.
+ if (m_func->IsJitInDebugMode() && this->m_func->GetJITFunctionBody()->IsCoroutine())
+ {
+ this->InsertBailOutForDebugger(m_functionStartOffset, IR::BailOutForceByFlag | IR::BailOutBreakPointInFunction | IR::BailOutStep, nullptr);
+ }
+
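
Conceptually, the jump table built here maps the interpreter bytecode offset of the pending yield to the jit'd bail-in label for that yield, falling through to the function start on the first entry. A hedged, stand-alone sketch of that idea (the map type and Resume signature are illustrative, not ChakraCore APIs):

```cpp
#include <cstdint>
#include <unordered_map>

using ByteCodeOffset = uint32_t;
using NativeAddress  = const void*;

struct ResumeJumpTable
{
    // Filled in while building IR: one entry per yield's bail-in label.
    std::unordered_map<ByteCodeOffset, NativeAddress> bailInLabels;
    NativeAddress functionStart = nullptr;

    NativeAddress Resume(bool hasInterpreterFrame, ByteCodeOffset pendingYieldOffset) const
    {
        if (!hasInterpreterFrame)
        {
            // First call into the generator body: fall through to the start.
            return functionStart;
        }
        auto it = bailInLabels.find(pendingYieldOffset);
        return it != bailInLabels.end() ? it->second : functionStart;
    }
};
```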
Js::RegSlot funcExprScopeReg = m_func->GetJITFunctionBody()->GetFuncExprScopeReg();
IR::RegOpnd *frameDisplayOpnd = nullptr;
if (funcExprScopeReg != Js::Constants::NoRegister)
@@ -906,40 +890,6 @@ IRBuilder::Build()
void
IRBuilder::EmitClosureRangeChecks()
{
- // Emit closure range checks
- if (m_func->slotArrayCheckTable)
- {
- // Local slot array checks, should only be necessary in jitted loop bodies.
- FOREACH_HASHTABLE_ENTRY(uint32, bucket, m_func->slotArrayCheckTable)
- {
- uint32 slotId = bucket.element;
- Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);
-
- if (slotId > Js::ScopeSlots::FirstSlotIndex)
- {
- // Emit a SlotArrayCheck instruction, chained to the instruction (LdSlot) that defines the pointer.
- StackSym *stackSym = m_func->m_symTable->FindStackSym(bucket.value);
- Assert(stackSym && stackSym->m_instrDef);
-
- IR::Instr *instrDef = stackSym->m_instrDef;
- IR::Instr *insertInstr = instrDef->m_next;
- IR::RegOpnd *dstOpnd = instrDef->UnlinkDst()->AsRegOpnd();
- IR::Instr *instr = IR::Instr::New(Js::OpCode::SlotArrayCheck, dstOpnd, m_func);
-
- dstOpnd = IR::RegOpnd::New(TyVar, m_func);
- instrDef->SetDst(dstOpnd);
- instr->SetSrc1(dstOpnd);
-
- // Attach the slot ID to the check instruction.
- IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(bucket.element, TyUint32, m_func);
- instr->SetSrc2(slotIdOpnd);
-
- insertInstr->InsertBefore(instr);
- }
- }
- NEXT_HASHTABLE_ENTRY;
- }
-
if (m_func->frameDisplayCheckTable)
{
// Frame display checks. Again, chain to the instruction (LdEnv/LdSlot).
@@ -1278,7 +1228,6 @@ IRBuilder::BuildSrcStackSymID(Js::RegSlot regSlot)
this->SetMappedTemp(regSlot, symID);
this->EnsureLoopBodyLoadSlot(symID);
}
- this->SetTempUsed(regSlot, TRUE);
}
else
{
@@ -1312,17 +1261,33 @@ IRBuilder::EnsureLoopBodyForInEnumeratorArrayOpnd()
}
IR::Opnd *
-IRBuilder::BuildForInEnumeratorOpnd(uint forInLoopLevel)
+IRBuilder::BuildForInEnumeratorOpnd(uint forInLoopLevel, uint32 offset)
{
Assert(forInLoopLevel < this->m_func->GetJITFunctionBody()->GetForInLoopDepth());
- if (!this->IsLoopBody())
+ if (this->IsLoopBody())
+ {
+ return IR::IndirOpnd::New(
+ this->EnsureLoopBodyForInEnumeratorArrayOpnd(),
+ forInLoopLevel * sizeof(Js::ForInObjectEnumerator),
+ TyMachPtr,
+ this->m_func
+ );
+ }
+ else if (this->m_func->GetJITFunctionBody()->IsCoroutine())
{
- StackSym *stackSym = StackSym::New(TyMisc, this->m_func);
+ return IR::IndirOpnd::New(
+ this->m_generatorJumpTable.BuildForInEnumeratorArrayOpnd(offset),
+ forInLoopLevel * sizeof(Js::ForInObjectEnumerator),
+ TyMachPtr,
+ this->m_func
+ );
+ }
+ else
+ {
+ StackSym* stackSym = StackSym::New(TyMisc, this->m_func);
stackSym->m_offset = forInLoopLevel;
return IR::SymOpnd::New(stackSym, TyMachPtr, this->m_func);
}
- return IR::IndirOpnd::New(
- EnsureLoopBodyForInEnumeratorArrayOpnd(), forInLoopLevel * sizeof(Js::ForInObjectEnumerator), TyMachPtr, this->m_func);
}
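
The change above gives each execution mode its own home for the for-in enumerators; the selection logic reduces to something like the following sketch (the enum and helper names are invented for illustration):

```cpp
#include <cstddef>

enum class EnumeratorHome { LoopBodyArray, GeneratorObject, JitStackSlot };

// Loop bodies and generators keep the ForIn enumerators in a heap-allocated
// array (indexed by for-in loop level) so they survive bailout and resume;
// ordinary functions can use a plain stack slot.
EnumeratorHome ChooseForInEnumeratorHome(bool isLoopBody, bool isCoroutine)
{
    if (isLoopBody)  return EnumeratorHome::LoopBodyArray;
    if (isCoroutine) return EnumeratorHome::GeneratorObject;
    return EnumeratorHome::JitStackSlot;
}

// The offset within the array is simply forInLoopLevel * sizeof(ForInObjectEnumerator).
size_t ForInEnumeratorOffset(unsigned forInLoopLevel, size_t enumeratorSize)
{
    return forInLoopLevel * enumeratorSize;
}
```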
///----------------------------------------------------------------------------
@@ -1353,7 +1318,7 @@ IRBuilder::BuildSrcOpnd(Js::RegSlot srcRegSlot, IRType type)
///----------------------------------------------------------------------------
IR::RegOpnd *
-IRBuilder::BuildDstOpnd(Js::RegSlot dstRegSlot, IRType type, bool isCatchObjectSym)
+IRBuilder::BuildDstOpnd(Js::RegSlot dstRegSlot, IRType type, bool isCatchObjectSym, bool reuseTemp)
{
StackSym * symDst;
SymID symID;
@@ -1374,24 +1339,20 @@ IRBuilder::BuildDstOpnd(Js::RegSlot dstRegSlot, IRType type, bool isCatchObjectS
// This is a def of a temp. Create a new sym ID for it if it's been used since its last def.
// !!!NOTE: always process an instruction's temp uses before its temp defs!!!
- if (this->GetTempUsed(dstRegSlot))
+
+ symID = this->GetMappedTemp(dstRegSlot);
+ if (symID == 0)
{
- symID = m_func->m_symTable->NewID();
- this->SetTempUsed(dstRegSlot, FALSE);
+ // First time we've seen the temp. Just use the number that the front end gave it.
+ symID = static_cast<SymID>(dstRegSlot);
this->SetMappedTemp(dstRegSlot, symID);
}
- else
+ else if (!reuseTemp)
{
- symID = this->GetMappedTemp(dstRegSlot);
- // The temp hasn't been used since its last def. There are 2 possibilities:
- if (symID == 0)
- {
- // First time we've seen the temp. Just use the number that the front end gave it.
- symID = static_cast(dstRegSlot);
- this->SetMappedTemp(dstRegSlot, symID);
- }
+ // Byte code has not told us to reuse the mapped temp at this def, so don't. Make a new one.
+ symID = m_func->m_symTable->NewID();
+ this->SetMappedTemp(dstRegSlot, symID);
}
-
}
else
{
@@ -1433,71 +1394,6 @@ IRBuilder::BuildImplicitArgIns()
}
}
-#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
-#define POINTER_OFFSET(opnd, c, field) \
- BuildIndirOpnd((opnd), c::Get##field##Offset(), _u(#c) _u(".") _u(#field))
-#else
-#define POINTER_OFFSET(opnd, c, field) \
- BuildIndirOpnd((opnd), c::Get##field##Offset())
-#endif
-
-void
-IRBuilder::BuildGeneratorPreamble()
-{
- if (!this->m_func->GetJITFunctionBody()->IsCoroutine())
- {
- return;
- }
-
- // Build code to check if the generator already has state and if it does then jump to the corresponding resume point.
- // Otherwise jump to the start of the function. The generator object is the first argument by convention established
- // in JavascriptGenerator::EntryNext/EntryReturn/EntryThrow.
- //
- // s1 = Ld_A prm1
- // s2 = Ld_A s1[offset of JavascriptGenerator::frame]
- // BrAddr_A s2 nullptr $startOfFunc
- // s3 = Ld_A s2[offset of InterpreterStackFrame::m_reader.m_currentLocation]
- // s4 = Ld_A s2[offset of InterpreterStackFrame::m_reader.m_startLocation]
- // s5 = Sub_I4 s3 s4
- // GeneratorResumeJumpTable s5
- // $startOfFunc:
- //
-
- StackSym *genParamSym = StackSym::NewParamSlotSym(1, this->m_func);
- this->m_func->SetArgOffset(genParamSym, LowererMD::GetFormalParamOffset() * MachPtr);
-
- IR::SymOpnd *genParamOpnd = IR::SymOpnd::New(genParamSym, TyMachPtr, this->m_func);
- IR::RegOpnd *genRegOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- IR::Instr *instr = IR::Instr::New(Js::OpCode::Ld_A, genRegOpnd, genParamOpnd, this->m_func);
- this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
-
- IR::RegOpnd *genFrameOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- instr = IR::Instr::New(Js::OpCode::Ld_A, genFrameOpnd, POINTER_OFFSET(genRegOpnd, Js::JavascriptGenerator, Frame), this->m_func);
- this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
-
- IR::LabelInstr *labelInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- IR::BranchInstr *branchInstr = IR::BranchInstr::New(Js::OpCode::BrAddr_A, labelInstr, genFrameOpnd, IR::AddrOpnd::NewNull(this->m_func), this->m_func);
- this->AddInstr(branchInstr, Js::Constants::NoByteCodeOffset);
-
- IR::RegOpnd *curLocOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- instr = IR::Instr::New(Js::OpCode::Ld_A, curLocOpnd, POINTER_OFFSET(genFrameOpnd, Js::InterpreterStackFrame, CurrentLocation), this->m_func);
- this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
-
- IR::RegOpnd *startLocOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- instr = IR::Instr::New(Js::OpCode::Ld_A, startLocOpnd, POINTER_OFFSET(genFrameOpnd, Js::InterpreterStackFrame, StartLocation), this->m_func);
- this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
-
- IR::RegOpnd *curOffsetOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
- instr = IR::Instr::New(Js::OpCode::Sub_I4, curOffsetOpnd, curLocOpnd, startLocOpnd, this->m_func);
- this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
-
- instr = IR::Instr::New(Js::OpCode::GeneratorResumeJumpTable, this->m_func);
- instr->SetSrc1(curOffsetOpnd);
- this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
-
- this->AddInstr(labelInstr, Js::Constants::NoByteCodeOffset);
-}
-
void
IRBuilder::LoadNativeCodeData()
{
@@ -1562,7 +1458,7 @@ IRBuilder::BuildConstantLoads()
instr = IR::Instr::NewConstantLoad(dstOpnd, varConst, valueType, m_func,
m_func->IsOOPJIT() ? m_func->GetJITFunctionBody()->GetConstAsT(reg) : nullptr);
break;
- }
+ }
this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
}
@@ -1609,6 +1505,7 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
IR::Opnd * srcOpnd = nullptr;
bool isNotInt = false;
bool dstIsCatchObject = false;
+ bool reuseLoc = false;
ValueType dstValueType;
switch (newOpcode)
{
@@ -1663,6 +1560,9 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
isNotInt = true;
break;
+ case Js::OpCode::LdLocalObj_ReuseLoc:
+ reuseLoc = true;
+ // fall through
case Js::OpCode::LdLocalObj:
if (!m_func->GetJITFunctionBody()->HasScopeObject())
{
@@ -1739,6 +1639,18 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
break;
}
+ case Js::OpCode::LdBaseFncProto:
+ {
+ // reuseLoc is set to true because this opcode is only emitted where reuse of the destination is wanted, i.e., during class extension
+ reuseLoc = true;
+ srcOpnd = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetFunctionPrototypeAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
+ newOpcode = Js::OpCode::Ld_A;
+ break;
+ }
+
+ case Js::OpCode::LdFalse_ReuseLoc:
+ reuseLoc = true;
+ // fall through
case Js::OpCode::LdFalse:
{
const auto addrOpnd = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetFalseAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
@@ -1748,6 +1660,9 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
break;
}
+ case Js::OpCode::LdTrue_ReuseLoc:
+ reuseLoc = true;
+ // fall through
case Js::OpCode::LdTrue:
{
const auto addrOpnd = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
@@ -1769,12 +1684,6 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
isNotInt = TRUE;
break;
- case Js::OpCode::Unused:
- // Don't generate anything. Just indicate that the temp reg is used.
- Assert(this->RegIsTemp(dstRegSlot));
- this->SetTempUsed(dstRegSlot, TRUE);
- return;
-
case Js::OpCode::InitUndecl:
srcOpnd = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetUndeclBlockVarAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
srcOpnd->SetValueType(ValueType::PrimitiveOrObject);
@@ -1808,7 +1717,7 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
}
}
- IR::RegOpnd * dstOpnd = this->BuildDstOpnd(dstRegSlot, TyVar, dstIsCatchObject);
+ IR::RegOpnd * dstOpnd = this->BuildDstOpnd(dstRegSlot, TyVar, dstIsCatchObject, reuseLoc);
dstOpnd->SetValueType(dstValueType);
StackSym * dstSym = dstOpnd->m_sym;
dstSym->m_isCatchObjectSym = dstIsCatchObject;
@@ -1858,7 +1767,7 @@ template
void
IRBuilder::BuildReg2(Js::OpCode newOpcode, uint32 offset)
{
- Assert(!OpCodeAttr::IsProfiledOp(newOpcode) || newOpcode == Js::OpCode::ProfiledStrictLdThis);
+ Assert(!OpCodeAttr::IsProfiledOp(newOpcode));
Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
auto layout = m_jnReader.GetLayout>();
@@ -1876,12 +1785,30 @@ IRBuilder::BuildReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::Re
{
IR::RegOpnd * src1Opnd = this->BuildSrcOpnd(R1);
StackSym * symSrc1 = src1Opnd->m_sym;
+ bool reuseLoc = false;
switch (newOpcode)
{
+ case Js::OpCode::Ld_A_ReuseLoc:
+ newOpcode = Js::OpCode::Ld_A;
+ reuseLoc = true;
+ break;
+
+ case Js::OpCode::Typeof_ReuseLoc:
+ newOpcode = Js::OpCode::Typeof;
+ reuseLoc = true;
+ break;
+
+ case Js::OpCode::UnwrapWithObj_ReuseLoc:
+ newOpcode = Js::OpCode::UnwrapWithObj;
+ reuseLoc = true;
+ break;
+
+ case Js::OpCode::SpreadObjectLiteral:
+ // fall through
case Js::OpCode::SetComputedNameVar:
{
- IR::Instr *instr = IR::Instr::New(Js::OpCode::SetComputedNameVar, m_func);
+ IR::Instr *instr = IR::Instr::New(newOpcode, m_func);
instr->SetSrc1(this->BuildSrcOpnd(R0));
instr->SetSrc2(src1Opnd);
this->AddInstr(instr, offset);
@@ -1908,7 +1835,7 @@ IRBuilder::BuildReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::Re
}
}
- IR::RegOpnd * dstOpnd = this->BuildDstOpnd(R0);
+ IR::RegOpnd * dstOpnd = this->BuildDstOpnd(R0, TyVar, false, reuseLoc);
StackSym * dstSym = dstOpnd->m_sym;
IR::Instr * instr = nullptr;
@@ -1922,19 +1849,6 @@ IRBuilder::BuildReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::Re
}
break;
- case Js::OpCode::ProfiledStrictLdThis:
- newOpcode = Js::OpCode::StrictLdThis;
- if (m_func->HasProfileInfo())
- {
- dstOpnd->SetValueType(m_func->GetReadOnlyProfileInfo()->GetThisInfo().valueType);
- }
-
- if (m_func->DoSimpleJitDynamicProfile())
- {
- IR::JitProfilingInstr* newInstr = IR::JitProfilingInstr::New(Js::OpCode::StrictLdThis, dstOpnd, src1Opnd, m_func);
- instr = newInstr;
- }
- break;
case Js::OpCode::Delete_A:
dstOpnd->SetValueType(ValueType::Boolean);
break;
@@ -1966,15 +1880,43 @@ IRBuilder::BuildReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::Re
break;
case Js::OpCode::Yield:
+ // Jitting loop bodies containing Yield is not possible; this is blocked at the call sites of GenerateLoopBody.
+ AssertMsg(!this->IsLoopBody(), "Attempting to JIT loop body containing Yield");
+
instr = IR::Instr::New(newOpcode, dstOpnd, src1Opnd, m_func);
this->AddInstr(instr, offset);
- this->m_lastInstr = instr->ConvertToBailOutInstr(instr, IR::BailOutForGeneratorYield);
+ IR::Instr* yieldInstr = instr->ConvertToBailOutInstr(instr, IR::BailOutForGeneratorYield);
+ this->m_lastInstr = yieldInstr;
+
+ // This label indicates the bail-in section that we will jump to from the generator jump table
+ auto* bailInLabel = IR::GeneratorBailInInstr::New(yieldInstr, m_func);
+ bailInLabel->m_hasNonBranchRef = true; // set to true so that we don't move this label around
+ LABELNAMESET(bailInLabel, "GeneratorBailInLabel");
+ this->AddInstr(bailInLabel, offset);
+ this->m_func->AddYieldOffsetResumeLabel(nextOffset, bailInLabel);
+
+ yieldInstr->GetBailOutInfo()->bailInInstr = bailInLabel;
- IR::LabelInstr* label = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- label->m_hasNonBranchRef = true;
- this->AddInstr(label, Js::Constants::NoByteCodeOffset);
+#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
+ if (PHASE_TRACE(Js::Phase::BailInPhase, this->m_func))
+ {
+ IR::LabelInstr* traceBailInLabel = IR::LabelInstr::New(Js::OpCode::GeneratorOutputBailInTraceLabel, m_func);
+ traceBailInLabel->m_hasNonBranchRef = true; // set to true so that we don't move this label around
+ LABELNAMESET(traceBailInLabel, "OutputBailInTrace");
+ this->AddInstr(traceBailInLabel, offset);
- this->m_func->AddYieldOffsetResumeLabel(nextOffset, label);
+ IR::Instr* traceBailIn = IR::Instr::New(Js::OpCode::GeneratorOutputBailInTrace, m_func);
+ this->AddInstr(traceBailIn, offset);
+ }
+#endif
+
+ IR::Instr* resumeYield = IR::Instr::New(Js::OpCode::GeneratorResumeYield, dstOpnd, m_func);
+ this->AddInstr(resumeYield, offset);
+
+ if (this->m_func->IsJitInDebugMode())
+ {
+ this->InsertBailOutForDebugger(offset, IR::BailOutForceByFlag | IR::BailOutBreakPointInFunction | IR::BailOutStep);
+ }
return;
}
@@ -2204,7 +2146,7 @@ IRBuilder::BuildReg3(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot
{
InsertBailOnNoProfile(instr);
}
-
+
switch (newOpcode)
{
case Js::OpCode::LdHandlerScope:
@@ -2272,6 +2214,191 @@ IRBuilder::BuildReg3C(Js::OpCode newOpCode, uint32 offset, Js::RegSlot dstRegSlo
this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
}
+void
+IRBuilder::BuildReg2U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, uint index)
+{
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+
+ switch (newOpcode)
+ {
+ case Js::OpCode::InitBaseClass:
+ {
+ IR::Opnd * opndProtoParent = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetObjectPrototypeAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
+ IR::Opnd * opndCtorParent = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetFunctionPrototypeAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
+ BuildInitClass(offset, R0, R1, opndProtoParent, opndCtorParent, GetEnvironmentOperand(offset), index);
+ break;
+ }
+
+ default:
+ AssertMsg(false, "Unknown Reg2U op");
+ break;
+ }
+}
+
+template <typename SizePolicy>
+void
+IRBuilder::BuildReg2U(Js::OpCode newOpcode, uint32 offset)
+{
+ Assert(!OpCodeAttr::IsProfiledOp(newOpcode));
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+    auto layout = m_jnReader.GetLayout<Js::OpLayoutT_Reg2U<SizePolicy>>();
+
+ if (!PHASE_OFF(Js::ClosureRegCheckPhase, m_func))
+ {
+ this->DoClosureRegCheck(layout->R0);
+ this->DoClosureRegCheck(layout->R1);
+ }
+
+ BuildReg2U(newOpcode, offset, layout->R0, layout->R1, layout->SlotIndex);
+}
+
+void
+IRBuilder::BuildReg3U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, Js::RegSlot R2, uint index)
+{
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+
+ switch (newOpcode)
+ {
+ case Js::OpCode::InitInnerBaseClass:
+ {
+ IR::Opnd * opndProtoParent = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetObjectPrototypeAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
+ IR::Opnd * opndCtorParent = IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetFunctionPrototypeAddr(), IR::AddrOpndKindDynamicVar, m_func, true);
+ BuildInitClass(offset, R0, R1, opndProtoParent, opndCtorParent, BuildSrcOpnd(R2), index);
+ break;
+ }
+
+ default:
+ AssertMsg(false, "Unknown Reg3U op");
+ break;
+ }
+}
+
+template <typename SizePolicy>
+void
+IRBuilder::BuildReg3U(Js::OpCode newOpcode, uint32 offset)
+{
+ Assert(!OpCodeAttr::IsProfiledOp(newOpcode));
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+    auto layout = m_jnReader.GetLayout<Js::OpLayoutT_Reg3U<SizePolicy>>();
+
+ if (!PHASE_OFF(Js::ClosureRegCheckPhase, m_func))
+ {
+ this->DoClosureRegCheck(layout->R0);
+ this->DoClosureRegCheck(layout->R1);
+ this->DoClosureRegCheck(layout->R2);
+ }
+
+ BuildReg3U(newOpcode, offset, layout->R0, layout->R1, layout->R2, layout->SlotIndex);
+}
+
+template <typename SizePolicy>
+void
+IRBuilder::BuildReg4U(Js::OpCode newOpcode, uint32 offset)
+{
+ Assert(!OpCodeAttr::IsProfiledOp(newOpcode));
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+    auto layout = m_jnReader.GetLayout<Js::OpLayoutT_Reg4U<SizePolicy>>();
+
+ if (!PHASE_OFF(Js::ClosureRegCheckPhase, m_func))
+ {
+ this->DoClosureRegCheck(layout->R0);
+ this->DoClosureRegCheck(layout->R1);
+ this->DoClosureRegCheck(layout->R2);
+ this->DoClosureRegCheck(layout->R3);
+ }
+
+ BuildReg4U(newOpcode, offset, layout->R0, layout->R1, layout->R2, layout->R3, layout->SlotIndex);
+}
+
+void
+IRBuilder::BuildReg4U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, Js::RegSlot R2, Js::RegSlot R3, uint slotIndex)
+{
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+
+ switch (newOpcode)
+ {
+ case Js::OpCode::InitClass:
+ {
+ BuildInitClass(offset, R0, R1, BuildSrcOpnd(R3), BuildSrcOpnd(R2), GetEnvironmentOperand(offset), slotIndex);
+ break;
+ }
+
+ default:
+ AssertMsg(false, "Unknown Reg4U opcode");
+ break;
+ }
+}
+
+template <typename SizePolicy>
+void
+IRBuilder::BuildReg5U(Js::OpCode newOpcode, uint32 offset)
+{
+ Assert(!OpCodeAttr::IsProfiledOp(newOpcode));
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+    auto layout = m_jnReader.GetLayout<Js::OpLayoutT_Reg5U<SizePolicy>>();
+
+ if (!PHASE_OFF(Js::ClosureRegCheckPhase, m_func))
+ {
+ this->DoClosureRegCheck(layout->R0);
+ this->DoClosureRegCheck(layout->R1);
+ this->DoClosureRegCheck(layout->R2);
+ this->DoClosureRegCheck(layout->R3);
+ this->DoClosureRegCheck(layout->R4);
+ }
+
+ BuildReg5U(newOpcode, offset, layout->R0, layout->R1, layout->R2, layout->R3, layout->R4, layout->SlotIndex);
+}
+
+void
+IRBuilder::BuildReg5U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, Js::RegSlot R2, Js::RegSlot R3, Js::RegSlot R4, uint slotIndex)
+{
+ Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
+
+ switch (newOpcode)
+ {
+ case Js::OpCode::InitInnerClass:
+ {
+ BuildInitClass(offset, R0, R1, BuildSrcOpnd(R3), BuildSrcOpnd(R2), BuildSrcOpnd(R4), slotIndex);
+ break;
+ }
+
+ default:
+ AssertMsg(false, "Unknown Reg5U opcode");
+ break;
+ }
+}
+
+void
+IRBuilder::BuildInitClass(uint32 offset, Js::RegSlot regConstructor, Js::RegSlot regProto, IR::Opnd * opndProtoParent, IR::Opnd * opndConstructorParent, IR::Opnd * opndEnvironment, uint index)
+{
+ IR::RegOpnd * opndProto = BuildDstOpnd(regProto);
+ opndProto->SetValueType(ValueType::GetObject(ObjectType::Object));
+ IR::Instr * instr = IR::Instr::New(Js::OpCode::NewClassProto, opndProto, opndProtoParent, m_func);
+ this->AddInstr(instr, offset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), opndConstructorParent, m_func);
+ this->AddInstr(instr, offset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), opndProto, instr->GetDst(), m_func);
+ this->AddInstr(instr, offset);
+
+ Js::FunctionInfoPtrPtr infoRef = m_func->GetJITFunctionBody()->GetNestedFuncRef(index);
+ IR::AddrOpnd * functionBodySlotOpnd = IR::AddrOpnd::New((Js::Var)infoRef, IR::AddrOpndKindDynamicMisc, m_func);
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), functionBodySlotOpnd, instr->GetDst(), m_func);
+ this->AddInstr(instr, offset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), opndEnvironment, instr->GetDst(), m_func);
+ this->AddInstr(instr, offset);
+
+ IR::RegOpnd * opndConstructor = BuildDstOpnd(regConstructor);
+ instr = IR::Instr::New(Js::OpCode::NewClassConstructor, opndConstructor, instr->GetDst(), m_func);
+ this->AddInstr(instr, offset);
+
+ Assert(opndConstructor->m_sym->m_isSingleDef);
+ opndConstructor->m_sym->m_isSafeThis = true;
+ opndConstructor->m_sym->m_isNotNumber = true;
+}
+
///----------------------------------------------------------------------------
///
/// IRBuilder::BuildReg4
@@ -2303,12 +2430,35 @@ void
IRBuilder::BuildReg4(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
Js::RegSlot src2RegSlot, Js::RegSlot src3RegSlot)
{
- IR::Instr * instr;
- Assert(newOpcode == Js::OpCode::Concat3);
+ IR::Instr * instr = nullptr;
+ Assert(newOpcode == Js::OpCode::Concat3 || newOpcode == Js::OpCode::Restify);
IR::RegOpnd * src1Opnd = this->BuildSrcOpnd(src1RegSlot);
IR::RegOpnd * src2Opnd = this->BuildSrcOpnd(src2RegSlot);
- IR::RegOpnd * src3Opnd = this->BuildSrcOpnd(src3RegSlot);
+ IR::RegOpnd * src3Opnd = this->BuildSrcOpnd(src3RegSlot);
+
+ if (newOpcode == Js::OpCode::Restify)
+ {
+ IR::RegOpnd * src0Opnd = this->BuildSrcOpnd(dstRegSlot);
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), src3Opnd, m_func);
+ this->AddInstr(instr, offset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), src2Opnd, instr->GetDst(), m_func);
+ this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), src1Opnd, instr->GetDst(), m_func);
+ this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), src0Opnd, instr->GetDst(), m_func);
+ this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
+
+ IR::Opnd *firstArg = instr->GetDst();
+ instr = IR::Instr::New(newOpcode, m_func);
+ instr->SetSrc1(firstArg);
+ this->AddInstr(instr, Js::Constants::NoByteCodeOffset);
+ return;
+ }
+
IR::RegOpnd * dstOpnd = this->BuildDstOpnd(dstRegSlot);
IR::RegOpnd * str1Opnd = InsertConvPrimStr(src1Opnd, offset, true);
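
BuildInitClass above, the Restify path in BuildReg4, and the StPropIdArrFromVar case later in this file all rely on the same ExtendArg_A convention: each ExtendArg_A defines a fresh temp whose second source is the previous ExtendArg_A's destination, so the consuming opcode receives its arguments as a backwards-linked chain through a single src1. A self-contained conceptual model of that chain (illustrative names only, not ChakraCore code):

#include <memory>
#include <string>
#include <vector>

// Stands in for the dst of one ExtendArg_A; "prev" plays the role of src2,
// which is the dst of the previously emitted ExtendArg_A.
struct ExtendArgNode
{
    std::string value;
    std::shared_ptr<ExtendArgNode> prev;
};

static std::shared_ptr<ExtendArgNode> ExtendArg(const std::string& value,
                                                std::shared_ptr<ExtendArgNode> prev = nullptr)
{
    auto node = std::make_shared<ExtendArgNode>();
    node->value = value;
    node->prev = std::move(prev);
    return node;
}

// The consumer of the chain (what a NewClassConstructor or Restify lowering would do)
// walks src1 backwards, recovering the arguments in reverse order of emission.
static std::vector<std::string> CollectArgs(std::shared_ptr<ExtendArgNode> head)
{
    std::vector<std::string> args;
    for (; head != nullptr; head = head->prev)
    {
        args.push_back(head->value);
    }
    return args;
}

int main()
{
    // Mirrors the BuildInitClass emission order above: ctor parent, proto, function info ref, environment.
    auto chain = ExtendArg("environment",
                     ExtendArg("functionInfoRef",
                         ExtendArg("proto",
                             ExtendArg("ctorParent"))));
    return CollectArgs(chain).size() == 4 ? 0 : 1;
}
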
@@ -2403,7 +2553,7 @@ IRBuilder::BuildReg2B1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSl
IR::Instr * instr;
IR::RegOpnd * srcOpnd = this->BuildSrcOpnd(srcRegSlot);
- IR::RegOpnd * dstOpnd = this->BuildDstOpnd(dstRegSlot);
+ IR::RegOpnd * dstOpnd = this->BuildDstOpnd(dstRegSlot, TyVar, false, true);
IR::IndirOpnd * indir1Opnd = IR::IndirOpnd::New(dstOpnd, index, TyVar, m_func);
@@ -2440,22 +2590,23 @@ IRBuilder::BuildReg3B1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSl
IR::Instr * instr;
IR::RegOpnd * src1Opnd = this->BuildSrcOpnd(src1RegSlot);
IR::RegOpnd * src2Opnd = this->BuildSrcOpnd(src2RegSlot);
- IR::RegOpnd * dstOpnd = this->BuildDstOpnd(dstRegSlot);
- dstOpnd->SetValueType(ValueType::String);
+ IR::RegOpnd * dstOpnd = nullptr;
IR::Instr * newConcatStrMulti = nullptr;
switch (newOpcode)
{
case Js::OpCode::NewConcatStrMulti:
-
+ dstOpnd = this->BuildDstOpnd(dstRegSlot);
newConcatStrMulti = IR::Instr::New(Js::OpCode::NewConcatStrMulti, dstOpnd, IR::IntConstOpnd::New(index, TyUint32, m_func), m_func);
index = 0;
break;
case Js::OpCode::SetConcatStrMultiItem2:
+ dstOpnd = this->BuildDstOpnd(dstRegSlot, TyVar, false, true);
break;
default:
Assert(false);
};
+ dstOpnd->SetValueType(ValueType::String);
IR::IndirOpnd * indir1Opnd = IR::IndirOpnd::New(dstOpnd, index, TyVar, m_func);
IR::IndirOpnd * indir2Opnd = IR::IndirOpnd::New(dstOpnd, index + 1, TyVar, m_func);
@@ -2530,7 +2681,7 @@ IRBuilder::BuildReg5(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot
src3Opnd = this->BuildSrcOpnd(src3RegSlot);
src4Opnd = this->BuildSrcOpnd(src4RegSlot);
dstOpnd = this->BuildDstOpnd(dstRegSlot);
-
+
instr = IR::Instr::New(Js::OpCode::ArgOut_A, IR::RegOpnd::New(TyVar, m_func), src4Opnd, m_func);
this->AddInstr(instr, offset);
@@ -2816,7 +2967,7 @@ IRBuilder::BuildProfiledReg1Unsigned1(Js::OpCode newOpcode, uint32 offset, Js::R
if (newOpcode == Js::OpCode::InitForInEnumerator)
{
IR::RegOpnd * src1Opnd = this->BuildSrcOpnd(R0);
- IR::Opnd * src2Opnd = this->BuildForInEnumeratorOpnd(C1);
+ IR::Opnd * src2Opnd = this->BuildForInEnumeratorOpnd(C1, offset);
IR::Instr *instr = IR::ProfiledInstr::New(Js::OpCode::InitForInEnumerator, nullptr, src1Opnd, src2Opnd, m_func);
instr->AsProfiledInstr()->u.profileId = profileId;
this->AddInstr(instr, offset);
@@ -2951,7 +3102,7 @@ IRBuilder::BuildReg1Unsigned1(Js::OpCode newOpcode, uint offset, Js::RegSlot R0,
{
IR::Instr *instr = IR::Instr::New(Js::OpCode::InitForInEnumerator, m_func);
instr->SetSrc1(this->BuildSrcOpnd(R0));
- instr->SetSrc2(this->BuildForInEnumeratorOpnd(C1));
+ instr->SetSrc2(this->BuildForInEnumeratorOpnd(C1, offset));
this->AddInstr(instr, offset);
return;
}
@@ -2997,6 +3148,7 @@ IRBuilder::BuildReg1Unsigned1(Js::OpCode newOpcode, uint offset, Js::RegSlot R0,
dstOpnd->SetValueTypeFixed();
}
}
+
///----------------------------------------------------------------------------
///
/// IRBuilder::BuildReg2Int1
@@ -3174,15 +3326,20 @@ IRBuilder::BuildElementC(Js::OpCode newOpcode, uint32 offset, Js::RegSlot fieldR
PropertyKind propertyKind = PropertyKindData;
IR::SymOpnd * fieldSymOpnd = this->BuildFieldOpnd(newOpcode, fieldRegSlot, propertyId, propertyIdIndex, propertyKind);
IR::RegOpnd * regOpnd;
+ bool reuseLoc = false;
switch (newOpcode)
{
+ case Js::OpCode::DeleteFld_ReuseLoc:
+ newOpcode = Js::OpCode::DeleteFld;
+ reuseLoc = true;
+ // fall through
case Js::OpCode::DeleteFld:
case Js::OpCode::DeleteRootFld:
case Js::OpCode::DeleteFldStrict:
case Js::OpCode::DeleteRootFldStrict:
// Load
- regOpnd = this->BuildDstOpnd(regSlot);
+ regOpnd = this->BuildDstOpnd(regSlot, TyVar, false, reuseLoc);
instr = IR::Instr::New(newOpcode, regOpnd, fieldSymOpnd, m_func);
break;
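
The DeleteFld_ReuseLoc case above follows a pattern repeated throughout this patch (LdEnvObj_ReuseLoc, LdLocalFld_ReuseLoc, LdFld_ReuseLoc, DeleteLocalFld_ReuseLoc): the _ReuseLoc byte codes carry no extra operands and are simply normalized to the base opcode plus a reuseLoc flag that is forwarded to BuildDstOpnd's new fourth parameter. A minimal sketch of just that normalization step, with illustrative enum values:

#include <utility>

enum class Op { DeleteFld, DeleteFld_ReuseLoc, LdFld, LdFld_ReuseLoc };

// Returns the base opcode and whether the destination should reuse its existing location.
static std::pair<Op, bool> NormalizeReuseLoc(Op op)
{
    switch (op)
    {
    case Op::DeleteFld_ReuseLoc: return { Op::DeleteFld, true };
    case Op::LdFld_ReuseLoc:     return { Op::LdFld, true };
    default:                     return { op, false };
    }
}

int main()
{
    return NormalizeReuseLoc(Op::LdFld_ReuseLoc).second ? 0 : 1;
}
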
@@ -3423,6 +3580,29 @@ IRBuilder::BuildElementSlot(Js::OpCode newOpcode, uint32 offset, Js::RegSlot fie
}
break;
+ case Js::OpCode::StPropIdArrFromVar:
+ {
+ IR::RegOpnd * src0Opnd = this->BuildSrcOpnd(fieldRegSlot);
+ IR::RegOpnd * src1Opnd = this->BuildSrcOpnd(regSlot);
+ IntConstType value = slotId;
+ IR::IntConstOpnd * valOpnd = IR::IntConstOpnd::New(value, TyInt32, m_func);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), src1Opnd, m_func);
+ this->AddInstr(instr, offset);
+ offset = Js::Constants::NoByteCodeOffset;
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), valOpnd, instr->GetDst(), m_func);
+ this->AddInstr(instr, offset);
+
+ instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), src0Opnd, instr->GetDst(), m_func);
+ this->AddInstr(instr, offset);
+
+ IR::Opnd * firstArg = instr->GetDst();
+ instr = IR::Instr::New(newOpcode, m_func);
+ instr->SetSrc1(firstArg);
+ break;
+ }
+
default:
AssertMsg(UNREACHED, "Unknown ElementSlot opcode");
Fatal();
@@ -3481,6 +3661,7 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
StackSym * stackFuncPtrSym = nullptr;
SymID symID = m_func->GetJITFunctionBody()->GetLocalClosureReg();
bool isLdSlotThatWasNotProfiled = false;
+ bool reuseLoc = false;
StackSym* closureSym = m_func->GetLocalClosureSym();
uint scopeSlotSize = this->IsParamScopeDone() ? m_func->GetJITFunctionBody()->GetScopeSlotArraySize() : m_func->GetJITFunctionBody()->GetParamScopeSlotArraySize();
@@ -3524,8 +3705,6 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
if (IsLoopBody())
{
fieldOpnd = this->BuildFieldOpnd(Js::OpCode::LdSlotArr, closureSym->m_id, slotId, (Js::PropertyIdIndexType)-1, PropertyKindSlotArray);
- // Need a dynamic check on the size of the local slot array.
- m_func->GetTopFunc()->AddSlotArrayCheck(fieldOpnd);
}
}
else if (IsLoopBody())
@@ -3547,11 +3726,6 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
}
this->AddInstr(instr, offset);
- if (!m_func->DoStackFrameDisplay() && IsLoopBody())
- {
- // Need a dynamic check on the size of the local slot array.
- m_func->GetTopFunc()->AddSlotArrayCheck(fieldOpnd);
- }
break;
case Js::OpCode::LdParamObjSlot:
@@ -3629,8 +3803,6 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
if (IsLoopBody())
{
fieldOpnd = this->BuildFieldOpnd(Js::OpCode::LdSlotArr, closureSym->m_id, slotId, (Js::PropertyIdIndexType)-1, PropertyKindSlotArray);
- // Need a dynamic check on the size of the local slot array.
- m_func->GetTopFunc()->AddSlotArrayCheck(fieldOpnd);
}
}
else
@@ -3650,11 +3822,6 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
instr->SetSrc2(fieldOpnd);
}
- if (!m_func->DoStackFrameDisplay() && IsLoopBody())
- {
- // Need a dynamic check on the size of the local slot array.
- m_func->GetTopFunc()->AddSlotArrayCheck(fieldOpnd);
- }
break;
case Js::OpCode::StParamObjSlot:
@@ -3692,9 +3859,12 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
this->AddInstr(instr, offset);
break;
+ case Js::OpCode::LdEnvObj_ReuseLoc:
+ reuseLoc = true;
+ // fall through
case Js::OpCode::LdEnvObj:
fieldOpnd = this->BuildFieldOpnd(Js::OpCode::LdSlotArr, this->GetEnvReg(), slotId, (Js::PropertyIdIndexType)-1, PropertyKindSlotArray);
- regOpnd = this->BuildDstOpnd(regSlot);
+ regOpnd = this->BuildDstOpnd(regSlot, TyVar, false, reuseLoc);
instr = IR::Instr::New(Js::OpCode::LdSlotArr, regOpnd, fieldOpnd, m_func);
this->AddInstr(instr, offset);
@@ -3719,7 +3889,7 @@ IRBuilder::BuildElementSlotI1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
if (stackFuncPtrSym)
{
IR::RegOpnd * dataOpnd = IR::RegOpnd::New(TyVar, m_func);
- instr = IR::Instr::New(Js::OpCode::NewScFuncData, dataOpnd, environmentOpnd,
+ instr = IR::Instr::New(Js::OpCode::NewScFuncData, dataOpnd, environmentOpnd,
IR::RegOpnd::New(stackFuncPtrSym, TyVar, m_func), m_func);
this->AddInstr(instr, offset);
instr = IR::Instr::New(newOpcode, regOpnd, functionBodySlotOpnd, dataOpnd, m_func);
@@ -3849,7 +4019,7 @@ IRBuilder::BuildElementSlotI2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
fieldSym = PropertySym::New(regOpnd->m_sym, slotId2, (uint32)-1, (uint)-1, PropertyKindSlots, m_func);
fieldOpnd = IR::SymOpnd::New(fieldSym, TyVar, m_func);
-
+
if (newOpcode == Js::OpCode::LdModuleSlot)
{
newOpcode = Js::OpCode::LdSlot;
@@ -3966,13 +4136,8 @@ IRBuilder::BuildElementSlotI2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
else
{
fieldOpnd = this->BuildFieldOpnd(Js::OpCode::StSlot, slotId1, slotId2, (Js::PropertyIdIndexType)-1, PropertyKindSlots);
- if (!this->DoSlotArrayCheck(fieldOpnd, IsLoopBody()))
- {
- // Need a dynamic check on the size of the local slot array.
- m_func->GetTopFunc()->AddSlotArrayCheck(fieldOpnd);
- }
}
- newOpcode =
+ newOpcode =
newOpcode == Js::OpCode::StInnerObjSlot || newOpcode == Js::OpCode::StInnerSlot ?
Js::OpCode::StSlot : Js::OpCode::StSlotChkUndecl;
instr = IR::Instr::New(newOpcode, fieldOpnd, regOpnd, m_func);
@@ -4009,11 +4174,6 @@ IRBuilder::BuildElementSlotI2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot r
else
{
fieldOpnd = this->BuildFieldOpnd(Js::OpCode::LdSlot, slotId1, slotId2, (Js::PropertyIdIndexType)-1, PropertyKindSlots);
- if (!this->DoSlotArrayCheck(fieldOpnd, IsLoopBody()))
- {
- // Need a dynamic check on the size of the local slot array.
- m_func->GetTopFunc()->AddSlotArrayCheck(fieldOpnd);
- }
}
regOpnd = this->BuildDstOpnd(regSlot);
instr = IR::Instr::New(Js::OpCode::LdSlot, regOpnd, fieldOpnd, m_func);
@@ -4089,7 +4249,7 @@ IRBuilder::BuildElementSlotI3(Js::OpCode newOpcode, uint32 offset, Js::RegSlot f
IR::Opnd * environmentOpnd = this->BuildSrcOpnd(fieldRegSlot);
IR::Opnd * homeObjOpnd = this->BuildSrcOpnd(homeObj);
regOpnd = this->BuildDstOpnd(regSlot);
-
+
instr = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, m_func), homeObjOpnd, m_func);
this->AddInstr(instr, offset);
@@ -4100,7 +4260,7 @@ IRBuilder::BuildElementSlotI3(Js::OpCode newOpcode, uint32 offset, Js::RegSlot f
this->AddInstr(instr, offset);
instr = IR::Instr::New(newOpcode, regOpnd, instr->GetDst(), m_func);
-
+
if (regOpnd->m_sym->m_isSingleDef)
{
regOpnd->m_sym->m_isSafeThis = true;
@@ -4278,9 +4438,14 @@ IRBuilder::BuildElementP(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlo
propertyId = this->m_func->GetJITFunctionBody()->GetPropertyIdFromCacheId(inlineCacheIndex);
Js::RegSlot instance = this->GetEnvRegForEvalCode();
+ bool reuseLoc = false;
switch (newOpcode)
{
+ case Js::OpCode::LdLocalFld_ReuseLoc:
+ reuseLoc = true;
+ newOpcode = Js::OpCode::LdLocalFld;
+ // fall through
case Js::OpCode::LdLocalFld:
if (m_func->GetLocalClosureSym()->HasByteCodeRegSlot())
{
@@ -4295,7 +4460,7 @@ IRBuilder::BuildElementP(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlo
{
fieldSymOpnd->AsPropertySymOpnd()->TryDisableRuntimePolymorphicCache();
}
- regOpnd = this->BuildDstOpnd(regSlot);
+ regOpnd = this->BuildDstOpnd(regSlot, TyVar, false, reuseLoc);
instr = nullptr;
if (isProfiled)
@@ -4508,8 +4673,13 @@ IRBuilder::BuildElementCP(Js::OpCode newOpcode, uint32 offset, Js::RegSlot insta
IR::Instr * instr = nullptr;
bool isLdFldThatWasNotProfiled = false;
+ bool reuseLoc = false;
switch (newOpcode)
{
+ case Js::OpCode::LdFld_ReuseLoc:
+ reuseLoc = true;
+ newOpcode = Js::OpCode::LdFld;
+ // fall through
case Js::OpCode::LdFldForTypeOf:
case Js::OpCode::LdFld:
case Js::OpCode::LdLen_A:
@@ -4525,7 +4695,7 @@ IRBuilder::BuildElementCP(Js::OpCode newOpcode, uint32 offset, Js::RegSlot insta
case Js::OpCode::ScopedLdMethodFld:
// Load
// LdMethodFromFlags is backend only. Don't need to be added here.
- regOpnd = this->BuildDstOpnd(regSlot);
+ regOpnd = this->BuildDstOpnd(regSlot, TyVar, false, reuseLoc);
if (isProfiled)
{
@@ -4678,7 +4848,7 @@ IRBuilder::BuildProfiledElementCP(Js::OpCode newOpcode, uint32 offset, Js::RegSl
{
isProfiled = false;
}
-
+
bool wasNotProfiled = false;
IR::Instr *instr = nullptr;
@@ -4816,15 +4986,18 @@ IRBuilder::BuildElementC2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot insta
regOpnd = this->BuildDstOpnd(regSlot);
instr = IR::ProfiledInstr::New(newOpcode, regOpnd, fieldSymOpnd, value2Opnd, m_func);
+ instr->AsProfiledInstr()->u.FldInfo() = *(m_func->GetReadOnlyProfileInfo()->GetFldInfo(propertyIdIndex));
this->AddInstr(instr, offset);
}
break;
case Js::OpCode::ProfiledStSuperFld:
+ case Js::OpCode::ProfiledStSuperFldStrict:
Js::OpCodeUtil::ConvertNonCallOpToNonProfiled(newOpcode);
// fall-through
case Js::OpCode::StSuperFld:
+ case Js::OpCode::StSuperFldStrict:
{
propertyId = m_func->GetJITFunctionBody()->GetPropertyIdFromCacheId(propertyIdIndex);
fieldSymOpnd = this->BuildFieldOpnd(newOpcode, instanceSlot, propertyId, (Js::PropertyIdIndexType) - 1, PropertyKindData, propertyIdIndex);
@@ -4837,7 +5010,7 @@ IRBuilder::BuildElementC2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot insta
value2Opnd = this->BuildSrcOpnd(value2Slot);
instr = IR::ProfiledInstr::New(newOpcode, fieldSymOpnd, regOpnd, value2Opnd, m_func);
-
+ instr->AsProfiledInstr()->u.FldInfo() = *(m_func->GetReadOnlyProfileInfo()->GetFldInfo(propertyIdIndex));
this->AddInstr(instr, offset);
break;
}
@@ -4899,6 +5072,7 @@ IRBuilder::BuildElementU(Js::OpCode newOpcode, uint32 offset, Js::RegSlot instan
IR::RegOpnd * regOpnd;
IR::SymOpnd * fieldSymOpnd;
Js::PropertyId propertyId = m_func->GetJITFunctionBody()->GetReferencedPropertyId(propertyIdIndex);
+ bool reuseLoc = false;
switch (newOpcode)
{
@@ -4946,10 +5120,14 @@ IRBuilder::BuildElementU(Js::OpCode newOpcode, uint32 offset, Js::RegSlot instan
instr = IR::Instr::New(newOpcode, fieldSymOpnd, regOpnd, m_func);
break;
+ case Js::OpCode::DeleteLocalFld_ReuseLoc:
+ newOpcode = Js::OpCode::DeleteLocalFld;
+ reuseLoc = true;
+ // fall through
case Js::OpCode::DeleteLocalFld:
newOpcode = Js::OpCode::DeleteFld;
fieldSymOpnd = BuildFieldOpnd(newOpcode, m_func->GetJITFunctionBody()->GetLocalClosureReg(), propertyId, propertyIdIndex, PropertyKindData);
- regOpnd = BuildDstOpnd(instance);
+ regOpnd = BuildDstOpnd(instance, TyVar, false, reuseLoc);
instr = IR::Instr::New(newOpcode, regOpnd, fieldSymOpnd, m_func);
break;
@@ -6663,48 +6841,6 @@ IRBuilder::BuildCallCommon(IR::Instr * instr, StackSym * symDst, Js::ArgSlot arg
}
}
-///----------------------------------------------------------------------------
-///
-/// IRBuilder::BuildClass
-///
-/// Build IR instr for an InitClass instruction.
-///
-///----------------------------------------------------------------------------
-
-
-template <typename SizePolicy>
-void
-IRBuilder::BuildClass(Js::OpCode newOpcode, uint32 offset)
-{
- Assert(!OpCodeAttr::IsProfiledOp(newOpcode));
- Assert(OpCodeAttr::HasMultiSizeLayout(newOpcode));
-    auto layout = m_jnReader.GetLayout<Js::OpLayoutT_Class<SizePolicy>>();
-
- if (!PHASE_OFF(Js::ClosureRegCheckPhase, m_func))
- {
- this->DoClosureRegCheck(layout->Constructor);
- this->DoClosureRegCheck(layout->Extends);
- }
-
- BuildClass(newOpcode, offset, layout->Constructor, layout->Extends);
-}
-
-void
-IRBuilder::BuildClass(Js::OpCode newOpcode, uint32 offset, Js::RegSlot constructor, Js::RegSlot extends)
-{
- Assert(newOpcode == Js::OpCode::InitClass);
-
- IR::Instr * insn = IR::Instr::New(newOpcode, m_func);
- insn->SetSrc1(this->BuildSrcOpnd(constructor));
-
- if (extends != Js::Constants::NoRegister)
- {
- insn->SetSrc2(this->BuildSrcOpnd(extends));
- }
-
- this->AddInstr(insn, offset);
-}
-
///----------------------------------------------------------------------------
///
@@ -6799,7 +6935,7 @@ IRBuilder::BuildBrReg1Unsigned1(Js::OpCode newOpcode, uint32 offset)
void
IRBuilder::BuildBrBReturn(Js::OpCode newOpcode, uint32 offset, Js::RegSlot DestRegSlot, uint32 forInLoopLevel, uint32 targetOffset)
{
- IR::Opnd *srcOpnd = this->BuildForInEnumeratorOpnd(forInLoopLevel);
+ IR::Opnd *srcOpnd = this->BuildForInEnumeratorOpnd(forInLoopLevel, offset);
IR::RegOpnd * destOpnd = this->BuildDstOpnd(DestRegSlot);
IR::BranchInstr * branchInstr = IR::BranchInstr::New(newOpcode, destOpnd, nullptr, srcOpnd, m_func);
this->AddBranchInstr(branchInstr, offset, targetOffset);
@@ -6960,7 +7096,7 @@ IRBuilder::BuildEmpty(Js::OpCode newOpcode, uint32 offset)
case Js::OpCode::BeginBodyScope:
{
- // This marks the end of a param socpe which is not merged with body scope.
+ // This marks the end of a param scope which is not merged with body scope.
// So we have to first cache the closure so that we can use it to copy the initial values for
// body syms from corresponding param syms (LdParamSlot). Body should get its own scope slot.
Assert(!this->IsParamScopeDone());
@@ -7298,17 +7434,7 @@ void
IRBuilder::BuildBrLocalProperty(Js::OpCode newOpcode, uint32 offset)
{
Assert(!OpCodeAttr::HasMultiSizeLayout(newOpcode));
-
- switch (newOpcode)
- {
- case Js::OpCode::BrOnNoLocalProperty:
- newOpcode = Js::OpCode::BrOnNoProperty;
- break;
-
- default:
- Assert(0);
- break;
- }
+ Assert(newOpcode == Js::OpCode::BrOnHasLocalProperty);
const unaligned Js::OpLayoutBrLocalProperty *branchInsn = m_jnReader.BrLocalProperty();
@@ -7352,7 +7478,8 @@ IRBuilder::BuildBrEnvProperty(Js::OpCode newOpcode, uint32 offset)
fieldSym = PropertySym::New(regOpnd->m_sym, propertyId, branchInsn->PropertyIdIndex, (uint)-1, PropertyKindData, m_func);
fieldOpnd = IR::SymOpnd::New(fieldSym, TyVar, m_func);
- branchInstr = IR::BranchInstr::New(Js::OpCode::BrOnNoProperty, nullptr, fieldOpnd, m_func);
+ Assert(newOpcode == Js::OpCode::BrOnHasEnvProperty || newOpcode == Js::OpCode::BrOnHasLocalEnvProperty);
+ branchInstr = IR::BranchInstr::New(newOpcode == Js::OpCode::BrOnHasEnvProperty ? Js::OpCode::BrOnHasProperty : Js::OpCode::BrOnHasLocalProperty, nullptr, fieldOpnd, m_func);
this->AddBranchInstr(branchInstr, offset, targetOffset);
}
@@ -7711,3 +7838,144 @@ IRBuilder::AllowNativeArrayProfileInfo()
return !((!(m_func->GetTopFunc()->HasTry() && !m_func->GetTopFunc()->DoOptimizeTry()) && m_func->GetWeakFuncRef() && !m_func->HasArrayInfo()) ||
m_func->IsJitInDebugMode());
}
+
+#if DBG_DUMP || defined(ENABLE_IR_VIEWER)
+#define POINTER_OFFSET(opnd, c, field) \
+ m_irBuilder->BuildIndirOpnd((opnd), c, _u(#c) _u(".") _u(#field))
+#else
+#define POINTER_OFFSET(opnd, c, field) \
+ m_irBuilder->BuildIndirOpnd((opnd), c)
+#endif
+
+IRBuilder::GeneratorJumpTable::GeneratorJumpTable(Func* func, IRBuilder* irBuilder) : m_func(func), m_irBuilder(irBuilder) {}
+
+IR::Instr*
+IRBuilder::GeneratorJumpTable::BuildJumpTable()
+{
+ AssertMsg(!this->m_func->IsLoopBody(), "Coroutine Loop Bodies can be jitted but should follow a different path");
+ if (!this->m_func->GetJITFunctionBody()->IsCoroutine())
+ {
+ return this->m_irBuilder->m_lastInstr;
+ }
+
+ // Build code to check if the generator already has state and if it does then jump to the corresponding resume point.
+ // Otherwise jump to the start of the function. The generator object is the first argument by convention established
+ // in JavascriptGenerator::EntryNext/EntryReturn/EntryThrow.
+    // We also create the interpreter stack frame for the generator if it doesn't already exist.
+ //
+ // s1 = Ld_A prm1
+ // s2 = Ld_A s1[offset of JavascriptGenerator::frame]
+ // BrNotAddr_A s2 !nullptr $jumpTable
+ //
+ // $createInterpreterStackFrame:
+ // call helper
+ //
+ // Br $startOfFunc
+ //
+ // $jumpTable:
+ //
+ // s3 = Ld_A s2[offset of InterpreterStackFrame::m_reader.m_currentLocation]
+ // s4 = Ld_A s2[offset of InterpreterStackFrame::m_reader.m_startLocation]
+ // s5 = Sub_I4 s3 s4
+ // GeneratorResumeJumpTable s5
+ //
+ // $startOfFunc:
+ //
+
+ // s1 = Ld_A prm1
+ StackSym* genParamSym = StackSym::NewParamSlotSym(1, this->m_func);
+ this->m_func->SetArgOffset(genParamSym, LowererMD::GetFormalParamOffset() * MachPtr);
+
+ IR::SymOpnd* genParamOpnd = IR::SymOpnd::New(genParamSym, TyMachPtr, this->m_func);
+ IR::RegOpnd* genRegOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
+ IR::Instr* instr = IR::Instr::New(Js::OpCode::Ld_A, genRegOpnd, genParamOpnd, this->m_func);
+ this->m_irBuilder->AddInstr(instr, this->m_irBuilder->m_functionStartOffset);
+
+ // s2 = Ld_A s1[offset of JavascriptGenerator::frame]
+ IR::RegOpnd* genFrameOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
+ instr = IR::Instr::New(
+ Js::OpCode::Ld_A,
+ genFrameOpnd,
+ POINTER_OFFSET(genRegOpnd, Js::JavascriptGenerator::GetFrameOffset(), GeneratorFrame),
+ this->m_func
+ );
+ this->m_irBuilder->AddInstr(instr, this->m_irBuilder->m_functionStartOffset);
+
+ IR::LabelInstr* functionBegin = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
+ LABELNAMESET(functionBegin, "GeneratorFunctionBegin");
+
+ IR::LabelInstr* jumpTable = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
+ LABELNAMESET(jumpTable, "GeneratorJumpTable");
+
+    // If there is already a stack frame, the generator function has previously begun execution - don't recreate it; skip down to the jump table
+ // BrNotAddr_A s2 nullptr $jumpTable
+ IR::BranchInstr* skipCreateInterpreterFrame = IR::BranchInstr::New(Js::OpCode::BrNotAddr_A, jumpTable, genFrameOpnd, IR::AddrOpnd::NewNull(this->m_func), this->m_func);
+ this->m_irBuilder->AddInstr(skipCreateInterpreterFrame, this->m_irBuilder->m_functionStartOffset);
+
+ // Create interpreter stack frame
+ IR::Instr* createInterpreterFrame = IR::Instr::New(Js::OpCode::GeneratorCreateInterpreterStackFrame, genFrameOpnd /* dst */, genRegOpnd /* src */, this->m_func);
+ this->m_irBuilder->AddInstr(createInterpreterFrame, this->m_irBuilder->m_functionStartOffset);
+
+ // Having created the frame, skip over the jump table and start executing from the beginning of the function
+ IR::BranchInstr* skipJumpTable = IR::BranchInstr::New(Js::OpCode::Br, functionBegin, this->m_func);
+ this->m_irBuilder->AddInstr(skipJumpTable, this->m_irBuilder->m_functionStartOffset);
+
+ // Label for start of jumpTable - where we look for the correct Yield resume point
+ // $jumpTable:
+ this->m_irBuilder->AddInstr(jumpTable, this->m_irBuilder->m_functionStartOffset);
+
+ // s3 = Ld_A s2[offset of InterpreterStackFrame::m_reader.m_currentLocation]
+ IR::RegOpnd* curLocOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
+ instr = IR::Instr::New(
+ Js::OpCode::Ld_A,
+ curLocOpnd,
+ POINTER_OFFSET(genFrameOpnd, Js::InterpreterStackFrame::GetCurrentLocationOffset(), InterpreterCurrentLocation),
+ this->m_func
+ );
+ this->m_irBuilder->AddInstr(instr, this->m_irBuilder->m_functionStartOffset);
+
+ // s4 = Ld_A s2[offset of InterpreterStackFrame::m_reader.m_startLocation]
+ IR::RegOpnd* startLocOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
+ instr = IR::Instr::New(
+ Js::OpCode::Ld_A,
+ startLocOpnd,
+ POINTER_OFFSET(genFrameOpnd, Js::InterpreterStackFrame::GetStartLocationOffset(), InterpreterStartLocation),
+ this->m_func
+ );
+ this->m_irBuilder->AddInstr(instr, this->m_irBuilder->m_functionStartOffset);
+
+ // s5 = Sub_I4 s3 s4
+ IR::RegOpnd* curOffsetOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
+ instr = IR::Instr::New(Js::OpCode::Sub_I4, curOffsetOpnd, curLocOpnd, startLocOpnd, this->m_func);
+ this->m_irBuilder->AddInstr(instr, this->m_irBuilder->m_functionStartOffset);
+
+ // GeneratorResumeJumpTable s5
+ instr = IR::Instr::New(Js::OpCode::GeneratorResumeJumpTable, this->m_func);
+ instr->SetSrc1(curOffsetOpnd);
+ this->m_irBuilder->AddInstr(instr, this->m_irBuilder->m_functionStartOffset);
+
+ this->m_func->m_bailOutForElidedYieldInsertionPoint = instr;
+
+ this->m_irBuilder->AddInstr(functionBegin, this->m_irBuilder->m_functionStartOffset);
+
+ // Save this value for later use
+ this->m_generatorFrameOpnd = genFrameOpnd;
+ this->m_func->SetGeneratorFrameSym(genFrameOpnd->GetStackSym());
+
+ return this->m_irBuilder->m_lastInstr;
+}
+
+IR::RegOpnd*
+IRBuilder::GeneratorJumpTable::BuildForInEnumeratorArrayOpnd(uint32 offset)
+{
+ Assert(this->m_generatorFrameOpnd != nullptr);
+
+ IR::RegOpnd* forInEnumeratorArrayOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
+ IR::Instr* instr = IR::Instr::New(Js::OpCode::Ld_A, forInEnumeratorArrayOpnd,
+ POINTER_OFFSET(this->m_generatorFrameOpnd, Js::InterpreterStackFrame::GetOffsetOfForInEnumerators(), ForInEnumerators),
+ this->m_func
+ );
+ this->m_irBuilder->AddInstr(instr, offset);
+
+ return forInEnumeratorArrayOpnd;
+}
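
Taken together, GeneratorJumpTable::BuildJumpTable above emits a prologue whose behavior can be modeled by the following self-contained sketch, where plain structs and a std::map stand in for the generated IR and the JIT's bookkeeping; only the control flow mirrors the code above:

#include <cstdint>
#include <map>

struct InterpreterFrame
{
    const uint8_t* startLocation;    // InterpreterStackFrame::m_reader.m_startLocation
    const uint8_t* currentLocation;  // InterpreterStackFrame::m_reader.m_currentLocation
};

struct Generator
{
    InterpreterFrame* frame = nullptr;   // JavascriptGenerator::frame
};

// Returns the label to resume at: 0 means "start of the function body", anything else
// is the bail-in label recorded for the matching Yield (see the resume-map sketch near
// the Yield case earlier in this file's changes).
static uint32_t GeneratorEntryDispatch(Generator& gen,
                                       InterpreterFrame& freshFrame,
                                       const uint8_t* byteCodeStart,
                                       const std::map<uint32_t, uint32_t>& yieldOffsetToLabel)
{
    if (gen.frame == nullptr)                        // BrNotAddr_A not taken
    {
        freshFrame = { byteCodeStart, byteCodeStart };
        gen.frame = &freshFrame;                     // GeneratorCreateInterpreterStackFrame
        return 0;                                    // Br $startOfFunc
    }

    // $jumpTable: s5 = currentLocation - startLocation; GeneratorResumeJumpTable s5
    uint32_t resumeOffset =
        static_cast<uint32_t>(gen.frame->currentLocation - gen.frame->startLocation);
    auto it = yieldOffsetToLabel.find(resumeOffset);
    return it != yieldOffsetToLabel.end() ? it->second : 0;
}

int main()
{
    uint8_t byteCode[64] = {};
    Generator gen;
    InterpreterFrame frame;
    std::map<uint32_t, uint32_t> labels = { { 42, 7 } };

    uint32_t first = GeneratorEntryDispatch(gen, frame, byteCode, labels);   // 0: start of function
    gen.frame->currentLocation = byteCode + 42;                              // as if the Yield at offset 42 suspended
    uint32_t second = GeneratorEntryDispatch(gen, frame, byteCode, labels);  // 7: bail-in label
    return (first == 0 && second == 7) ? 0 : 1;
}
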
diff --git a/lib/Backend/IRBuilder.h b/lib/Backend/IRBuilder.h
index c31e036ed9d..1ad9b383be8 100644
--- a/lib/Backend/IRBuilder.h
+++ b/lib/Backend/IRBuilder.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
@@ -84,6 +85,7 @@ class IRBuilder
#ifdef BYTECODE_BRANCH_ISLAND
, longBranchMap(nullptr)
#endif
+ , m_generatorJumpTable(GeneratorJumpTable(func, this))
{
auto loopCount = func->GetJITFunctionBody()->GetLoopCount();
if (loopCount > 0) {
@@ -121,7 +123,6 @@ class IRBuilder
uint ResolveVirtualLongBranch(IR::BranchInstr * branchInstr, uint offset);
#endif
BranchReloc * CreateRelocRecord(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset);
- void BuildGeneratorPreamble();
void LoadNativeCodeData();
void BuildConstantLoads();
void BuildImplicitArgIns();
@@ -137,11 +138,18 @@ class IRBuilder
void BuildProfiledReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, Js::ProfileId profileId);
void BuildReg3(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
Js::RegSlot src2RegSlot, Js::ProfileId profileId);
+ void BuildReg3U(Js::OpCode newOpCode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
+ Js::RegSlot src2RegSlot, uint slotIndex);
void BuildIsIn(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot, Js::RegSlot src2RegSlot, Js::ProfileId profileId);
void BuildReg3C(Js::OpCode newOpCode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
Js::RegSlot src2RegSlot, Js::CacheId inlineCacheIndex);
+ void BuildReg3UC(Js::OpCode newOpCode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
+ Js::RegSlot src2RegSlot, uint slotIndex, Js::CacheId inlineCacheIndex);
void BuildReg4(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
Js::RegSlot src2RegSlot, Js::RegSlot src3RegSlot);
+ void BuildReg4U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, Js::RegSlot R2, Js::RegSlot R3, uint slotIndex);
+ void BuildReg5U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, Js::RegSlot R2, Js::RegSlot R3, Js::RegSlot R4,
+ uint slotIndex);
void BuildReg2B1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, byte index);
void BuildReg3B1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot src1RegSlot,
Js::RegSlot src2RegSlot, uint8 index);
@@ -151,6 +159,7 @@ class IRBuilder
void BuildReg1Unsigned1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, int32 C1);
void BuildProfiledReg1Unsigned1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, int32 C1, Js::ProfileId profileId);
void BuildReg2Int1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot dstRegSlot, Js::RegSlot srcRegSlot, int32 value);
+ void BuildReg2U(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::RegSlot R1, uint index);
void BuildElementC(Js::OpCode newOpcode, uint32 offset, Js::RegSlot fieldRegSlot, Js::RegSlot regSlot,
Js::PropertyIdIndexType propertyIdIndex);
void BuildElementScopedC(Js::OpCode newOpcode, uint32 offset, Js::RegSlot regSlot,
@@ -201,7 +210,7 @@ class IRBuilder
Js::ArgSlot argCount, Js::CallIExtendedOptions options, uint32 spreadAuxOffset, Js::CallFlags flags = Js::CallFlags_None);
void BuildCallCommon(IR::Instr *instr, StackSym *symDst, Js::ArgSlot argCount, Js::CallFlags flags = Js::CallFlags_None);
void BuildRegexFromPattern(Js::RegSlot dstRegSlot, uint32 patternIndex, uint32 offset);
- void BuildClass(Js::OpCode newOpcode, uint32 offset, Js::RegSlot constructor, Js::RegSlot extends);
+ void BuildInitClass(uint32 offset, Js::RegSlot constructor, Js::RegSlot proto, IR::Opnd * opndProtoParent, IR::Opnd * opndCtorParent, IR::Opnd * opndEnvironment, uint index);
void BuildBrReg1(Js::OpCode newOpcode, uint32 offset, uint targetOffset, Js::RegSlot srcRegSlot);
void BuildBrReg2(Js::OpCode newOpcode, uint32 offset, uint targetOffset, Js::RegSlot src1RegSlot, Js::RegSlot src2RegSlot);
void BuildBrBReturn(Js::OpCode newOpcode, uint32 offset, Js::RegSlot DestRegSlot, uint32 forInLoopLevel, uint32 targetOffset);
@@ -214,11 +223,11 @@ class IRBuilder
IR::SymOpnd * BuildFieldOpnd(Js::OpCode newOpCode, Js::RegSlot reg, Js::PropertyId propertyId, Js::PropertyIdIndexType propertyIdIndex, PropertyKind propertyKind, uint inlineCacheIndex = -1);
PropertySym * BuildFieldSym(Js::RegSlot reg, Js::PropertyId propertyId, Js::PropertyIdIndexType propertyIdIndex, uint inlineCacheIndex, PropertyKind propertyKind);
SymID BuildSrcStackSymID(Js::RegSlot regSlot);
- IR::RegOpnd * BuildDstOpnd(Js::RegSlot dstRegSlot, IRType type = TyVar, bool isCatchObjectSym = false);
+ IR::RegOpnd * BuildDstOpnd(Js::RegSlot dstRegSlot, IRType type = TyVar, bool isCatchObjectSym = false, bool reuseTemp = false);
IR::RegOpnd * BuildSrcOpnd(Js::RegSlot srcRegSlot, IRType type = TyVar);
IR::AddrOpnd * BuildAuxArrayOpnd(AuxArrayValue auxArrayType, uint32 auxArrayOffset);
IR::Opnd * BuildAuxObjectLiteralTypeRefOpnd(int objectId);
- IR::Opnd * BuildForInEnumeratorOpnd(uint forInLoopLevel);
+ IR::Opnd * BuildForInEnumeratorOpnd(uint forInLoopLevel, uint32 offset);
IR::RegOpnd * EnsureLoopBodyForInEnumeratorArrayOpnd();
private:
uint AddStatementBoundary(uint statementIndex, uint offset);
@@ -247,33 +256,6 @@ class IRBuilder
this->tempMap[tempIndex] = tempId;
}
- BOOL GetTempUsed(Js::RegSlot reg)
- {
- AssertMsg(this->RegIsTemp(reg), "Processing non-temp reg as a temp?");
- AssertMsg(this->fbvTempUsed, "Processing non-temp reg without a used BV?");
-
- Js::RegSlot tempIndex = reg - this->firstTemp;
- AssertOrFailFast(tempIndex < m_func->GetJITFunctionBody()->GetTempCount());
- return this->fbvTempUsed->Test(tempIndex);
- }
-
- void SetTempUsed(Js::RegSlot reg, BOOL used)
- {
- AssertMsg(this->RegIsTemp(reg), "Processing non-temp reg as a temp?");
- AssertMsg(this->fbvTempUsed, "Processing non-temp reg without a used BV?");
-
- Js::RegSlot tempIndex = reg - this->firstTemp;
- AssertOrFailFast(tempIndex < m_func->GetJITFunctionBody()->GetTempCount());
- if (used)
- {
- this->fbvTempUsed->Set(tempIndex);
- }
- else
- {
- this->fbvTempUsed->Clear(tempIndex);
- }
- }
-
BOOL RegIsTemp(Js::RegSlot reg)
{
return reg >= this->firstTemp;
@@ -281,7 +263,7 @@ class IRBuilder
BOOL RegIsConstant(Js::RegSlot reg)
{
- return reg > 0 && reg < m_func->GetJITFunctionBody()->GetConstCount();
+ return this->m_func->GetJITFunctionBody()->RegIsConstant(reg);
}
bool IsParamScopeDone() const { return m_paramScopeDone; }
@@ -292,7 +274,6 @@ class IRBuilder
Js::RegSlot GetEnvRegForEvalCode() const;
Js::RegSlot GetEnvRegForInnerFrameDisplay() const;
void AddEnvOpndForInnerFrameDisplay(IR::Instr *instr, uint offset);
- bool DoSlotArrayCheck(IR::SymOpnd *fieldOpnd, bool doDynamicCheck);
void EmitClosureRangeChecks();
void DoClosureRegCheck(Js::RegSlot reg);
void BuildInitCachedScope(int auxOffset, int offset);
@@ -322,7 +303,7 @@ class IRBuilder
void InsertDoneLoopBodyLoopCounter(uint32 lastOffset);
IR::RegOpnd * InsertConvPrimStr(IR::RegOpnd * srcOpnd, uint offset, bool forcePreOpBailOutIfNeeded);
- IR::Opnd * IRBuilder::GetEnvironmentOperand(uint32 offset);
+ IR::Opnd * GetEnvironmentOperand(uint32 offset);
bool DoLoadInstructionArrayProfileInfo();
bool AllowNativeArrayProfileInfo();
@@ -348,7 +329,6 @@ class IRBuilder
typedef Pair handlerStackElementType;
SList *handlerOffsetStack;
SymID * tempMap;
- BVFixed * fbvTempUsed;
Js::RegSlot firstTemp;
IRBuilderSwitchAdapter m_switchAdapter;
SwitchIRBuilder m_switchBuilder;
@@ -381,4 +361,18 @@ class IRBuilder
LongBranchMap * longBranchMap;
static IR::Instr * const VirtualLongBranchInstr;
#endif
+
+ class GeneratorJumpTable {
+ Func* const m_func;
+ IRBuilder* const m_irBuilder;
+
+ IR::RegOpnd* m_generatorFrameOpnd = nullptr;
+
+ public:
+ GeneratorJumpTable(Func* func, IRBuilder* irBuilder);
+ IR::Instr* BuildJumpTable();
+ IR::RegOpnd* BuildForInEnumeratorArrayOpnd(uint32 offset);
+ };
+
+ GeneratorJumpTable m_generatorJumpTable;
};
diff --git a/lib/Backend/IRBuilderAsmJs.cpp b/lib/Backend/IRBuilderAsmJs.cpp
index 7434dd0b667..5a356036710 100644
--- a/lib/Backend/IRBuilderAsmJs.cpp
+++ b/lib/Backend/IRBuilderAsmJs.cpp
@@ -337,7 +337,7 @@ IRBuilderAsmJs::BuildSrcOpnd(Js::RegSlot srcRegSlot, IRType type)
{
StackSym * symSrc = m_func->m_symTable->FindStackSym(BuildSrcStackSymID(srcRegSlot, type));
AssertMsg(symSrc, "Tried to use an undefined stack slot?");
- IR::RegOpnd * regOpnd = IR::RegOpnd::New(symSrc, type, m_func);
+ IR::RegOpnd * regOpnd = IR::RegOpnd::New(symSrc, type, m_func);
return regOpnd;
}
@@ -5903,7 +5903,6 @@ IRBuilderAsmJs::BuildInt1Uint8x16_1Int1(Js::OpCodeAsmJs newOpcode, uint32 offset
void IRBuilderAsmJs::BuildUint8x16_2Int16(Js::OpCodeAsmJs newOpcode, uint32 offset, BUILD_SIMD_ARGS_REG18)
{
- IR::RegOpnd * dstOpnd = BuildDstOpnd(dstRegSlot, TySimd128U16);
IR::RegOpnd * src1Opnd = BuildSrcOpnd(src1RegSlot, TySimd128U16);
IR::RegOpnd * src2Opnd = BuildIntConstOpnd(src2RegSlot);
@@ -5923,6 +5922,8 @@ void IRBuilderAsmJs::BuildUint8x16_2Int16(Js::OpCodeAsmJs newOpcode, uint32 offs
IR::RegOpnd * src16Opnd = BuildIntConstOpnd(src16RegSlot);
IR::RegOpnd * src17Opnd = BuildIntConstOpnd(src17RegSlot);
+ IR::RegOpnd * dstOpnd = BuildDstOpnd(dstRegSlot, TySimd128U16);
+
IR::Instr * instr = nullptr;
dstOpnd->SetValueType(ValueType::Simd);
src1Opnd->SetValueType(ValueType::Simd);
@@ -5961,9 +5962,9 @@ IRBuilderAsmJs::BuildAsmShuffle(Js::OpCodeAsmJs newOpcode, uint32 offset)
Assert(OpCodeAttrAsmJs::HasMultiSizeLayout(newOpcode) && newOpcode == Js::OpCodeAsmJs::Simd128_Shuffle_V8X16);
    auto layout = m_jnReader.GetLayout<Js::OpLayoutT_AsmShuffle<SizePolicy>>();
- IR::RegOpnd * dstOpnd = BuildDstOpnd(GetRegSlotFromSimd128Reg(layout->R0), TySimd128U16);
IR::RegOpnd * src1Opnd = BuildSrcOpnd(GetRegSlotFromSimd128Reg(layout->R1), TySimd128U16);
IR::RegOpnd * src2Opnd = BuildSrcOpnd(GetRegSlotFromSimd128Reg(layout->R2), TySimd128U16);
+ IR::RegOpnd * dstOpnd = BuildDstOpnd(GetRegSlotFromSimd128Reg(layout->R0), TySimd128U16);
dstOpnd->SetValueType(ValueType::Simd);
src1Opnd->SetValueType(ValueType::Simd);
src2Opnd->SetValueType(ValueType::Simd);
@@ -5982,7 +5983,6 @@ IRBuilderAsmJs::BuildAsmShuffle(Js::OpCodeAsmJs newOpcode, uint32 offset)
void IRBuilderAsmJs::BuildUint8x16_3Int16(Js::OpCodeAsmJs newOpcode, uint32 offset, BUILD_SIMD_ARGS_REG19)
{
- IR::RegOpnd * dstOpnd = BuildDstOpnd(dstRegSlot, TySimd128U16);
IR::RegOpnd * src1Opnd = BuildSrcOpnd(src1RegSlot, TySimd128U16);
IR::RegOpnd * src2Opnd = BuildSrcOpnd(src2RegSlot, TySimd128U16);
@@ -6003,6 +6003,8 @@ void IRBuilderAsmJs::BuildUint8x16_3Int16(Js::OpCodeAsmJs newOpcode, uint32 offs
IR::RegOpnd * src17Opnd = BuildIntConstOpnd(src17RegSlot);
IR::RegOpnd * src18Opnd = BuildIntConstOpnd(src18RegSlot);
+ IR::RegOpnd * dstOpnd = BuildDstOpnd(dstRegSlot, TySimd128U16);
+
IR::Instr * instr = nullptr;
dstOpnd->SetValueType(ValueType::Simd);
src1Opnd->SetValueType(ValueType::Simd);
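
The hunks above move BuildDstOpnd after the BuildSrcOpnd/BuildIntConstOpnd calls. A hedged guess at the rationale, not stated in the patch: if the destination slot were defined first, a source naming the same register slot could bind to the new definition instead of the incoming value. A tiny self-contained model of that ordering hazard under that assumption:

#include <array>
#include <cstdint>

struct SlotTable
{
    std::array<uint32_t, 8> version{};            // current definition version per register slot
    uint32_t UseSrc(size_t slot) const { return version[slot]; }
    uint32_t DefDst(size_t slot) { return ++version[slot]; }
};

int main()
{
    // Build the srcs before the dst: the src observes version 0 (the incoming value).
    SlotTable good;
    uint32_t srcSeenFirst = good.UseSrc(3);
    good.DefDst(3);

    // Build the dst first: a src on the same slot would observe the new definition.
    SlotTable bad;
    bad.DefDst(3);
    uint32_t srcSeenAfterDef = bad.UseSrc(3);

    return (srcSeenFirst == 0 && srcSeenAfterDef == 1) ? 0 : 1;
}
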
@@ -6746,13 +6748,11 @@ IRBuilderAsmJs::BuildAsmSimdTypedArr(Js::OpCodeAsmJs newOpcode, uint32 offset, u
IRType type = TySimd128F4;
Js::RegSlot valueRegSlot = GetRegSlotFromSimd128Reg(value);
- IR::RegOpnd * maskedOpnd = nullptr;
- IR::Instr * maskInstr = nullptr;
+ IR::RegOpnd * addrOpnd = nullptr;
Js::OpCode op = GetSimdOpcode(newOpcode);
ValueType arrayType;
bool isLd = false, isConst = false;
- uint32 mask = 0;
switch (newOpcode)
{
@@ -6934,7 +6934,6 @@ IRBuilderAsmJs::BuildAsmSimdTypedArr(Js::OpCodeAsmJs newOpcode, uint32 offset, u
{
#define ARRAYBUFFER_VIEW(name, align, RegType, MemType, irSuffix) \
case Js::ArrayBufferView::TYPE_##name: \
- mask = ARRAYBUFFER_VIEW_MASK(align); \
arrayType = ValueType::GetObject(ObjectType::##irSuffix##Array); \
break;
#include "Language/AsmJsArrayBufferViews.h"
@@ -6947,18 +6946,7 @@ IRBuilderAsmJs::BuildAsmSimdTypedArr(Js::OpCodeAsmJs newOpcode, uint32 offset, u
{
Js::RegSlot indexRegSlot = GetRegSlotFromIntReg(slotIndex);
-
- if (mask)
- {
- // AND_I4 index, mask
- maskedOpnd = IR::RegOpnd::New(TyUint32, m_func);
- maskInstr = IR::Instr::New(Js::OpCode::And_I4, maskedOpnd, BuildSrcOpnd(indexRegSlot, TyInt32), IR::IntConstOpnd::New(mask, TyUint32, m_func), m_func);
-
- }
- else
- {
- maskedOpnd = BuildSrcOpnd(indexRegSlot, TyInt32);
- }
+ addrOpnd = BuildSrcOpnd(indexRegSlot, TyInt32);
}
IR::Instr * instr = nullptr;
@@ -6974,11 +6962,11 @@ IRBuilderAsmJs::BuildAsmSimdTypedArr(Js::OpCodeAsmJs newOpcode, uint32 offset, u
regOpnd->SetValueType(ValueType::Simd);
if (!isConst)
{
- Assert(maskedOpnd);
+ Assert(addrOpnd);
// Js::OpCodeAsmJs::Simd128_LdArr_I4:
// Js::OpCodeAsmJs::Simd128_LdArr_F4:
// Js::OpCodeAsmJs::Simd128_LdArr_D2:
- indirOpnd = IR::IndirOpnd::New(baseOpnd, maskedOpnd, type, m_func);
+ indirOpnd = IR::IndirOpnd::New(baseOpnd, addrOpnd, type, m_func);
}
else
{
@@ -6995,11 +6983,11 @@ IRBuilderAsmJs::BuildAsmSimdTypedArr(Js::OpCodeAsmJs newOpcode, uint32 offset, u
regOpnd->SetValueType(ValueType::Simd);
if (!isConst)
{
- Assert(maskedOpnd);
+ Assert(addrOpnd);
// Js::OpCodeAsmJs::Simd128_StArr_I4:
// Js::OpCodeAsmJs::Simd128_StArr_F4:
// Js::OpCodeAsmJs::Simd128_StArr_D2:
- indirOpnd = IR::IndirOpnd::New(baseOpnd, maskedOpnd, type, m_func);
+ indirOpnd = IR::IndirOpnd::New(baseOpnd, addrOpnd, type, m_func);
}
else
{
@@ -7014,10 +7002,6 @@ IRBuilderAsmJs::BuildAsmSimdTypedArr(Js::OpCodeAsmJs newOpcode, uint32 offset, u
Assert(dataWidth >= 4 && dataWidth <= 16);
instr->dataWidth = dataWidth;
indirOpnd->SetOffset(simdOffset);
- if (maskInstr)
- {
- AddInstr(maskInstr, offset);
- }
AddInstr(instr, offset);
}
diff --git a/lib/Backend/Inline.cpp b/lib/Backend/Inline.cpp
index a09ba6bbb4a..7b091bb5808 100644
--- a/lib/Backend/Inline.cpp
+++ b/lib/Backend/Inline.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
@@ -139,15 +140,6 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
JITTimeFunctionBody * body = inlineeData->GetBody();
if (!body)
{
-#ifdef ENABLE_DOM_FAST_PATH
- Assert(inlineeData->GetLocalFunctionId() == Js::JavascriptBuiltInFunction::DOMFastPathGetter ||
- inlineeData->GetLocalFunctionId() == Js::JavascriptBuiltInFunction::DOMFastPathSetter);
- if (PHASE_OFF1(Js::InlineHostCandidatePhase))
- {
- break;
- }
- this->InlineDOMGetterSetterFunction(instr, inlineeData, inlinerData);
-#endif
break;
}
@@ -213,7 +205,7 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
{
if (PHASE_ENABLED(InlineCallbacksPhase, func))
{
- callbackDefInstr = TryGetCallbackDefInstr(instr);
+ callbackDefInstr = TryGetCallbackDefInstrForCallInstr(instr);
}
if (callbackDefInstr == nullptr)
@@ -228,7 +220,7 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
{
if (PHASE_ENABLED(InlineCallbacksPhase, func))
{
- callbackDefInstr = TryGetCallbackDefInstr(instr);
+ callbackDefInstr = TryGetCallbackDefInstrForCallInstr(instr);
if (callbackDefInstr == nullptr)
{
isPolymorphic = true;
@@ -244,12 +236,12 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
{
        Js::ProfileId callSiteId = static_cast<Js::ProfileId>(callbackDefInstr->AsProfiledInstr()->u.profileId);
inlineeData = callbackDefInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetCallbackInlinee(callSiteId);
- if (PHASE_TESTTRACE(Js::InlineCallbacksPhase, func) || PHASE_TRACE(Js::InlineCallbacksPhase, func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(_u("INLINING : Inlining callback at\tCallSite: %d\tCaller: %s (%s)\n"),
- callSiteId, inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- }
+
+#if ENABLE_DEBUG_CONFIG_OPTIONS
+ char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
+ INLINE_CALLBACKS_TRACE(_u("INLINING : Inlining callback at\tCallSite: %d\tCaller: %s (%s)\n"),
+ callSiteId, inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+#endif
}
if (isPolymorphic)
@@ -403,7 +395,7 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
instrNext = builtInInlineCandidateOpCode != 0 ?
- this->InlineBuiltInFunction(instr, inlineeData, builtInInlineCandidateOpCode, inlinerData, symThis, &isInlined, profileId, recursiveInlineDepth) :
+ this->InlineBuiltInFunction(instr, inlineeData, builtInInlineCandidateOpCode, inlinerData, symThis, &isInlined, profileId, recursiveInlineDepth, instr) :
this->InlineScriptFunction(instr, inlineeData, symThis, profileId, &isInlined, callbackDefInstr, recursiveInlineDepth);
if (!isInlined && hasDstUsedBuiltInReturnType)
{
@@ -1010,20 +1002,58 @@ Inline::InlinePolymorphicFunctionUsingFixedMethods(IR::Instr *callInstr, const F
return instrNext;
}
-IR::Instr * Inline::TryGetCallbackDefInstr(IR::Instr * callInstr)
+IR::RegOpnd * Inline::GetCallbackFunctionOpnd(IR::Instr * callInstr)
+{
+ IR::Instr * callApplyLdInstr = callInstr->GetSrc1()->GetStackSym()->GetInstrDef();
+ IR::Instr * targetDefInstr = callApplyLdInstr->GetSrc1()->AsPropertySymOpnd()->GetObjectSym()->GetInstrDef();
+ return targetDefInstr->GetDst()->AsRegOpnd();
+}
+
+IR::Instr * Inline::TryGetCallbackDefInstrForCallInstr(IR::Instr * callInstr)
{
// Try to find a function argument that could be inlined.
- IR::Instr * defInstr = callInstr;
- StackSym * linkSym = callInstr->GetSrc1()->GetStackSym();
- Assert(linkSym != nullptr);
+ StackSym * callbackSym = callInstr->GetSrc1()->GetStackSym();
+ Assert(callbackSym != nullptr);
+ return TryGetCallbackDefInstr(callbackSym);
+}
- Inline * currFrame = this;
+IR::Instr * Inline::TryGetCallbackDefInstrForCallApplyTarget(IR::Instr * callApplyLdInstr)
+{
+ // Try to find a function argument that could be inlined.
+ if (!callApplyLdInstr->GetSrc1()->IsSymOpnd() || !callApplyLdInstr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
+ {
+ return nullptr;
+ }
+ StackSym * callbackSym = callApplyLdInstr->GetSrc1()->AsPropertySymOpnd()->GetObjectSym();
+ Assert(callbackSym != nullptr);
+ return TryGetCallbackDefInstr(callbackSym);
+}
+
+IR::Instr * Inline::TryGetCallbackDefInstrForCallInstanceFunction(IR::Instr * callInstr)
+{
+ IR::Instr * argImplicitThisInstr = nullptr;
+ IR::Instr * argFunction = nullptr;
+
+ callInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
+ argFunction = argImplicitThisInstr;
+ argImplicitThisInstr = argInstr;
+ return false;
+ });
+
+ StackSym * callbackSym = argFunction->GetSrc1()->GetStackSym();
+ Assert(callbackSym != nullptr);
+ return TryGetCallbackDefInstr(callbackSym);
+}
- while (linkSym->m_isSingleDef)
+IR::Instr * Inline::TryGetCallbackDefInstr(StackSym * callbackSym)
+{
+ Inline * currFrame = this;
+ IR::Instr * defInstr = nullptr;
+ while (callbackSym->m_isSingleDef)
{
- if (linkSym->m_instrDef != nullptr)
+ if (callbackSym->m_instrDef != nullptr)
{
- defInstr = linkSym->m_instrDef;
+ defInstr = callbackSym->m_instrDef;
}
else
{
@@ -1037,7 +1067,7 @@ IR::Instr * Inline::TryGetCallbackDefInstr(IR::Instr * callInstr)
Assert(callingInstr != nullptr && callingInstr->IsProfiledInstr());
            Js::ProfileId callSiteId = static_cast<Js::ProfileId>(callingInstr->AsProfiledInstr()->u.profileId);
- Js::ArgSlot argIndex = linkSym->GetParamSlotNum() - 1;
+ Js::ArgSlot argIndex = callbackSym->GetParamSlotNum() - 1;
Func * callingFunc = callingInstr->m_func;
if (!callingFunc->GetReadOnlyProfileInfo()->CanInlineCallback(argIndex, callSiteId))
@@ -1052,7 +1082,7 @@ IR::Instr * Inline::TryGetCallbackDefInstr(IR::Instr * callInstr)
defInstr = nullptr;
- // find the appropraite argOut from the call site.
+ // find the appropriate argOut from the call site.
callingInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
StackSym *argSym = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
if (argSym->GetArgSlotNum() - 1 == argIndex)
@@ -1074,8 +1104,8 @@ IR::Instr * Inline::TryGetCallbackDefInstr(IR::Instr * callInstr)
return nullptr;
}
- linkSym = linkOpnd->GetStackSym();
- if (linkSym == nullptr)
+ callbackSym = linkOpnd->GetStackSym();
+ if (callbackSym == nullptr)
{
return nullptr;
}
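
The refactored TryGetCallbackDefInstr above walks a single-def symbol backwards; when the symbol turns out to be a formal parameter of an inlinee, the search hops to the caller's frame, locates the ArgOut feeding that parameter at the call site, and continues from there. A simplified, self-contained model of that frame-hopping walk (the types and fields are illustrative, not the JIT's real data structures):

#include <cstdint>
#include <optional>
#include <vector>

struct CallbackDef { uint32_t instrId; };        // stands in for the defining IR::Instr*

// One inline frame, innermost first. A symbol is either defined locally (localDef)
// or arrives as the frame's Nth formal parameter, fed by the caller's ArgOut.
struct InlineFrame
{
    std::optional<CallbackDef> localDef;
    std::optional<uint32_t> paramIndex;           // callbackSym->GetParamSlotNum() - 1 in the code above
    bool callerAllowsCallbackInlining = true;     // CanInlineCallback(argIndex, callSiteId)
};

static std::optional<CallbackDef> TryGetCallbackDef(const std::vector<InlineFrame>& frames)
{
    for (const InlineFrame& frame : frames)
    {
        if (frame.localDef)
        {
            return frame.localDef;                // found the instruction that defines the callback
        }
        if (!frame.paramIndex || !frame.callerAllowsCallbackInlining)
        {
            return std::nullopt;                  // chain broken: give up on callback inlining
        }
        // Otherwise: move to the caller frame and continue from the ArgOut that
        // supplied this formal parameter at the call site.
    }
    return std::nullopt;
}

int main()
{
    std::vector<InlineFrame> frames(2);
    frames[0].paramIndex = 0;                     // the callback arrives as the inlinee's first argument
    frames[1].localDef = CallbackDef{ 123 };      // and is defined in the caller
    return TryGetCallbackDef(frames).has_value() ? 0 : 1;
}
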
@@ -1303,6 +1333,7 @@ void Inline::InsertOneInlinee(IR::Instr* callInstr, IR::RegOpnd* returnValueOpnd
else
{
currentCallInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
+ currentCallInstr->SetByteCodeOffset(callInstr);
currentCallInstr->SetSrc1(methodOpnd);
if (returnValueOpnd)
{
@@ -1326,12 +1357,13 @@ void Inline::InsertOneInlinee(IR::Instr* callInstr, IR::RegOpnd* returnValueOpnd
Js::ArgSlot actualCount = MapActuals(currentCallInstr, argOuts, Js::InlineeCallInfo::MaxInlineeArgoutCount, inlinee, (Js::ProfileId)callInstr->AsProfiledInstr()->u.profileId, &stackArgsArgOutExpanded);
Assert(actualCount > 0);
MapFormals(inlinee, argOuts, funcBody->GetInParamsCount(), actualCount, returnValueOpnd, currentCallInstr->GetSrc1(), symCallerThis, stackArgsArgOutExpanded, fixedFunctionSafeThis, argOuts);
+ inlinee->SetInlineeStart(currentCallInstr);
currentCallInstr->m_func = inlinee;
// Put the meta arguments that the stack walker expects to find on the stack.
// As all the argouts are shared among the inlinees, do this only once.
SetupInlineeFrame(inlinee, currentCallInstr, actualCount, currentCallInstr->GetSrc1());
-
+
IR::Instr* inlineeEndInstr = IR::Instr::New(Js::OpCode::InlineeEnd, inlinee);
inlineeEndInstr->SetByteCodeOffset(inlinee->m_tailInstr->GetPrevRealInstr());
inlineeEndInstr->SetSrc1(IR::IntConstOpnd::New(actualCount + Js::Constants::InlineeMetaArgCount, TyInt32, inlinee));
@@ -2006,7 +2038,16 @@ Inline::TryOptimizeInstrWithFixedDataProperty(IR::Instr *&instr)
// dstC = MOVSD s1(XMM0)
IR::Instr *
-Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, Js::OpCode inlineCallOpCode, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint profileId, uint recursiveInlineDepth)
+Inline::InlineBuiltInFunction(
+ IR::Instr *callInstr,
+ const FunctionJITTimeInfo * inlineeData,
+ Js::OpCode inlineCallOpCode,
+ const FunctionJITTimeInfo * inlinerData,
+ const StackSym *symCallerThis,
+ bool* pIsInlined,
+ uint profileId,
+ uint recursiveInlineDepth,
+ IR::Instr * funcObjCheckInsertInstr)
{
Assert(callInstr);
Assert(inlinerData);
@@ -2017,156 +2058,30 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
// Inlining is profile-based, so get the built-in function from profile rather than from the callInstr's opnd.
Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInForFuncInfo(inlineeData->GetLocalFunctionId());
-
-#if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
-#endif
- if(inlineCallOpCode == Js::OpCode::InlineMathFloor || inlineCallOpCode == Js::OpCode::InlineMathCeil || inlineCallOpCode == Js::OpCode::InlineMathRound)
- {
-#if defined(_M_IX86) || defined(_M_X64)
- if (!AutoSystemInfo::Data.SSE4_1Available())
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: SSE4.1 not available\tInlinee: %s (#%d)\tCaller: %s\n"), Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, inlinerData->GetBody()->GetDisplayName());
- return callInstr->m_next;
- }
-#endif
- if(callInstr->m_func->GetTopFunc()->HasProfileInfo() && callInstr->m_func->GetTopFunc()->GetReadOnlyProfileInfo()->IsFloorInliningDisabled())
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: Floor Inlining Disabled\tInlinee: %s (#%d)\tCaller: %s\n"), Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, inlinerData->GetBody()->GetDisplayName());
- return callInstr->m_next;
- }
- }
-
- if (callInstr->GetSrc2() &&
- callInstr->GetSrc2()->IsSymOpnd() &&
- callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum() > Js::InlineeCallInfo::MaxInlineeArgoutCount)
+ if (!CanInlineBuiltInFunction(callInstr, inlineeData, inlineCallOpCode, inlinerData, builtInId, false))
{
- // This is a hard limit as we only use 4 bits to encode the actual count in the InlineeCallInfo. Although
- // InliningDecider already checks for this, the check is against profile data that may not be accurate since profile
- // data matching does not take into account some types of changes to source code. Need to check this again with current
- // information.
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: ArgSlot > MaxInlineeArgoutCount\tInlinee: %s (#%d)\tArgSlotNum: %d\tMaxInlineeArgoutCount: %d\tCaller: %s (#%d)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum(),
- Js::InlineeCallInfo::MaxInlineeArgoutCount, inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
return callInstr->m_next;
}
Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
-
- bool isAnyArgFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
- if (isAnyArgFloat && !GlobOpt::DoFloatTypeSpec(this->topFunc))
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: float type spec is off\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- bool canDstBeFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecDstToFloat) != 0;
- if (canDstBeFloat && !Js::JavascriptLibrary::CanFloatPreferenceFunc(builtInId) && inlineCallOpCode != Js::OpCode::InlineArrayPop)
- {
- // Note that for Math.abs that means that even though it can potentially be type-spec'd to int, we won't inline it.
- // Some built-in functions, such as atan2, are disabled for float-pref.
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: Cannot float-type-spec the inlinee\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, // Get the _value (cause operator _E) to avoid using struct directly.
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- bool isAnyArgInt = (builtInFlags & (Js::BuiltInFlags::BIF_TypeSpecDstToInt | Js::BuiltInFlags::BIF_TypeSpecSrc1ToInt | Js::BuiltInFlags::BIF_TypeSpecSrc2ToInt)) != 0;
- if (isAnyArgInt && !GlobOpt::DoAggressiveIntTypeSpec(this->topFunc))
- {
- // Note that for Math.abs that means that even though it can potentially be type-spec'd to float, we won't inline it.
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: int type spec is off\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- if(inlineCallOpCode == Js::OpCode::InlineMathImul && !GlobOpt::DoLossyIntTypeSpec(topFunc))
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: lossy int type spec is off, it's required for Math.imul to do | 0 on src opnds\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- if(inlineCallOpCode == Js::OpCode::InlineMathClz && !GlobOpt::DoLossyIntTypeSpec(topFunc))
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: lossy int type spec is off, it's required for Math.clz32 to do | 0 on src opnds\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- if (inlineCallOpCode == Js::OpCode::InlineFunctionApply && (!callInstr->m_func->GetHasStackArgs() || this->topFunc->GetJITFunctionBody()->IsInlineApplyDisabled()))
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: stack args of inlining is off\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- // TODO: when adding support for other type spec args (array, string) do appropriate check as well.
-
- Assert(callInstr->GetSrc1());
- Assert(callInstr->GetSrc1()->IsRegOpnd());
- Assert(callInstr->GetSrc1()->AsRegOpnd()->m_sym);
-
- if (!(builtInFlags & Js::BuiltInFlags::BIF_IgnoreDst) && callInstr->GetDst() == nullptr && inlineCallOpCode != Js::OpCode::InlineArrayPop)
- {
- // Is seems that it's not worth optimizing odd cases where the result is unused.
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: inlinee's return value is not assigned to anything\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
-
- // Number of arguments, not including "this".
- IntConstType requiredInlineCallArgCount = (IntConstType)Js::JavascriptLibrary::GetArgCForBuiltIn(builtInId);
-
- IR::Opnd* linkOpnd = callInstr->GetSrc2();
+ IR::Opnd * linkOpnd = callInstr->GetSrc2();
Js::ArgSlot actualCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
- // Check for missing actuals:
- // if number of passed params to built-in function is not what it needs, don't inline.
bool usesThisArgument = (builtInFlags & Js::BuiltInFlags::BIF_UseSrc0) != 0;
int inlineCallArgCount = (int)(usesThisArgument ? actualCount : actualCount - 1);
- Assert(inlineCallArgCount >= 0);
-
- if (linkOpnd->IsSymOpnd())
- {
- if((builtInFlags & Js::BuiltInFlags::BIF_VariableArgsNumber) != 0)
- {
- if(inlineCallArgCount > requiredInlineCallArgCount)
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: parameter count exceeds the maximum number of parameters allowed\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
- }
- else if(inlineCallArgCount != requiredInlineCallArgCount)
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: parameter count doesn't match dynamic profile\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
- Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
- return callInstr->m_next;
- }
- }
IR::Instr *inlineBuiltInEndInstr = nullptr;
- if (inlineCallOpCode == Js::OpCode::InlineFunctionApply)
+ if (inlineCallOpCode == Js::OpCode::InlineFunctionApply)
{
return InlineApply(callInstr, inlineeData, inlinerData, symCallerThis, pIsInlined, profileId, recursiveInlineDepth, inlineCallArgCount - (usesThisArgument ? 1 : 0));
}
- if (inlineCallOpCode == Js::OpCode::InlineFunctionCall)
+ if (inlineCallOpCode == Js::OpCode::InlineFunctionCall || inlineCallOpCode == Js::OpCode::InlineCallInstanceFunction)
{
- return InlineCall(callInstr, inlineeData, inlinerData, symCallerThis, pIsInlined, profileId, recursiveInlineDepth);
+ const bool isCallInstanceFunction = (inlineCallOpCode == Js::OpCode::InlineCallInstanceFunction);
+ return InlineCall(callInstr, inlineeData, inlinerData, symCallerThis, pIsInlined, profileId, recursiveInlineDepth, isCallInstanceFunction);
}
-
#if defined(ENABLE_DEBUG_CONFIG_OPTIONS)
TraceInlining(inlinerData, Js::JavascriptLibrary::GetNameForBuiltIn(builtInId),
nullptr, 0, this->topFunc->GetWorkItem()->GetJITTimeInfo(), 0, nullptr, profileId, callInstr->m_func->GetTopFunc()->IsLoopBody(), builtInId);
@@ -2180,10 +2095,10 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
StackSym* originalCallTargetStackSym = callInstr->GetSrc1()->GetStackSym();
bool originalCallTargetOpndIsJITOpt = callInstr->GetSrc1()->GetIsJITOptimizedReg();
- IR::ByteCodeUsesInstr* useCallTargetInstr = EmitFixedMethodOrFunctionObjectChecksForBuiltIns(callInstr, callInstr, inlineeData, false, true, false, true);
+ IR::ByteCodeUsesInstr* useCallTargetInstr = EmitFixedMethodOrFunctionObjectChecksForBuiltIns(callInstr, funcObjCheckInsertInstr, inlineeData, false, true, false, true);
// To push function object for cases when we have to make calls to helper method to assist in inlining
- if(inlineCallOpCode == Js::OpCode::CallDirect)
+ if (inlineCallOpCode == Js::OpCode::CallDirect)
{
IR::Instr* argoutInstr;
StackSym *dstSym = callInstr->m_func->m_symTable->GetArgSlotSym((uint16)(1));
@@ -2210,7 +2125,7 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
// InlineArrayPop - TrackCalls Need to be done at InlineArrayPop and not at the InlineBuiltInEnd
// Hence we use a new opcode, to detect that it is an InlineArrayPop and we don't track the call during End of inlineBuiltInCall sequence
- if(inlineCallOpCode == Js::OpCode::InlineArrayPop)
+ if (inlineCallOpCode == Js::OpCode::InlineArrayPop)
{
inlineBuiltInEndInstr->m_opcode = Js::OpCode::InlineNonTrackingBuiltInEnd;
}
@@ -2222,11 +2137,11 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
callInstr->InsertBefore(useCallTargetInstr);
}
- if(Js::JavascriptLibrary::IsTypeSpecRequired(builtInFlags))
+ if (Js::JavascriptLibrary::IsTypeSpecRequired(builtInFlags))
{
// Emit byteCodeUses for function object
IR::Instr * inlineBuiltInStartInstr = inlineBuiltInEndInstr;
- while(inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
+ while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
{
inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
}
@@ -2234,7 +2149,7 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
IR::Opnd * tmpDst = nullptr;
IR::Opnd * callInstrDst = callInstr->GetDst();
- if(callInstrDst && inlineCallOpCode != Js::OpCode::InlineArrayPop)
+ if (callInstrDst && inlineCallOpCode != Js::OpCode::InlineArrayPop)
{
StackSym * tmpSym = StackSym::New(callInstr->GetDst()->GetType(), callInstr->m_func);
tmpDst = IR::RegOpnd::New(tmpSym, tmpSym->GetType(), callInstr->m_func);
@@ -2253,16 +2168,16 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
useCallTargetInstr = IR::ByteCodeUsesInstr::New(callInstr->GetPrevRealInstrOrLabel());
useCallTargetInstr->SetRemovedOpndSymbol(originalCallTargetOpndIsJITOpt, originalCallTargetStackSym->m_id);
- if(inlineCallOpCode == Js::OpCode::InlineArrayPop)
+ if (inlineCallOpCode == Js::OpCode::InlineArrayPop)
{
- callInstr->InsertBefore(useCallTargetInstr);
+ callInstr->InsertBefore(useCallTargetInstr);
}
else
{
inlineBuiltInEndInstr->InsertBefore(useCallTargetInstr);
}
- if(tmpDst)
+ if (tmpDst)
{
IR::Instr * ldInstr = IR::Instr::New(Func::GetLoadOpForType(callInstrDst->GetType()), callInstrDst, tmpDst, callInstr->m_func);
inlineBuiltInEndInstr->InsertBefore(ldInstr);
@@ -2288,9 +2203,9 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
if (OpCodeAttr::BailOutRec(inlineCallOpCode))
{
StackSym * sym = argInstr->GetSrc1()->GetStackSym();
- if (!sym->m_isSingleDef || !sym->m_instrDef->GetSrc1() || !sym->m_instrDef->GetSrc1()->IsConstOpnd())
+ if (sym->HasByteCodeRegSlot() && (!sym->m_isSingleDef || !sym->m_instrDef->GetSrc1() || !sym->m_instrDef->GetSrc1()->IsConstOpnd()))
{
- if (!sym->IsFromByteCodeConstantTable())
+ if (!sym->IsFromByteCodeConstantTable() && sym->GetByteCodeRegSlot() != callInstrDst->GetStackSym()->GetByteCodeRegSlot())
{
byteCodeUsesInstr->Set(argInstr->GetSrc1());
}
@@ -2323,7 +2238,7 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
return false;
});
- if(inlineCallOpCode == Js::OpCode::InlineMathImul || inlineCallOpCode == Js::OpCode::InlineMathClz)
+ if (inlineCallOpCode == Js::OpCode::InlineMathImul || inlineCallOpCode == Js::OpCode::InlineMathClz)
{
// Convert:
// s1 = InlineMathImul s2, s3
@@ -2359,7 +2274,7 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
}
}
- if(OpCodeAttr::BailOutRec(inlineCallOpCode))
+ if (OpCodeAttr::BailOutRec(inlineCallOpCode))
{
inlineBuiltInEndInstr->InsertBefore(byteCodeUsesInstr);
}
@@ -2371,7 +2286,7 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
// and not adjust the stack height on x86
linkOpnd->AsRegOpnd()->m_sym->m_isInlinedArgSlot = true;
- if(OpCodeAttr::BailOutRec(inlineCallOpCode))
+ if (OpCodeAttr::BailOutRec(inlineCallOpCode))
{
callInstr = callInstr->ConvertToBailOutInstr(callInstr, IR::BailOutOnFloor);
}
@@ -2379,81 +2294,229 @@ Inline::InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *
return inlineBuiltInEndInstr->m_next;
}
-IR::Instr* Inline::InsertInlineeBuiltInStartEndTags(IR::Instr* callInstr, uint actualCount, IR::Instr** builtinStartInstr)
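+// Returns true when a profiled built-in call site meets all of the prerequisites for inlining: SSE4.1 availability for
+// floor/ceil/round, the relevant type-spec settings, the InlineeCallInfo argout limit, a usable dst, and an actual-argument
+// count that matches what the built-in requires.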
+bool
+Inline::CanInlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, Js::OpCode inlineCallOpCode, const FunctionJITTimeInfo * inlinerData, Js::BuiltinFunction builtInId, bool isCallApplyTarget)
{
- IR::Instr* inlineBuiltInStartInstr = IR::Instr::New(Js::OpCode::InlineBuiltInStart, callInstr->m_func);
- inlineBuiltInStartInstr->SetSrc1(callInstr->GetSrc1());
- inlineBuiltInStartInstr->SetSrc2(callInstr->GetSrc2());
- inlineBuiltInStartInstr->SetByteCodeOffset(callInstr);
- callInstr->InsertBefore(inlineBuiltInStartInstr);
- if (builtinStartInstr)
+#if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
+ char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
+#endif
+ if (inlineCallOpCode == 0)
{
- *builtinStartInstr = inlineBuiltInStartInstr;
+ return false;
}
- IR::Instr* inlineBuiltInEndInstr = IR::Instr::New(Js::OpCode::InlineBuiltInEnd, callInstr->m_func);
- inlineBuiltInEndInstr->SetSrc1(IR::IntConstOpnd::New(actualCount, TyInt32, callInstr->m_func));
- inlineBuiltInEndInstr->SetSrc2(callInstr->GetSrc2());
- inlineBuiltInEndInstr->SetByteCodeOffset(callInstr);
- callInstr->InsertAfter(inlineBuiltInEndInstr);
- return inlineBuiltInEndInstr;
-}
-
-IR::Instr* Inline::GetDefInstr(IR::Opnd* linkOpnd)
-{
- StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
- Assert(linkSym->m_isSingleDef);
- Assert(linkSym->IsArgSlotSym());
-
- return linkSym->m_instrDef;
-}
-
-IR::Instr* Inline::InlineApply(IR::Instr *callInstr, const FunctionJITTimeInfo *applyData, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth, uint argsCount)
-{
- // We may still decide not to inline.
- *pIsInlined = false;
-
- IR::Instr* instrNext = callInstr->m_next;
- if (argsCount == 0)
+ if (inlineCallOpCode == Js::OpCode::InlineMathFloor || inlineCallOpCode == Js::OpCode::InlineMathCeil || inlineCallOpCode == Js::OpCode::InlineMathRound)
{
- return instrNext;
+#if defined(_M_IX86) || defined(_M_X64)
+ if (!AutoSystemInfo::Data.SSE4_1Available())
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: SSE4.1 not available\tInlinee: %s (#%d)\tCaller: %s\n"), Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, inlinerData->GetBody()->GetDisplayName());
+ return false;
+ }
+#endif
+ if (callInstr->m_func->GetTopFunc()->HasProfileInfo() && callInstr->m_func->GetTopFunc()->GetReadOnlyProfileInfo()->IsFloorInliningDisabled())
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: Floor Inlining Disabled\tInlinee: %s (#%d)\tCaller: %s\n"), Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, inlinerData->GetBody()->GetDisplayName());
+ return false;
+ }
}
- Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInForFuncInfo(applyData->GetLocalFunctionId());
- const FunctionJITTimeInfo * inlineeData = nullptr;
-
- IR::Instr* arrayArgInstr = nullptr;
- IR::Opnd *arrayArgOpnd = nullptr;
- if (argsCount == 2) // apply was called with 2 arguments, most common case
+ if (callInstr->GetSrc2() &&
+ callInstr->GetSrc2()->IsSymOpnd() &&
+ callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum() > Js::InlineeCallInfo::MaxInlineeArgoutCount)
{
- IR::SymOpnd* linkOpnd = callInstr->GetSrc2()->AsSymOpnd();
- StackSym *arrayArgsym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
- Assert(arrayArgsym->m_isSingleDef);
- Assert(arrayArgsym->IsArgSlotSym());
-
- arrayArgInstr = arrayArgsym->m_instrDef;
- arrayArgOpnd = arrayArgInstr->GetSrc1();
+ // This is a hard limit as we only use 4 bits to encode the actual count in the InlineeCallInfo. Although
+ // InliningDecider already checks for this, the check is against profile data that may not be accurate since profile
+ // data matching does not take into account some types of changes to source code. Need to check this again with current
+ // information.
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: ArgSlot > MaxInlineeArgoutCount\tInlinee: %s (#%d)\tArgSlotNum: %d\tMaxInlineeArgoutCount: %d\tCaller: %s (#%d)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum(),
+ Js::InlineeCallInfo::MaxInlineeArgoutCount, inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
}
- // if isArrayOpndArgumentsObject == false, the array opnd can still be the arguments object; we just can't say that for sure
- bool isArrayOpndArgumentsObject = arrayArgOpnd && arrayArgOpnd->IsArgumentsObject();
+ Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
- IR::Instr * returnInstr = nullptr;
- if (!PHASE_OFF(Js::InlineApplyTargetPhase, this->topFunc))
+ bool isAnyArgFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
+ if (isAnyArgFloat && !GlobOpt::DoFloatTypeSpec(this->topFunc))
{
- if (InlineApplyScriptTarget(callInstr, inlinerData, &inlineeData, applyData, symCallerThis, &returnInstr, recursiveInlineDepth, isArrayOpndArgumentsObject, argsCount))
- {
- *pIsInlined = true;
- Assert(returnInstr);
- return returnInstr;
- }
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: float type spec is off\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
}
-#if defined(ENABLE_DEBUG_CONFIG_OPTIONS)
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
-#endif
+ bool canDstBeFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecDstToFloat) != 0;
+ if (canDstBeFloat && !Js::JavascriptLibrary::CanFloatPreferenceFunc(builtInId) && inlineCallOpCode != Js::OpCode::InlineArrayPop)
+ {
+ // Note that for Math.abs that means that even though it can potentially be type-spec'd to int, we won't inline it.
+ // Some built-in functions, such as atan2, are disabled for float-pref.
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: Cannot float-type-spec the inlinee\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId, // Get the _value (cause operator _E) to avoid using struct directly.
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
- if (argsCount == 1) // apply called with just 1 argument, the 'this' object.
+ bool isAnyArgInt = (builtInFlags & (Js::BuiltInFlags::BIF_TypeSpecDstToInt | Js::BuiltInFlags::BIF_TypeSpecSrc1ToInt | Js::BuiltInFlags::BIF_TypeSpecSrc2ToInt)) != 0;
+ if (isAnyArgInt && !GlobOpt::DoAggressiveIntTypeSpec(this->topFunc))
+ {
+ // Note that for Math.abs that means that even though it can potentially be type-spec'd to float, we won't inline it.
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: int type spec is off\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+
+ if (inlineCallOpCode == Js::OpCode::InlineMathImul && !GlobOpt::DoLossyIntTypeSpec(topFunc))
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: lossy int type spec is off, it's required for Math.imul to do | 0 on src opnds\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+
+ if (inlineCallOpCode == Js::OpCode::InlineMathClz && !GlobOpt::DoLossyIntTypeSpec(topFunc))
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: lossy int type spec is off, it's required for Math.clz32 to do | 0 on src opnds\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+
+ if (inlineCallOpCode == Js::OpCode::InlineFunctionApply && (!callInstr->m_func->GetHasStackArgs() || this->topFunc->GetJITFunctionBody()->IsInlineApplyDisabled()))
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: stack args of inlining is off\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+
+ // TODO: when adding support for other type spec args (array, string) do appropriate check as well.
+
+ Assert(callInstr->GetSrc1());
+ Assert(callInstr->GetSrc1()->IsRegOpnd());
+ Assert(callInstr->GetSrc1()->AsRegOpnd()->m_sym);
+
+ if (!(builtInFlags & Js::BuiltInFlags::BIF_IgnoreDst) && callInstr->GetDst() == nullptr && inlineCallOpCode != Js::OpCode::InlineArrayPop)
+ {
+ // It seems that it's not worth optimizing odd cases where the result is unused.
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: inlinee's return value is not assigned to anything\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+
+ // Number of arguments, not including "this".
+ IntConstType requiredInlineCallArgCount = (IntConstType)Js::JavascriptLibrary::GetArgCForBuiltIn(builtInId);
+
+ IR::Opnd* linkOpnd = callInstr->GetSrc2();
+ Js::ArgSlot actualCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum() - (isCallApplyTarget ? 1 : 0);
+
+ // Check for missing actuals:
+ // if number of passed params to built-in function is not what it needs, don't inline.
+ bool usesThisArgument = (builtInFlags & Js::BuiltInFlags::BIF_UseSrc0) != 0;
+ int inlineCallArgCount = (int)(usesThisArgument ? actualCount : actualCount - 1);
+ Assert(inlineCallArgCount >= 0);
+
+ if (linkOpnd->IsSymOpnd())
+ {
+ if ((builtInFlags & Js::BuiltInFlags::BIF_VariableArgsNumber) != 0)
+ {
+ if (inlineCallArgCount > requiredInlineCallArgCount)
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: parameter count exceeds the maximum number of parameters allowed\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+ }
+ else if (inlineCallArgCount != requiredInlineCallArgCount)
+ {
+ INLINE_TESTTRACE(_u("INLINING: Skip Inline: parameter count doesn't match dynamic profile\tInlinee: %s (#%d)\tCaller: %s (%s)\n"),
+ Js::JavascriptLibrary::GetNameForBuiltIn(builtInId), (int)builtInId,
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+ return false;
+ }
+ }
+
+ return true;
+}
+
+IR::Instr* Inline::InsertInlineeBuiltInStartEndTags(IR::Instr* callInstr, uint actualCount, IR::Instr** builtinStartInstr)
+{
+ IR::Instr* inlineBuiltInStartInstr = IR::Instr::New(Js::OpCode::InlineBuiltInStart, callInstr->m_func);
+ inlineBuiltInStartInstr->SetSrc1(callInstr->GetSrc1());
+ inlineBuiltInStartInstr->SetSrc2(callInstr->GetSrc2());
+ inlineBuiltInStartInstr->SetByteCodeOffset(callInstr);
+ callInstr->InsertBefore(inlineBuiltInStartInstr);
+ if (builtinStartInstr)
+ {
+ *builtinStartInstr = inlineBuiltInStartInstr;
+ }
+
+ IR::Instr* inlineBuiltInEndInstr = IR::Instr::New(Js::OpCode::InlineBuiltInEnd, callInstr->m_func);
+ inlineBuiltInEndInstr->SetSrc1(IR::IntConstOpnd::New(actualCount, TyInt32, callInstr->m_func));
+ inlineBuiltInEndInstr->SetSrc2(callInstr->GetSrc2());
+ inlineBuiltInEndInstr->SetByteCodeOffset(callInstr);
+ callInstr->InsertAfter(inlineBuiltInEndInstr);
+ return inlineBuiltInEndInstr;
+}
+
+IR::Instr* Inline::GetDefInstr(IR::Opnd* linkOpnd)
+{
+ StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
+ Assert(linkSym->m_isSingleDef);
+ Assert(linkSym->IsArgSlotSym());
+
+ return linkSym->m_instrDef;
+}
+
+IR::Instr* Inline::InlineApply(IR::Instr *callInstr, const FunctionJITTimeInfo *applyData, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth, uint argsCount)
+{
+ // We may still decide not to inline.
+ *pIsInlined = false;
+
+ IR::Instr* instrNext = callInstr->m_next;
+ if (argsCount == 0)
+ {
+ return instrNext;
+ }
+
+ Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInForFuncInfo(applyData->GetLocalFunctionId());
+ const FunctionJITTimeInfo * inlineeData = nullptr;
+
+ IR::Instr* arrayArgInstr = nullptr;
+ IR::Opnd *arrayArgOpnd = nullptr;
+ if (argsCount == 2) // apply was called with 2 arguments, most common case
+ {
+ IR::SymOpnd* linkOpnd = callInstr->GetSrc2()->AsSymOpnd();
+ StackSym *arrayArgsym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
+ Assert(arrayArgsym->m_isSingleDef);
+ Assert(arrayArgsym->IsArgSlotSym());
+
+ arrayArgInstr = arrayArgsym->m_instrDef;
+ arrayArgOpnd = arrayArgInstr->GetSrc1();
+ }
+
+ // if isArrayOpndArgumentsObject == false, the array opnd can still be the arguments object; we just can't say that for sure
+ bool isArrayOpndArgumentsObject = arrayArgOpnd && arrayArgOpnd->IsArgumentsObject();
+
+ IR::Instr * returnInstr = nullptr;
+ if (!PHASE_OFF(Js::InlineApplyTargetPhase, this->topFunc))
+ {
+ if (InlineApplyScriptTarget(callInstr, inlinerData, &inlineeData, applyData, symCallerThis, &returnInstr, recursiveInlineDepth, isArrayOpndArgumentsObject, argsCount))
+ {
+ *pIsInlined = true;
+ Assert(returnInstr);
+ return returnInstr;
+ }
+ }
+
+#if defined(ENABLE_DEBUG_CONFIG_OPTIONS)
+ char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
+#endif
+
+ if (argsCount == 1) // apply called with just 1 argument, the 'this' object.
{
if (PHASE_OFF1(Js::InlineApplyWithoutArrayArgPhase))
{
@@ -2474,8 +2537,6 @@ IR::Instr* Inline::InlineApply(IR::Instr *callInstr, const FunctionJITTimeInfo *
{
if (inlineeData && inlineeData->GetBody() == nullptr)
{
- *pIsInlined = true;
-
#if defined(ENABLE_DEBUG_CONFIG_OPTIONS)
TraceInlining(inlinerData, Js::JavascriptLibrary::GetNameForBuiltIn(builtInId),
nullptr, 0, this->topFunc->GetWorkItem()->GetJITTimeInfo(), 0, nullptr, callSiteId, callInstr->m_func->GetTopFunc()->IsLoopBody(), builtInId);
@@ -2483,7 +2544,7 @@ IR::Instr* Inline::InlineApply(IR::Instr *callInstr, const FunctionJITTimeInfo *
// TODO: OOP JIT enable assert (readprocessmemory?)
//Assert((inlineeData->GetFunctionInfo()->GetAttributes() & Js::FunctionInfo::Attributes::BuiltInInlinableAsLdFldInlinee) != 0);
- return InlineApplyBuiltInTargetWithArray(callInstr, applyData, inlineeData);
+ return InlineApplyBuiltInTargetWithArray(callInstr, applyData, inlineeData, pIsInlined);
}
else
{
@@ -2563,7 +2624,8 @@ IR::Instr * Inline::InlineApplyWithArgumentsObject(IR::Instr * callInstr, IR::In
// Optimize .init.apply(this, arguments);
IR::Instr* builtInStartInstr;
- InsertInlineeBuiltInStartEndTags(callInstr, 3, &builtInStartInstr); //3 args (implicit this + explicit this + arguments = 3)
+ IR::Instr *instr = InsertInlineeBuiltInStartEndTags(callInstr, 3, &builtInStartInstr); //3 args (implicit this + explicit this + arguments = 3)
+ instr->m_opcode = Js::OpCode::InlineNonTrackingBuiltInEnd;
// Move argouts close to call. Globopt expects this for arguments object tracking.
IR::Instr* argInsertInstr = builtInStartInstr;
@@ -2601,9 +2663,25 @@ IR::Instr * Inline::InlineApplyWithArgumentsObject(IR::Instr * callInstr, IR::In
/*
This method will only do CallDirect style inlining of built-in targets. No script function inlining.
*/
-IR::Instr * Inline::InlineApplyBuiltInTargetWithArray(IR::Instr * callInstr, const FunctionJITTimeInfo * applyInfo, const FunctionJITTimeInfo * builtInInfo)
+IR::Instr * Inline::InlineApplyBuiltInTargetWithArray(IR::Instr * callInstr, const FunctionJITTimeInfo * applyInfo, const FunctionJITTimeInfo * builtInInfo, bool * pIsInlined)
{
- IR::Instr* instrNext = callInstr->m_next;
+ IR::Instr * instrNext = callInstr->m_next;
+ Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInForFuncInfo(builtInInfo->GetLocalFunctionId());
+ IR::HelperCallOpnd * helperCallOpnd = nullptr;
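+ // Only Math.max and Math.min have array-based helper equivalents; bail out before touching the IR for any other target.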
+ switch (builtInId)
+ {
+ case Js::BuiltinFunction::Math_Max:
+ helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_MaxInAnArray, callInstr->m_func);
+ break;
+
+ case Js::BuiltinFunction::Math_Min:
+ helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_MinInAnArray, callInstr->m_func);
+ break;
+
+ default:
+ return instrNext;
+ }
+
IR::Instr * implicitThisArgOut = nullptr;
IR::Instr * explicitThisArgOut = nullptr;
IR::Instr * arrayArgOut = nullptr;
@@ -2619,10 +2697,12 @@ IR::Instr * Inline::InlineApplyBuiltInTargetWithArray(IR::Instr * callInstr, con
IR::Instr* applyLdInstr = nullptr;
IR::Instr* applyTargetLdInstr = nullptr;
- if (!TryGetApplyAndTargetLdInstrs(callInstr, &applyLdInstr, &applyTargetLdInstr))
+ if (!TryGetCallApplyAndTargetLdInstrs(callInstr, &applyLdInstr, &applyTargetLdInstr))
{
return instrNext;
}
+
+ *pIsInlined = true;
// Fixed function/function object checks for target built-in
callInstr->ReplaceSrc1(applyTargetLdInstr->GetDst());
{
@@ -2666,22 +2746,6 @@ IR::Instr * Inline::InlineApplyBuiltInTargetWithArray(IR::Instr * callInstr, con
argOut = IR::Instr::New(Js::OpCode::ArgOut_A_InlineSpecialized, linkOpnd, implicitThisArgOut->GetSrc1(), argOut->GetDst(), callInstr->m_func);
callInstr->InsertBefore(argOut);
- Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInForFuncInfo(builtInInfo->GetLocalFunctionId());
- IR::HelperCallOpnd * helperCallOpnd = nullptr;
- switch (builtInId)
- {
- case Js::BuiltinFunction::Math_Max:
- helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_MaxInAnArray, callInstr->m_func);
- break;
-
- case Js::BuiltinFunction::Math_Min:
- helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_MinInAnArray, callInstr->m_func);
- break;
-
- default:
- Assert(false);
- __assume(UNREACHED);
- }
callInstr->m_opcode = Js::OpCode::CallDirect;
callInstr->ReplaceSrc1(helperCallOpnd);
callInstr->ReplaceSrc2(argOut->GetDst());
@@ -2691,7 +2755,7 @@ IR::Instr * Inline::InlineApplyBuiltInTargetWithArray(IR::Instr * callInstr, con
IR::Instr * Inline::InlineApplyWithoutArrayArgument(IR::Instr *callInstr, const FunctionJITTimeInfo * applyInfo, const FunctionJITTimeInfo * applyTargetInfo)
{
- IR::Instr* instrNext = callInstr->m_next;
+ IR::Instr * instrNext = callInstr->m_next;
IR::Instr * implicitThisArgOut = nullptr;
IR::Instr * explicitThisArgOut = nullptr;
IR::Instr * dummyInstr = nullptr;
@@ -2703,7 +2767,8 @@ IR::Instr * Inline::InlineApplyWithoutArrayArgument(IR::Instr *callInstr, const
EmitFixedMethodOrFunctionObjectChecksForBuiltIns(callInstr, callInstr, applyInfo, false /*isPolymorphic*/, true /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/);
- InsertInlineeBuiltInStartEndTags(callInstr, 2); // 2 args (implicit this + explicit this)
+ IR::Instr *instr = InsertInlineeBuiltInStartEndTags(callInstr, 2); // 2 args (implicit this + explicit this)
+ instr->m_opcode = Js::OpCode::InlineNonTrackingBuiltInEnd;
IR::Instr * startCall = IR::Instr::New(Js::OpCode::StartCall,
IR::RegOpnd::New(TyVar, callInstr->m_func),
@@ -2775,19 +2840,124 @@ void Inline::GetArgInstrsForCallAndApply(IR::Instr* callInstr, IR::Instr** impli
}
_Success_(return != false)
-bool Inline::TryGetApplyAndTargetLdInstrs(IR::Instr * callInstr, _Outptr_result_nullonfailure_ IR::Instr ** applyLdInstr, _Outptr_result_nullonfailure_ IR::Instr ** applyTargetLdInstr)
+bool Inline::TryGetCallApplyAndTargetLdInstrs(IR::Instr * callInstr, _Outptr_result_nullonfailure_ IR::Instr ** applyLdInstr, _Outptr_result_nullonfailure_ IR::Instr ** applyTargetLdInstr)
{
IR::Opnd* applyOpnd = callInstr->GetSrc1();
Assert(applyOpnd->IsRegOpnd());
- StackSym* applySym = applyOpnd->AsRegOpnd()->m_sym->AsStackSym();
- if (!applySym->IsSingleDef())
+ StackSym* applySym = applyOpnd->AsRegOpnd()->m_sym->AsStackSym();
+ if (!applySym->IsSingleDef() ||
+ !applySym->GetInstrDef()->GetSrc1()->IsSymOpnd() ||
+ !applySym->GetInstrDef()->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
{
*applyLdInstr = nullptr;
*applyTargetLdInstr = nullptr;
return false;
}
- *applyLdInstr = applySym->GetInstrDef();;
- *applyTargetLdInstr = (*applyLdInstr)->m_prev;
+
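+ // The call/apply method must be a single-def property load off the target object; the target load itself is the object sym's single def.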
+ Assert(applySym->GetInstrDef()->GetSrc1()->IsSymOpnd() && applySym->GetInstrDef()->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
+ StackSym * targetSym = applySym->GetInstrDef()->GetSrc1()->AsSymOpnd()->AsPropertySymOpnd()->GetObjectSym();
+ if (!targetSym->IsSingleDef())
+ {
+ *applyLdInstr = nullptr;
+ *applyTargetLdInstr = nullptr;
+ return false;
+ }
+
+ *applyLdInstr = applySym->GetInstrDef();
+ *applyTargetLdInstr = targetSym->GetInstrDef();
+ return true;
+}
+
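+// Resolves the JIT-time inlinee info for the target of a call/apply (or CallInstanceFunction) invocation. The LdFld inline
+// cache is consulted first, then callback tracking, and finally the call/apply call-site profile; *targetType records which
+// source produced the data.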
+bool
+Inline::TryGetCallApplyInlineeData(
+ const FunctionJITTimeInfo* inlinerData,
+ IR::Instr * callInstr,
+ IR::Instr * callApplyLdInstr,
+ IR::Instr * callApplyTargetLdInstr,
+ const FunctionJITTimeInfo ** inlineeData,
+ Js::InlineCacheIndex * inlineCacheIndex,
+ IR::Instr ** callbackDefInstr,
+ bool isCallInstanceFunction,
+ CallApplyTargetSourceType* targetType
+ )
+{
+ *callbackDefInstr = nullptr;
+
+ if (callApplyTargetLdInstr->m_opcode == Js::OpCode::LdFldForCallApplyTarget &&
+ ((callApplyTargetLdInstr->AsProfiledInstr()->u.FldInfo().flags & Js::FldInfo_FromAccessor) == 0))
+ {
+ IR::Opnd *callTargetLdOpnd = callApplyTargetLdInstr->GetSrc1();
+ if (!callTargetLdOpnd->IsSymOpnd() || !callTargetLdOpnd->AsSymOpnd()->IsPropertySymOpnd())
+ {
+ return false;
+ }
+
+ *inlineCacheIndex = callTargetLdOpnd->AsPropertySymOpnd()->m_inlineCacheIndex;
+ *inlineeData = inlinerData->GetLdFldInlinee(*inlineCacheIndex);
+ }
+
+ if (*inlineeData)
+ {
+ *targetType = CallApplyTargetSourceType::LdFld;
+ return true;
+ }
+
+ if (TryGetCallApplyCallbackTargetInlineeData(inlinerData, callInstr, callApplyLdInstr, inlineeData, callbackDefInstr, isCallInstanceFunction))
+ {
+ *targetType = CallApplyTargetSourceType::Callback;
+ return true;
+ }
+
+ if (!isCallInstanceFunction && !PHASE_OFF(Js::InlineAnyCallApplyTargetPhase, callInstr->m_func))
+ {
+ Js::ProfileId callSiteId = static_cast<Js::ProfileId>(callInstr->AsProfiledInstr()->u.profileId);
+ Js::ProfileId callApplyCallSiteId = callInstr->m_func->GetJITFunctionBody()->GetCallApplyCallSiteIdForCallSiteId(callSiteId);
+
+ if (callApplyCallSiteId != Js::Constants::NoProfileId)
+ {
+ *inlineeData = callInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetCallApplyTargetInlinee(callApplyCallSiteId);
+ }
+ }
+ if (*inlineeData)
+ {
+ *targetType = CallApplyTargetSourceType::Other;
+ }
+ return *inlineeData != nullptr;
+}
+
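+// Finds the instruction that defines the callback being invoked through call/apply (or CallInstanceFunction) and, if one
+// exists, pulls the corresponding inlinee data from the callback call-site profile.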
+bool
+Inline::TryGetCallApplyCallbackTargetInlineeData(
+ const FunctionJITTimeInfo* inlinerData,
+ IR::Instr * callInstr,
+ IR::Instr * callApplyLdInstr,
+ const FunctionJITTimeInfo ** inlineeData,
+ IR::Instr ** callbackDefInstr,
+ bool isCallInstanceFunction)
+{
+ // Try to find a callback def instr for the method.
+ if (isCallInstanceFunction)
+ {
+ *callbackDefInstr = TryGetCallbackDefInstrForCallInstanceFunction(callInstr);
+ }
+ else
+ {
+ *callbackDefInstr = TryGetCallbackDefInstrForCallApplyTarget(callApplyLdInstr);
+ }
+
+ if (*callbackDefInstr == nullptr)
+ {
+ return false;
+ }
+
+ Js::ProfileId callSiteId = static_cast<Js::ProfileId>((*callbackDefInstr)->AsProfiledInstr()->u.profileId);
+ *inlineeData = (*callbackDefInstr)->m_func->GetWorkItem()->GetJITTimeInfo()->GetCallbackInlinee(callSiteId);
+
+#if ENABLE_DEBUG_CONFIG_OPTIONS
+ char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
+ INLINE_CALLBACKS_TRACE(_u("INLINING : Found callback def instr for call/apply target callback at\tCallSite: %d\tCaller: %s (%s)\n"),
+ callSiteId, inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer));
+#endif
+
return true;
}
@@ -2814,26 +2984,20 @@ bool Inline::InlineApplyScriptTarget(IR::Instr *callInstr, const FunctionJITTime
IR::Instr* applyLdInstr = nullptr;
IR::Instr* applyTargetLdInstr = nullptr;
- if (!TryGetApplyAndTargetLdInstrs(callInstr, &applyLdInstr, &applyTargetLdInstr))
+ if (!TryGetCallApplyAndTargetLdInstrs(callInstr, &applyLdInstr, &applyTargetLdInstr))
{
return false;
}
- if(applyTargetLdInstr->m_opcode != Js::OpCode::LdFldForCallApplyTarget ||
- ((applyTargetLdInstr->AsProfiledInstr()->u.FldInfo().flags & Js::FldInfo_FromAccessor) != 0))
- {
- return false;
- }
-
- IR::Opnd *applyTargetLdOpnd = applyTargetLdInstr->GetSrc1();
- if (!applyTargetLdOpnd->IsSymOpnd() || !applyTargetLdOpnd->AsSymOpnd()->IsPropertySymOpnd())
+ const FunctionJITTimeInfo * inlineeData = nullptr;
+ Js::InlineCacheIndex inlineCacheIndex = Js::Constants::NoInlineCacheIndex;
+ IR::Instr * callbackDefInstr = nullptr;
+ CallApplyTargetSourceType targetType = CallApplyTargetSourceType::None;
+ if (!TryGetCallApplyInlineeData(inlinerData, callInstr, applyLdInstr, applyTargetLdInstr, &inlineeData, &inlineCacheIndex, &callbackDefInstr, false, &targetType))
{
return false;
}
- const auto inlineCacheIndex = applyTargetLdOpnd->AsPropertySymOpnd()->m_inlineCacheIndex;
- const auto inlineeData = inlinerData->GetLdFldInlinee(inlineCacheIndex);
-
if ((!isArrayOpndArgumentsObject && (argsCount != 1)) || SkipCallApplyScriptTargetInlining_Shared(callInstr, inlinerData, inlineeData, /*isApplyTarget*/ true, /*isCallTarget*/ false))
{
*pInlineeData = inlineeData;
@@ -2847,10 +3011,13 @@ bool Inline::InlineApplyScriptTarget(IR::Instr *callInstr, const FunctionJITTime
return false;
}
+ const bool targetIsCallback = callbackDefInstr != nullptr;
+
StackSym* originalCallTargetStackSym = callInstr->GetSrc1()->GetStackSym();
bool originalCallTargetOpndIsJITOpt = callInstr->GetSrc1()->GetIsJITOptimizedReg();
bool safeThis = false;
- if (!TryGetFixedMethodsForBuiltInAndTarget(callInstr, inlinerData, inlineeData, applyFuncInfo, applyLdInstr, applyTargetLdInstr, safeThis, /*isApplyTarget*/ true))
+
+ if (!TryGetFixedMethodsForBuiltInAndTarget(callInstr, inlinerData, inlineeData, applyFuncInfo, applyLdInstr, applyTargetLdInstr, safeThis, /*isApplyTarget*/ true, targetIsCallback))
{
return false;
}
@@ -2913,15 +3080,23 @@ bool Inline::InlineApplyScriptTarget(IR::Instr *callInstr, const FunctionJITTime
// set src1 to avoid CSE on BailOnNotStackArgs for different arguments object
bailOutOnNotStackArgs->SetSrc1(argumentsObjArgOut->GetSrc1()->Copy(this->topFunc));
argumentsObjArgOut->InsertBefore(bailOutOnNotStackArgs);
+
+ // Insert ByteCodeUses instr to ensure that arguments object is available on bailout
+ IR::ByteCodeUsesInstr* bytecodeUses = IR::ByteCodeUsesInstr::New(callInstr);
+ IR::Opnd* argSrc1 = argObjByteCodeArgoutCapture->GetSrc1();
+ bytecodeUses->SetRemovedOpndSymbol(argSrc1->GetIsJITOptimizedReg(), argSrc1->GetStackSym()->m_id);
+ callInstr->InsertBefore(bytecodeUses);
}
IR::Instr* byteCodeArgOutUse = IR::Instr::New(Js::OpCode::BytecodeArgOutUse, callInstr->m_func);
byteCodeArgOutUse->SetSrc1(implicitThisArgOut->GetSrc1());
+ callInstr->InsertBefore(byteCodeArgOutUse);
if (argumentsObjArgOut)
{
- byteCodeArgOutUse->SetSrc2(argumentsObjArgOut->GetSrc1());
+ byteCodeArgOutUse = IR::Instr::New(Js::OpCode::BytecodeArgOutUse, callInstr->m_func);
+ byteCodeArgOutUse->SetSrc1(argumentsObjArgOut->GetSrc1());
+ callInstr->InsertBefore(byteCodeArgOutUse);
}
- callInstr->InsertBefore(byteCodeArgOutUse);
// don't need the implicit "this" anymore
explicitThisArgOut->ReplaceSrc2(startCall->GetDst());
@@ -2931,15 +3106,34 @@ bool Inline::InlineApplyScriptTarget(IR::Instr *callInstr, const FunctionJITTime
startCall->GetSrc1()->AsIntConstOpnd()->IncrValue(-1); // update the count of argouts as seen by JIT, in the start call instruction
*returnInstr = InlineCallApplyTarget_Shared(callInstr, originalCallTargetOpndIsJITOpt, originalCallTargetStackSym, inlineeData, inlineCacheIndex,
- safeThis, /*isApplyTarget*/ true, /*isCallTarget*/ false, recursiveInlineDepth);
+ safeThis, /*isApplyTarget*/ true, targetType, callbackDefInstr, recursiveInlineDepth, callInstr);
return true;
}
IR::Instr *
-Inline::InlineCallApplyTarget_Shared(IR::Instr *callInstr, bool originalCallTargetOpndIsJITOpt, StackSym* originalCallTargetStackSym, const FunctionJITTimeInfo *const inlineeData,
- uint inlineCacheIndex, bool safeThis, bool isApplyTarget, bool isCallTarget, uint recursiveInlineDepth)
+Inline::InlineCallApplyTarget_Shared(
+ IR::Instr *callInstr,
+ bool originalCallTargetOpndIsJITOpt,
+ StackSym* originalCallTargetStackSym,
+ const FunctionJITTimeInfo *const inlineeData,
+ uint inlineCacheIndex,
+ bool safeThis,
+ bool isApplyTarget,
+ CallApplyTargetSourceType targetType,
+ IR::Instr * inlineeDefInstr,
+ uint recursiveInlineDepth,
+ IR::Instr * funcObjCheckInsertInstr)
{
- Assert(isApplyTarget ^ isCallTarget);
+ const bool isCallback = inlineeDefInstr != nullptr;
+
+#if ENABLE_DEBUG_CONFIG_OPTIONS
+ if (isCallback)
+ {
+ char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
+ INLINE_CALLBACKS_TRACE(_u("INLINING CALLBACK : Inlining callback for call/apply target : \t%s (%s)\n"), inlineeData->GetBody()->GetDisplayName(),
+ inlineeData->GetDebugNumberSet(debugStringBuffer));
+ }
+#endif
// returnValueOpnd
IR::RegOpnd * returnValueOpnd;
@@ -2970,6 +3164,30 @@ Inline::InlineCallApplyTarget_Shared(IR::Instr *callInstr, bool originalCallTarg
workItemData->jitData = (FunctionJITTimeDataIDL*)(inlineeData);
JITTimeWorkItem * jitWorkItem = JitAnew(this->topFunc->m_alloc, JITTimeWorkItem, workItemData);
+ const FunctionJITRuntimeInfo * runtimeInfo;
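+ // Pick the runtime inlinee data that matches how the target was discovered: LdFld inline cache, callback call site, or call/apply call-site profile.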
+ switch (targetType)
+ {
+ case CallApplyTargetSourceType::LdFld:
+ runtimeInfo = callInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetLdFldInlineeRuntimeData(inlineCacheIndex);
+ break;
+
+ case CallApplyTargetSourceType::Callback:
+ runtimeInfo = inlineeDefInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetInlineeForCallbackInlineeRuntimeData(static_cast<Js::ProfileId>(inlineeDefInstr->AsProfiledInstr()->u.profileId), inlineeData->GetBody()->GetAddr());
+ break;
+
+ case CallApplyTargetSourceType::Other:
+ {
+ Js::ProfileId callApplyCallSiteId = callInstr->m_func->GetJITFunctionBody()->GetCallApplyCallSiteIdForCallSiteId(callSiteId);
+ runtimeInfo = callInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetCallApplyTargetInlineeRuntimeData(callApplyCallSiteId);
+ break;
+ }
+
+ default:
+ Assert(UNREACHED);
+ __assume(UNREACHED);
+ break;
+ }
+
JITTimePolymorphicInlineCacheInfo * entryPointPolymorphicInlineCacheInfo = inlineeData->HasBody() ? this->topFunc->GetWorkItem()->GetInlineePolymorphicInlineCacheInfo(inlineeData->GetBody()->GetAddr()) : nullptr;
#if !FLOATVAR
Func * inlinee = JitAnew(this->topFunc->m_alloc,
@@ -2980,7 +3198,7 @@ Inline::InlineCallApplyTarget_Shared(IR::Instr *callInstr, bool originalCallTarg
this->topFunc->GetScriptContextInfo(),
this->topFunc->GetJITOutput()->GetOutputData(),
nullptr,
- callInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetLdFldInlineeRuntimeData(inlineCacheIndex),
+ runtimeInfo,
entryPointPolymorphicInlineCacheInfo,
this->topFunc->GetCodeGenAllocators(),
this->topFunc->GetNumberAllocator(),
@@ -3001,7 +3219,7 @@ Inline::InlineCallApplyTarget_Shared(IR::Instr *callInstr, bool originalCallTarg
this->topFunc->GetScriptContextInfo(),
this->topFunc->GetJITOutput()->GetOutputData(),
nullptr,
- callInstr->m_func->GetWorkItem()->GetJITTimeInfo()->GetLdFldInlineeRuntimeData(inlineCacheIndex),
+ runtimeInfo,
entryPointPolymorphicInlineCacheInfo,
this->topFunc->GetCodeGenAllocators(),
this->topFunc->GetCodeGenProfiler(),
@@ -3017,7 +3235,7 @@ Inline::InlineCallApplyTarget_Shared(IR::Instr *callInstr, bool originalCallTarg
// instrNext
IR::Instr* instrNext = callInstr->m_next;
- return InlineFunctionCommon(callInstr, originalCallTargetOpndIsJITOpt, originalCallTargetStackSym, inlineeData, inlinee, instrNext, returnValueOpnd, callInstr, nullptr, recursiveInlineDepth, safeThis, isApplyTarget);
+ return InlineFunctionCommon(callInstr, originalCallTargetOpndIsJITOpt, originalCallTargetStackSym, inlineeData, inlinee, instrNext, returnValueOpnd, funcObjCheckInsertInstr, nullptr, recursiveInlineDepth, safeThis, isApplyTarget);
}
IR::Opnd *
@@ -3029,7 +3247,7 @@ Inline::ConvertToInlineBuiltInArgOut(IR::Instr * argInstr)
}
IR::Instr*
-Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth)
+Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth, bool isCallInstanceFunction)
{
IR::Instr* instrNext = callInstr->m_next;
Func *func = callInstr->m_func;
@@ -3057,7 +3275,7 @@ Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, co
IR::Instr * returnInstr = nullptr;
if (!PHASE_OFF(Js::InlineCallTargetPhase, this->topFunc))
{
- if (InlineCallTarget(callInstr, inlinerData, &inlineeData, funcInfo, symCallerThis, &returnInstr, recursiveInlineDepth))
+ if (InlineCallTarget(callInstr, inlinerData, &inlineeData, funcInfo, symCallerThis, &returnInstr, recursiveInlineDepth, isCallInstanceFunction))
{
Assert(returnInstr);
return returnInstr;
@@ -3074,20 +3292,22 @@ Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, co
// We are trying to optimize this.superConstructor.call(this, a, b,c);
// argImplicitInstr represents this.superConstructor which we need to call directly.
- IR::Instr * argImplicitInstr = nullptr;
- IR::Instr * dummyInstr1 = nullptr;
- IR::Instr * dummyInstr2 = nullptr;
- this->GetArgInstrsForCallAndApply(callInstr, &argImplicitInstr, &dummyInstr1, &dummyInstr2, actualCount);
+ IR::Instr * argImplicitThisInstr = nullptr;
+ IR::Instr * argSecond = nullptr;
+ IR::Instr * dummyInstr = nullptr;
+ this->GetArgInstrsForCallAndApply(callInstr, &argImplicitThisInstr, &argSecond, &dummyInstr, actualCount);
- Assert(argImplicitInstr);
+ IR::Instr * functionInstr = isCallInstanceFunction ? argSecond : argImplicitThisInstr;
+ Assert(functionInstr);
IR::SymOpnd* orgLinkOpnd = callInstr->GetSrc2()->AsSymOpnd();
EmitFixedMethodOrFunctionObjectChecksForBuiltIns(callInstr, callInstr, funcInfo, false /*isPolymorphic*/, true /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/);
- InsertInlineeBuiltInStartEndTags(callInstr, actualCount);
+ IR::Instr *instr = InsertInlineeBuiltInStartEndTags(callInstr, actualCount);
+ instr->m_opcode = Js::OpCode::InlineNonTrackingBuiltInEnd;
- uint actualCountToInlinedCall = actualCount - 1;
+ uint actualCountToInlinedCall = actualCount - (isCallInstanceFunction ? 2 : 1);
IR::Instr *startCall = IR::Instr::New(Js::OpCode::StartCall, func);
startCall->SetDst(IR::RegOpnd::New(TyVar, func));
@@ -3095,7 +3315,7 @@ Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, co
callInstr->InsertBefore(startCall);
- callInstr->ReplaceSrc1(argImplicitInstr->GetSrc1());
+ callInstr->ReplaceSrc1(functionInstr->GetSrc1());
callInstr->UnlinkSrc2();
callInstr->m_opcode = Js::OpCode::CallI;
@@ -3110,9 +3330,9 @@ Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, co
IR::Opnd *orgSrc1 = orgArgout->GetSrc1();
// Change ArgOut to use temp as src1.
- StackSym * stackSym = StackSym::New(orgSrc1->GetStackSym()->GetType(), argImplicitInstr->m_func);
- IR::Opnd* tempDst = IR::RegOpnd::New(stackSym, orgSrc1->GetType(), argImplicitInstr->m_func);
- IR::Instr *assignInstr = IR::Instr::New(Func::GetLoadOpForType(orgSrc1->GetType()), tempDst, orgSrc1, argImplicitInstr->m_func);
+ StackSym * stackSym = StackSym::New(orgSrc1->GetStackSym()->GetType(), functionInstr->m_func);
+ IR::Opnd* tempDst = IR::RegOpnd::New(stackSym, orgSrc1->GetType(), functionInstr->m_func);
+ IR::Instr *assignInstr = IR::Instr::New(Func::GetLoadOpForType(orgSrc1->GetType()), tempDst, orgSrc1, functionInstr->m_func);
assignInstr->SetByteCodeOffset(orgArgout);
tempDst->SetIsJITOptimizedReg(true);
orgArgout->InsertBefore(assignInstr);
@@ -3127,70 +3347,79 @@ Inline::InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, co
insertBeforeInstr = clonedArgout;
}
clonedArgout->SetSrc2(startCall->GetDst());
- Assert(GetDefInstr(orgLinkOpnd) == argImplicitInstr);
+ Assert(GetDefInstr(orgLinkOpnd) == functionInstr);
return instrNext;
}
bool
Inline::InlineCallTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo** pInlineeData, const FunctionJITTimeInfo *callFuncInfo,
- const StackSym *symCallerThis, IR::Instr ** returnInstr, uint recursiveInlineDepth)
+ const StackSym *symCallerThis, IR::Instr ** returnInstr, uint recursiveInlineDepth, bool isCallInstanceFunction)
{
- IR::Opnd* src1 = callInstr->GetSrc1();
- Assert(src1->IsRegOpnd());
- StackSym* sym = src1->AsRegOpnd()->GetStackSym();
- if (!sym->IsSingleDef())
+ IR::Instr* callLdInstr = nullptr;
+ IR::Instr* callTargetLdInstr = nullptr;
+ if (!TryGetCallApplyAndTargetLdInstrs(callInstr, &callLdInstr, &callTargetLdInstr))
{
return false;
}
- IR::Instr* callLdInstr = sym->GetInstrDef();
- Assert(callLdInstr);
- IR::Instr* callTargetLdInstr = callLdInstr->m_prev;
- if (callTargetLdInstr->m_opcode != Js::OpCode::LdFldForCallApplyTarget ||
- ((callTargetLdInstr->AsProfiledInstr()->u.FldInfo().flags & Js::FldInfoFlags::FldInfo_FromAccessor) != 0))
+ const FunctionJITTimeInfo * inlineeData = nullptr;
+ Js::InlineCacheIndex inlineCacheIndex = Js::Constants::NoInlineCacheIndex;
+ IR::Instr * callbackDefInstr = nullptr;
+ CallApplyTargetSourceType targetType = CallApplyTargetSourceType::None;
+ if (!TryGetCallApplyInlineeData(inlinerData, callInstr, callLdInstr, callTargetLdInstr, &inlineeData, &inlineCacheIndex, &callbackDefInstr, isCallInstanceFunction, &targetType))
{
return false;
}
- IR::Opnd* callTargetLdOpnd = callTargetLdInstr->GetSrc1();
- if (!callTargetLdOpnd->IsSymOpnd() || !callTargetLdOpnd->AsSymOpnd()->IsPropertySymOpnd())
+ if (SkipCallApplyScriptTargetInlining_Shared(callInstr, inlinerData, inlineeData, /*isApplyTarget*/ false, /*isCallTarget*/ true))
{
+ *pInlineeData = inlineeData;
return false;
}
- const auto inlineCacheIndex = callTargetLdOpnd->AsPropertySymOpnd()->m_inlineCacheIndex;
- const auto inlineeData = inlinerData->GetLdFldInlinee(inlineCacheIndex);
+ const bool targetIsCallback = callbackDefInstr != nullptr;
- if (SkipCallApplyScriptTargetInlining_Shared(callInstr, inlinerData, inlineeData, /*isApplyTarget*/ false, /*isCallTarget*/ true))
+ if (!inlineeData->HasBody())
{
- *pInlineeData = inlineeData;
- return false;
+ if (targetIsCallback)
+ {
+ return false;
+ }
+
+ Js::OpCode builtInInlineOpCode;
+ ValueType returnType;
+ InliningDecider::GetBuiltInInfo(inlineeData, &builtInInlineOpCode, &returnType);
+ Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInForFuncInfo(inlineeData->GetLocalFunctionId());
+ switch (builtInId)
+ {
+ default:
+ {
+ if (CanInlineBuiltInFunction(callInstr, inlineeData, builtInInlineOpCode, inlinerData, builtInId, true))
+ {
+ return InlineCallBuiltInTarget(callInstr, inlinerData, inlineeData, callFuncInfo, callTargetLdInstr, builtInInlineOpCode, symCallerThis, recursiveInlineDepth, isCallInstanceFunction, returnInstr);
+ }
+ }
+ case Js::BuiltinFunction::JavascriptFunction_Apply:
+ case Js::BuiltinFunction::JavascriptFunction_Call:
+ case Js::BuiltinFunction::EngineInterfaceObject_CallInstanceFunction:
+ return false;
+ }
}
StackSym* originalCallTargetStackSym = callInstr->GetSrc1()->GetStackSym();
bool originalCallTargetOpndIsJITOpt = callInstr->GetSrc1()->GetIsJITOptimizedReg();
bool safeThis = false;
- if (!TryGetFixedMethodsForBuiltInAndTarget(callInstr, inlinerData, inlineeData, callFuncInfo, callLdInstr, callTargetLdInstr, safeThis, /*isApplyTarget*/ false))
- {
- return false;
- }
-
- IR::Instr* implicitThisArgOut = nullptr;
- IR::Instr* explicitThisArgOut = nullptr;
- callInstr->IterateArgInstrs([&] (IR::Instr* argInstr)
+ IR::Instr * funcObjCheckInsertInstr = TryGetFixedMethodsForBuiltInAndTarget(callInstr, inlinerData, inlineeData, callFuncInfo, callLdInstr, callTargetLdInstr, safeThis, /*isApplyTarget*/ false, targetIsCallback);
+ if (!funcObjCheckInsertInstr)
{
- explicitThisArgOut = implicitThisArgOut;
- implicitThisArgOut = argInstr;
-
- argInstr->GenerateBytecodeArgOutCapture(); // Generate BytecodeArgOutCapture here to capture the implicit "this" argout (which will be removed) as well,
- // so that any bailout in the call sequence restores the argouts stack as the interpreter would expect it to be.
- argInstr->GetDst()->AsSymOpnd()->GetStackSym()->DecrementArgSlotNum(); // We will be removing implicit "this" argout
return false;
- });
+ }
+ IR::Instr * explicitThisArgOut = nullptr;
+ AdjustArgoutsForCallTargetInlining(callInstr, &explicitThisArgOut, isCallInstanceFunction);
Assert(explicitThisArgOut);
- Assert(explicitThisArgOut->HasByteCodeArgOutCapture());
+
if (safeThis)
{
IR::Instr * byteCodeArgOutCapture = explicitThisArgOut->GetBytecodeArgOutCapture();
@@ -3202,6 +3431,54 @@ Inline::InlineCallTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inline
}
}
+ *returnInstr = InlineCallApplyTarget_Shared(callInstr, originalCallTargetOpndIsJITOpt, originalCallTargetStackSym, inlineeData, inlineCacheIndex,
+ safeThis, /*isApplyTarget*/ false, targetType, callbackDefInstr, recursiveInlineDepth, funcObjCheckInsertInstr);
+
+ return true;
+}
+
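+// Rewrites the argout chain for an inlined call/apply target: the implicit "this" argout (and, for CallInstanceFunction, the
+// function argout) is removed, the remaining arg slot numbers and offsets are shifted down, and bytecode argout captures are
+// generated so that any bailout in the call sequence can restore the interpreter's view of the argouts.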
+void
+Inline::AdjustArgoutsForCallTargetInlining(IR::Instr* callInstr, IR::Instr ** pExplicitThisArgOut, bool isCallInstanceFunction)
+{
+ Assert(pExplicitThisArgOut);
+
+ IR::Instr * firstArgOut = nullptr;
+ IR::Instr * secondArgOut = nullptr;
+ IR::Instr * thirdArgOut = nullptr;
+
+ callInstr->IterateArgInstrs([&](IR::Instr* argInstr)
+ {
+ thirdArgOut = secondArgOut;
+ secondArgOut = firstArgOut;
+ firstArgOut = argInstr;
+
+ argInstr->GenerateBytecodeArgOutCapture(); // Generate BytecodeArgOutCapture here to capture the implicit "this" argout (which will be removed) as well,
+ // so that any bailout in the call sequence restores the argouts stack as the interpreter would expect it to be.
+
+ StackSym * argSym = argInstr->GetDst()->AsSymOpnd()->GetStackSym();
+
+ Assert(argSym->m_offset == (argSym->GetArgSlotNum() - 1) * MachPtr);
+ argSym->DecrementArgSlotNum(); // We will be removing implicit "this" argout
+ if (argSym->GetArgSlotNum() != 0)
+ {
+ this->topFunc->SetArgOffset(argSym, argSym->m_offset - MachPtr);
+ }
+
+ if (isCallInstanceFunction && argSym->GetArgSlotNum() != 0)
+ {
+ argSym->DecrementArgSlotNum(); // We will also be removing the function argout
+ }
+
+ return false;
+ });
+
+ IR::Instr * implicitThisArgOut = firstArgOut;
+ IR::Instr * explicitThisArgOut = isCallInstanceFunction ? thirdArgOut : secondArgOut;
+
+ Assert(explicitThisArgOut);
+ Assert(explicitThisArgOut->HasByteCodeArgOutCapture());
+
IR::Opnd* linkOpnd = implicitThisArgOut->GetSrc2();
Assert(linkOpnd->IsRegOpnd() && linkOpnd->AsRegOpnd()->GetStackSym()->IsSingleDef());
Assert(linkOpnd->AsRegOpnd()->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::StartCall);
@@ -3209,17 +3486,57 @@ Inline::InlineCallTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inline
IR::Instr* startCall = linkOpnd->AsRegOpnd()->GetStackSym()->GetInstrDef();
explicitThisArgOut->ReplaceSrc2(startCall->GetDst());
+ *pExplicitThisArgOut = explicitThisArgOut;
+
+ if (isCallInstanceFunction)
+ {
+ IR::Instr * functionArg = secondArgOut;
+ IR::Instr * bytecodeArgOutUse = IR::Instr::New(Js::OpCode::BytecodeArgOutUse, callInstr->m_func, callInstr);
+ callInstr->ReplaceSrc1(functionArg->GetSrc1());
+ bytecodeArgOutUse->SetSrc1(functionArg->GetSrc1());
+ callInstr->InsertBefore(bytecodeArgOutUse); // Need to keep the function argout live till the call instruction for it to be captured by any bailout in the call sequence.
+ functionArg->Remove();
+ }
- IR::Instr * bytecodeArgOutUse = IR::Instr::New(Js::OpCode::BytecodeArgOutUse, callInstr->m_func);
+ IR::Instr * bytecodeArgOutUse = IR::Instr::New(Js::OpCode::BytecodeArgOutUse, callInstr->m_func, callInstr);
bytecodeArgOutUse->SetSrc1(implicitThisArgOut->GetSrc1());
callInstr->InsertBefore(bytecodeArgOutUse); // Need to keep the implicit "this" argout live till the call instruction for it to be captured by any bailout in the call sequence.
implicitThisArgOut->Remove();
startCall->SetSrc2(IR::IntConstOpnd::New(startCall->GetArgOutCount(/*getInterpreterArgOutCount*/ false), TyUint32, startCall->m_func));
- startCall->GetSrc1()->AsIntConstOpnd()->SetValue(startCall->GetSrc1()->AsIntConstOpnd()->GetValue() - 1);
- *returnInstr = InlineCallApplyTarget_Shared(callInstr, originalCallTargetOpndIsJITOpt, originalCallTargetStackSym, inlineeData, inlineCacheIndex,
- safeThis, /*isApplyTarget*/ false, /*isCallTarget*/ true, recursiveInlineDepth);
+ uint argsRemoved = isCallInstanceFunction ? 2 : 1;
+ startCall->GetSrc1()->AsIntConstOpnd()->SetValue(startCall->GetSrc1()->AsIntConstOpnd()->GetValue() - argsRemoved);
+}
+
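+// Inlines a built-in that is reached through Function.prototype.call (or CallInstanceFunction): the call target is swapped
+// to the loaded built-in, the call/apply-specific argouts are stripped, and the regular InlineBuiltInFunction path finishes
+// the job.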
+bool
+Inline::InlineCallBuiltInTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo* inlineeData, const FunctionJITTimeInfo *callFuncInfo, IR::Instr * callTargetLdInstr, Js::OpCode inlineOpCode,
+ const StackSym *symThis, uint recursiveInlineDepth, bool isCallInstanceFunction, IR::Instr ** returnInstr)
+{
+ Js::OpCode originalCallOpCode = callInstr->m_opcode;
+ StackSym* originalCallTargetStackSym = callInstr->GetSrc1()->GetStackSym();
+ bool originalCallTargetOpndJITOpt = callInstr->GetSrc1()->GetIsJITOptimizedReg();
+
+ bool safeThis = false;
+ if (!TryOptimizeCallInstrWithFixedMethod(callInstr, callFuncInfo, false /*isPolymorphic*/, true /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/, safeThis))
+ {
+ return false;
+ }
+
+ IR::ByteCodeUsesInstr * useCallTargetInstr = IR::ByteCodeUsesInstr::New(callInstr);
+ useCallTargetInstr->SetRemovedOpndSymbol(originalCallTargetOpndJITOpt, originalCallTargetStackSym->m_id);
+ callInstr->InsertBefore(useCallTargetInstr);
+
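+ // TryOptimizeCallInstrWithFixedMethod may have rewritten the opcode to CallIFixed; restore the
+ // original call opcode and retarget the call at the target loaded by callTargetLdInstr.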
+ callInstr->m_opcode = originalCallOpCode;
+ callInstr->ReplaceSrc1(callTargetLdInstr->GetDst());
+
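+ // Built-in targets don't need the explicit "this" argout back, so the out parameter is discarded here.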
+ IR::Instr * unused = nullptr;
+ AdjustArgoutsForCallTargetInlining(callInstr, &unused, isCallInstanceFunction);
+
+ bool isInlined = false;
+ Js::ProfileId callSiteId = static_cast<Js::ProfileId>(callInstr->AsProfiledInstr()->u.profileId);
+ *returnInstr = InlineBuiltInFunction(callInstr, inlineeData, inlineOpCode, inlinerData, symThis, &isInlined, callSiteId, recursiveInlineDepth, useCallTargetInstr);
+ Assert(isInlined);
return true;
}
@@ -3251,14 +3568,8 @@ Inline::SkipCallApplyScriptTargetInlining_Shared(IR::Instr *callInstr, const Fun
return true;
}
- if (!inlineeData->GetBody())
+ if (!inlineeData->HasBody() && isApplyTarget)
{
- if (isCallTarget)
- {
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: Skipping .call inlining, target is a built-in\tCaller: %s\t(#%d) \tTop Func:%s\t(#%d)\n"),
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer),
- this->topFunc->GetJITFunctionBody()->GetDisplayName(), this->topFunc->GetDebugNumberSet(debugStringBuffer2));
- }
return true;
}
@@ -3274,9 +3585,9 @@ Inline::SkipCallApplyScriptTargetInlining_Shared(IR::Instr *callInstr, const Fun
return false;
}
-bool
+IR::Instr *
Inline::TryGetFixedMethodsForBuiltInAndTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo* inlineeData, const FunctionJITTimeInfo *builtInFuncInfo,
- IR::Instr* builtInLdInstr, IR::Instr* targetLdInstr, bool& safeThis, bool isApplyTarget)
+ IR::Instr* builtInLdInstr, IR::Instr* targetLdInstr, bool& safeThis, bool isApplyTarget, bool isCallback)
{
#if ENABLE_DEBUG_CONFIG_OPTIONS
char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
@@ -3292,64 +3603,49 @@ Inline::TryGetFixedMethodsForBuiltInAndTarget(IR::Instr *callInstr, const Functi
IR::ByteCodeUsesInstr * useCallTargetInstr = IR::ByteCodeUsesInstr::New(callInstr);
- safeThis = false;
- // Check if we can get fixed method for call
- if (TryOptimizeCallInstrWithFixedMethod(callInstr, builtInFuncInfo/*funcinfo for call*/, false /*isPolymorphic*/, true /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/,
- safeThis /*unused here*/, true /*dontOptimizeJustCheck*/))
- {
- Assert(callInstr->m_opcode == originalCallOpCode); // check that we didn't change the opcode to CallIFixed.
- callInstr->ReplaceSrc1(targetLdInstr->GetDst());
- safeThis = false;
- // Check if we can get fixed method for call target
- if (!TryOptimizeCallInstrWithFixedMethod(callInstr, inlineeData, false /*isPolymorphic*/, false /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/,
- safeThis /*unused here*/, true /*dontOptimizeJustCheck*/))
- {
- callInstr->ReplaceSrc1(builtInLdInstr->GetDst());
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: Skipping %s target inlining, did not get fixed method for %s target \tInlinee: %s (#%d)\tCaller: %s\t(#%d) \tTop Func:%s\t(#%d)\n"), isApplyTarget ? _u("apply") : _u("call"), isApplyTarget ? _u("apply") : _u("call"),
- inlineeData->GetBody()->GetDisplayName(), inlineeData->GetDebugNumberSet(debugStringBuffer),
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer2),
- this->topFunc->GetJITFunctionBody()->GetDisplayName(), this->topFunc->GetDebugNumberSet(debugStringBuffer3));
- return false;
- }
+ IR::Opnd * functionOpnd;
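+ // For a callback the target function is recovered from the callback's definition via GetCallbackFunctionOpnd;
+ // otherwise it is the value loaded by targetLdInstr.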
+ if (isCallback)
+ {
+ functionOpnd = GetCallbackFunctionOpnd(callInstr);
}
else
{
- INLINE_TESTTRACE(_u("INLINING: Skip Inline: Skipping %s target inlining, did not get fixed method for %s \tInlinee: %s (#%d)\tCaller: %s\t(#%d) \tTop Func:%s\t(#%d)\n"), isApplyTarget ? _u("apply") : _u("call"), isApplyTarget ? _u("apply") : _u("call"),
- inlineeData->GetBody()->GetDisplayName(), inlineeData->GetDebugNumberSet(debugStringBuffer),
- inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer2),
- this->topFunc->GetJITFunctionBody()->GetDisplayName(), this->topFunc->GetDebugNumberSet(debugStringBuffer3));
- return false;
+ functionOpnd = targetLdInstr->GetDst();
}
- Assert(callInstr->m_opcode == originalCallOpCode);
- callInstr->ReplaceSrc1(builtInLdInstr->GetDst());
-
// Emit Fixed Method check for apply/call
safeThis = false;
- TryOptimizeCallInstrWithFixedMethod(callInstr, builtInFuncInfo/*funcinfo for apply/call */, false /*isPolymorphic*/, true /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/, safeThis /*unused here*/);
-
- // If we optimized the call instruction for a fixed function, we must extend the function object's lifetime until after
- // the bailout on non-stack arguments.
- Assert(callInstr->m_opcode == Js::OpCode::CallIFixed);
+ if (!TryOptimizeCallInstrWithFixedMethod(callInstr, builtInFuncInfo/*funcinfo for apply/call */, false /*isPolymorphic*/, true /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/, safeThis /*unused here*/))
+ {
+ callInstr->ReplaceSrc1(builtInLdInstr->GetDst());
+ INLINE_CALLBACKS_TRACE(_u("INLINING: Skip Inline: Skipping callback.%s target inlining, did not get fixed method for %s \tInlinee: %s (%s)\tCaller: %s\t(%s) \tTop Func:%s\t(%s)\n"), isApplyTarget ? _u("apply") : _u("call"), isApplyTarget ? _u("apply") : _u("call"),
+ inlineeData->GetBody()->GetDisplayName(), inlineeData->GetDebugNumberSet(debugStringBuffer),
+ inlinerData->GetBody()->GetDisplayName(), inlinerData->GetDebugNumberSet(debugStringBuffer2),
+ this->topFunc->GetJITFunctionBody()->GetDisplayName(), this->topFunc->GetDebugNumberSet(debugStringBuffer3));
+ return nullptr;
+ }
+
useCallTargetInstr->SetRemovedOpndSymbol(originalCallTargetOpndJITOpt, originalCallTargetStackSym->m_id);
- // Make the target of apply/call as the target of the call instruction
- callInstr->ReplaceSrc1(targetLdInstr->GetDst());
callInstr->m_opcode = originalCallOpCode;
+ callInstr->ReplaceSrc1(functionOpnd);
+
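+ // Callback targets skip the fixed-method check on the target function itself; just record the
+ // bytecode use of the original call target and return.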
+ if (isCallback)
+ {
+ callInstr->InsertBefore(useCallTargetInstr);
+ return useCallTargetInstr;
+ }
- //Emit Fixed Method check for apply/call target
originalCallTargetStackSym = callInstr->GetSrc1()->GetStackSym();
safeThis = false;
- TryOptimizeCallInstrWithFixedMethod(callInstr, inlineeData, false /*isPolymorphic*/, false /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/, safeThis /*unused here*/);
-
- // If we optimized the call instruction for a fixed function, we must extend the function object's lifetime until after
- // the bailout on non-stack arguments.
- Assert(callInstr->m_opcode == Js::OpCode::CallIFixed);
- useCallTargetInstr->SetRemovedOpndSymbol(originalCallTargetOpndJITOpt, originalCallTargetStackSym->m_id);
+ // Check if we can get fixed method for call target
+ if (TryOptimizeCallInstrWithFixedMethod(callInstr, inlineeData, false /*isPolymorphic*/, false /*isBuiltIn*/, false /*isCtor*/, true /*isInlined*/, safeThis /*unused here*/))
+ {
+ useCallTargetInstr->SetRemovedOpndSymbol(originalCallTargetOpndJITOpt, originalCallTargetStackSym->m_id);
+ }
callInstr->InsertBefore(useCallTargetInstr);
-
- return true;
+ return useCallTargetInstr;
}
void
@@ -3357,6 +3653,10 @@ Inline::SetupInlineInstrForCallDirect(Js::BuiltinFunction builtInId, IR::Instr*
{
switch(builtInId)
{
+ case Js::BuiltinFunction::JavascriptArray_At:
+ callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperArray_At, callInstr->m_func));
+ break;
+
case Js::BuiltinFunction::JavascriptArray_Concat:
callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperArray_Concat, callInstr->m_func));
break;
@@ -3397,6 +3697,10 @@ Inline::SetupInlineInstrForCallDirect(Js::BuiltinFunction builtInId, IR::Instr*
callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperArray_Unshift, callInstr->m_func));
break;
+ case Js::BuiltinFunction::JavascriptString_At:
+ callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperString_At, callInstr->m_func));
+ break;
+
case Js::BuiltinFunction::JavascriptString_Concat:
callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperString_Concat, callInstr->m_func));
break;
@@ -3509,6 +3813,10 @@ Inline::SetupInlineInstrForCallDirect(Js::BuiltinFunction builtInId, IR::Instr*
callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperObject_HasOwnProperty, callInstr->m_func));
break;
+ case Js::BuiltinFunction::JavascriptObject_HasOwn:
+ callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperObject_HasOwn, callInstr->m_func));
+ break;
+
case Js::BuiltinFunction::JavascriptArray_IsArray:
callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperArray_IsArray, callInstr->m_func));
break;
@@ -3849,6 +4157,7 @@ Inline::InlineFunctionCommon(IR::Instr *callInstr, bool originalCallTargetOpndIs
callInstr->m_opcode = Js::OpCode::InlineeStart;
// Set it to belong to the inlinee, so that we can use the actual count when lowering InlineeStart
+ inlinee->SetInlineeStart(callInstr);
callInstr->m_func = inlinee;
callInstr->SetDst(IR::RegOpnd::New(returnValueOpnd ? returnValueOpnd->GetType() : TyVar, inlinee));
// Put the meta arguments that the stack walker expects to find on the stack.
@@ -3875,78 +4184,6 @@ Inline::InlineFunctionCommon(IR::Instr *callInstr, bool originalCallTargetOpndIs
return instrNext;
}
-#ifdef ENABLE_DOM_FAST_PATH
-// we have LdFld, src1 obj, src2: null; dest: return value
-// We need to convert it to inlined method call.
-// We cannot do CallDirect as it requires ArgOut and that cannot be hoisted/copyprop'd
-// Create a new OpCode, DOMFastPathGetter. The OpCode takes three arguments:
-// The function object, the "this" instance object, and the helper routine as we have one for each index
-// A functionInfo->Index# table is created in scriptContext (and potentially movable to threadContext if WS is not a concern).
-// we use the table to identify the helper that needs to be lowered.
-// At lower time we create the call to helper, which is function entrypoint at this time.
-void Inline::InlineDOMGetterSetterFunction(IR::Instr *ldFldInstr, const FunctionJITTimeInfo *const inlineeData, const FunctionJITTimeInfo *const inlinerData)
-{
- intptr_t functionInfo = inlineeData->GetFunctionInfoAddr();
-
- Assert(ldFldInstr->GetSrc1()->IsSymOpnd() && ldFldInstr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
-
- Assert(ldFldInstr->GetSrc1()->AsPropertySymOpnd()->HasObjTypeSpecFldInfo());
- Assert(ldFldInstr->GetSrc1()->AsPropertySymOpnd()->GetObjTypeSpecInfo()->UsesAccessor());
-
- // Find the helper routine for this functionInfo.
- IR::JnHelperMethod helperMethod = this->topFunc->GetScriptContextInfo()->GetDOMFastPathHelper(functionInfo);
- if (helperMethod == IR::HelperInvalid)
- {
- // abort inlining if helper isn't found
- return;
- }
- // Find the instance object (External object).
- PropertySym * fieldSym = ldFldInstr->GetSrc1()->AsSymOpnd()->m_sym->AsPropertySym();
- IR::RegOpnd * instanceOpnd = IR::RegOpnd::New(fieldSym->m_stackSym, TyMachPtr, ldFldInstr->m_func);
-
- // Find the function object from getter inline cache. Need bailout to verify.
- IR::Instr *ldMethodFld = IR::Instr::New(Js::OpCode::LdMethodFromFlags, IR::RegOpnd::New(TyVar, ldFldInstr->m_func), ldFldInstr->GetSrc1(), ldFldInstr->m_func);
- ldFldInstr->InsertBefore(ldMethodFld);
- ldMethodFld = ldMethodFld->ConvertToBailOutInstr(ldFldInstr, IR::BailOutFailedInlineTypeCheck);
-
- ldFldInstr->ReplaceSrc1(ldMethodFld->GetDst());
- ldMethodFld->SetByteCodeOffset(ldFldInstr);
-
- // generate further object/type bailout
- PrepareInsertionPoint(ldFldInstr, inlineeData, ldFldInstr);
-
- // We have three arguments to pass to the OpCode. Create a new ExtendArg_A opcode to chain up the argument. It is similar to ArgOut chain
- // except that it is not argout.
- // The Opcode sequence is like:
- // (dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
- // (dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
- // method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
- IR::Instr* extendArg0 = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, ldFldInstr->m_func), instanceOpnd, ldFldInstr->m_func);
- ldFldInstr->InsertBefore(extendArg0);
- IR::Instr* extendArg1 = IR::Instr::New(Js::OpCode::ExtendArg_A, IR::RegOpnd::New(TyVar, ldFldInstr->m_func), ldMethodFld->GetDst(), extendArg0->GetDst(), ldFldInstr->m_func);
- ldFldInstr->InsertBefore(extendArg1);
- ldFldInstr->ReplaceSrc1(IR::HelperCallOpnd::New(helperMethod, ldFldInstr->m_func));
- ldFldInstr->SetSrc2(extendArg1->GetDst());
- ldFldInstr->m_opcode = Js::OpCode::DOMFastPathGetter;
-
- StackSym * tmpSym = StackSym::New(ldFldInstr->GetDst()->GetType(), ldFldInstr->m_func);
- IR::Opnd * tmpDst = IR::RegOpnd::New(tmpSym, tmpSym->GetType(), ldFldInstr->m_func);
- // Ensure that the original LdFld's dst profile data is also copied to the new instruction for later
- // type-specific optimizations. Otherwise, this optimization to reduce calls into the host will also
- // result in relatively more expensive calls in the runtime.
- tmpDst->SetValueType(ldFldInstr->GetDst()->GetValueType());
-
- IR::Opnd * callInstrDst = ldFldInstr->UnlinkDst();
- ldFldInstr->SetDst(tmpDst);
-
- IR::Instr * ldInstr = IR::Instr::New(Js::OpCode::Ld_A, callInstrDst, tmpDst, ldFldInstr->m_func);
- ldFldInstr->InsertAfter(ldInstr);
-
- this->topFunc->SetHasInlinee();
-
- InsertStatementBoundary(ldInstr->m_next);
-}
-#endif
void
Inline::InsertStatementBoundary(IR::Instr * instrNext)
{
@@ -4305,106 +4542,53 @@ Inline::SplitConstructorCallCommon(
}
void
-Inline::InsertObjectCheck(IR::Instr *callInstr, IR::Instr* insertBeforeInstr, IR::Instr*bailOutIfNotObject)
+Inline::InsertFunctionObjectCheck(IR::RegOpnd * funcOpnd, IR::Instr *insertBeforeInstr, IR::Instr *bailOutInstr, const FunctionJITTimeInfo *funcInfo)
{
- // Bailout if 'functionRegOpnd' is not an object.
- bailOutIfNotObject->SetSrc1(callInstr->GetSrc1()->AsRegOpnd());
- bailOutIfNotObject->SetByteCodeOffset(insertBeforeInstr);
- insertBeforeInstr->InsertBefore(bailOutIfNotObject);
-}
+ Js::BuiltinFunction index = Js::JavascriptLibrary::GetBuiltInForFuncInfo(funcInfo->GetLocalFunctionId());
+ AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
-void
-Inline::InsertFunctionTypeIdCheck(IR::Instr *callInstr, IR::Instr* insertBeforeInstr, IR::Instr* bailOutIfNotJsFunction)
-{
- // functionTypeRegOpnd = Ld functionRegOpnd->type
- IR::IndirOpnd *functionTypeIndirOpnd = IR::IndirOpnd::New(callInstr->GetSrc1()->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, callInstr->m_func);
- IR::RegOpnd *functionTypeRegOpnd = IR::RegOpnd::New(TyVar, this->topFunc);
- IR::Instr *instr = IR::Instr::New(Js::OpCode::Ld_A, functionTypeRegOpnd, functionTypeIndirOpnd, callInstr->m_func);
- if(instr->m_func->HasByteCodeOffset())
- {
- instr->SetByteCodeOffset(insertBeforeInstr);
- }
- insertBeforeInstr->InsertBefore(instr);
-
- CompileAssert(sizeof(Js::TypeId) == sizeof(int32));
- // if (functionTypeRegOpnd->typeId != TypeIds_Function) goto $noInlineLabel
- // BrNeq_I4 $noInlineLabel, functionTypeRegOpnd->typeId, TypeIds_Function
- IR::IndirOpnd *functionTypeIdIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, callInstr->m_func);
- IR::IntConstOpnd *typeIdFunctionConstOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, callInstr->m_func);
- bailOutIfNotJsFunction->SetSrc1(functionTypeIdIndirOpnd);
- bailOutIfNotJsFunction->SetSrc2(typeIdFunctionConstOpnd);
- insertBeforeInstr->InsertBefore(bailOutIfNotJsFunction);
+ bailOutInstr->SetSrc1(funcOpnd);
+ bailOutInstr->SetSrc2(IR::IntConstOpnd::New(index, TyInt32, insertBeforeInstr->m_func));
+ insertBeforeInstr->InsertBefore(bailOutInstr);
}
void
-Inline::InsertJsFunctionCheck(IR::Instr *callInstr, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind)
+Inline::InsertJsFunctionCheck(IR::Instr * callInstr, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind)
{
// This function only inserts bailout for tagged int & TypeIds_Function.
// As of now this is only used for polymorphic inlining.
Assert(bailOutKind == IR::BailOutOnPolymorphicInlineFunction);
-
Assert(insertBeforeInstr);
Assert(insertBeforeInstr->m_func == callInstr->m_func);
- // bailOutIfNotFunction is primary bailout instruction
- IR::Instr* bailOutIfNotFunction = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, insertBeforeInstr, callInstr->m_func);
-
- IR::Instr *bailOutIfNotObject = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, bailOutKind, bailOutIfNotFunction->GetBailOutInfo(),callInstr->m_func);
- InsertObjectCheck(callInstr, insertBeforeInstr, bailOutIfNotObject);
-
- InsertFunctionTypeIdCheck(callInstr, insertBeforeInstr, bailOutIfNotFunction);
-
-}
-
-void
-Inline::InsertFunctionInfoCheck(IR::Instr *callInstr, IR::Instr *insertBeforeInstr, IR::Instr* bailoutInstr, const FunctionJITTimeInfo *funcInfo)
-{
- // if (JavascriptFunction::FromVar(r1)->functionInfo != funcInfo) goto noInlineLabel
- // BrNeq_I4 noInlineLabel, r1->functionInfo, funcInfo
- IR::IndirOpnd* opndFuncInfo = IR::IndirOpnd::New(callInstr->GetSrc1()->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, callInstr->m_func);
- IR::AddrOpnd* inlinedFuncInfo = IR::AddrOpnd::New(funcInfo->GetFunctionInfoAddr(), IR::AddrOpndKindDynamicFunctionInfo, callInstr->m_func);
- bailoutInstr->SetSrc1(opndFuncInfo);
- bailoutInstr->SetSrc2(inlinedFuncInfo);
-
- insertBeforeInstr->InsertBefore(bailoutInstr);
-}
-
-void
-Inline::InsertFunctionObjectCheck(IR::Instr *callInstr, IR::Instr *insertBeforeInstr, IR::Instr *bailOutInstr, const FunctionJITTimeInfo *funcInfo)
-{
- Js::BuiltinFunction index = Js::JavascriptLibrary::GetBuiltInForFuncInfo(funcInfo->GetLocalFunctionId());
- AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
-
- bailOutInstr->SetSrc1(callInstr->GetSrc1()->AsRegOpnd());
- bailOutInstr->SetSrc2(IR::IntConstOpnd::New(index, TyInt32, callInstr->m_func));
- insertBeforeInstr->InsertBefore(bailOutInstr);
+ // Two bailout checks, an object check followed by a function type ID check, are required. These bailout instructions are created
+ // when lowering checkFunctionEntryPoint rather than being created here as checkFunctionEntryPoint can be hoisted outside of a loop.
+ IR::Instr *checkIsFuncObj = IR::BailOutInstr::New(Js::OpCode::CheckIsFuncObj, bailOutKind, insertBeforeInstr, callInstr->m_func);
+ checkIsFuncObj->SetSrc1(callInstr->GetSrc1()->AsRegOpnd());
+ checkIsFuncObj->SetByteCodeOffset(insertBeforeInstr);
+ insertBeforeInstr->InsertBefore(checkIsFuncObj);
}
IR::Instr *
-Inline::PrepareInsertionPoint(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind)
+Inline::PrepareInsertionPoint(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, IR::Instr *insertBeforeInstr)
{
Assert(insertBeforeInstr);
Assert(insertBeforeInstr->m_func == callInstr->m_func);
- Assert(bailOutKind == IR::BailOutOnInlineFunction);
- // FunctionBody check is the primary bailout instruction, create it first
- IR::BailOutInstr* primaryBailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, insertBeforeInstr, callInstr->m_func);
+ IR::Instr* checkFuncInfo = IR::BailOutInstr::New(Js::OpCode::CheckFuncInfo, IR::BailOutOnInlineFunction, insertBeforeInstr, callInstr->m_func);
+ checkFuncInfo->SetSrc1(callInstr->GetSrc1()->AsRegOpnd());
- // 1. Bailout if function object is not an object.
- IR::Instr *bailOutIfNotObject = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject,
- bailOutKind,
- primaryBailOutInstr->GetBailOutInfo(),
- callInstr->m_func);
- InsertObjectCheck(callInstr, insertBeforeInstr, bailOutIfNotObject);
+ IR::AddrOpnd* inlinedFuncInfo = IR::AddrOpnd::New(funcInfo->GetFunctionInfoAddr(), IR::AddrOpndKindDynamicFunctionInfo, insertBeforeInstr->m_func);
+ checkFuncInfo->SetSrc2(inlinedFuncInfo);
- // 2. Bailout if function object is not a TypeId_Function
- IR::Instr* bailOutIfNotJsFunction = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, primaryBailOutInstr->GetBailOutInfo(), callInstr->m_func);
- InsertFunctionTypeIdCheck(callInstr, insertBeforeInstr, bailOutIfNotJsFunction);
+ checkFuncInfo->SetByteCodeOffset(insertBeforeInstr);
+ insertBeforeInstr->InsertBefore(checkFuncInfo);
- // 3. Bailout if function body doesn't match funcInfo
- InsertFunctionInfoCheck(callInstr, insertBeforeInstr, primaryBailOutInstr, funcInfo);
+ // checkFuncInfo can be hoisted later and then have its BailOutInfo garbage collected. Other instructions (ex: BailOnNotStackArgs) share
+ // checkFuncInfo's BailOutInfo. Explicitly force sharedBailOutKind to be true to stop this BailOutInfo from being garbage collected.
+ checkFuncInfo->ShareBailOut();
- return primaryBailOutInstr;
+ return checkFuncInfo;
}
IR::ByteCodeUsesInstr*
@@ -4427,7 +4611,7 @@ Inline::EmitFixedMethodOrFunctionObjectChecksForBuiltIns(IR::Instr *callInstr, I
else
{
IR::BailOutInstr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotBuiltIn, IR::BailOutOnInlineFunction, callInstr, callInstr->m_func);
- InsertFunctionObjectCheck(callInstr, funcObjCheckInsertInstr, bailOutInstr, inlineeInfo);
+ InsertFunctionObjectCheck(callInstr->GetSrc1()->AsRegOpnd(), funcObjCheckInsertInstr, bailOutInstr, inlineeInfo);
}
return useCallTargetInstr;
}
@@ -5209,7 +5393,6 @@ Inline::MapFormals(Func *inlinee,
case Js::OpCode::LdThis:
- case Js::OpCode::StrictLdThis:
// Optimization of LdThis may be possible.
// Verify that this is a use of the "this" passed by the caller (not a nested function).
if (instr->GetSrc1()->AsRegOpnd()->m_sym == symThis)
@@ -5455,7 +5638,7 @@ Inline::DoCheckThisOpt(IR::Instr * instr)
// If the instr is an inlined LdThis, try to replace it with a CheckThis
// that will bail out if a helper call is required to get the real "this" pointer.
- Assert(instr->m_opcode == Js::OpCode::LdThis || instr->m_opcode == Js::OpCode::StrictLdThis);
+ Assert(instr->m_opcode == Js::OpCode::LdThis);
Assert(instr->IsInlined());
// Create the CheckThis. The target is the original offset, i.e., the LdThis still has to be executed.
@@ -5464,7 +5647,7 @@ Inline::DoCheckThisOpt(IR::Instr * instr)
instr->FreeSrc2();
}
IR::Instr *newInstr =
- IR::BailOutInstr::New( instr->m_opcode == Js::OpCode::LdThis ? Js::OpCode::CheckThis : Js::OpCode::StrictCheckThis, IR::BailOutCheckThis, instr, instr->m_func);
+ IR::BailOutInstr::New(Js::OpCode::CheckThis, IR::BailOutCheckThis, instr, instr->m_func);
// Just re-use the original src1 since the LdThis will usually be deleted.
newInstr->SetSrc1(instr->GetSrc1());
newInstr->SetByteCodeOffset(instr);
diff --git a/lib/Backend/Inline.h b/lib/Backend/Inline.h
index f180fa66e9a..d502a8cf511 100644
--- a/lib/Backend/Inline.h
+++ b/lib/Backend/Inline.h
@@ -4,6 +4,14 @@
//-------------------------------------------------------------------------------------------------------
#pragma once
+enum class CallApplyTargetSourceType : byte
+{
+ None,
+ LdFld,
+ Callback,
+ Other
+};
+
class Inline
{
public:
@@ -57,43 +65,52 @@ class Inline
bool dontOptimizeJustCheck = false, uint i = 0 /*i-th inlinee at a polymorphic call site*/);
intptr_t TryOptimizeInstrWithFixedDataProperty(IR::Instr *&instr);
IR::Instr * InlineScriptFunction(IR::Instr *callInstr, const FunctionJITTimeInfo *const inlineeData, const StackSym *symThis, const Js::ProfileId profileId, bool* pIsInlined, IR::Instr * inlineeDefInstr, uint recursiveInlineDepth);
-#ifdef ENABLE_DOM_FAST_PATH
- void InlineDOMGetterSetterFunction(IR::Instr *ldFldInstr, const FunctionJITTimeInfo *const inlineeData, const FunctionJITTimeInfo *const inlinerData);
-#endif
IR::Instr * InlineGetterSetterFunction(IR::Instr *accessorInstr, const FunctionJITTimeInfo *const inlineeData, const StackSym *symCallerThis, const uint inlineCacheIndex, bool isGetter, bool *pIsInlined, uint recursiveInlineDepth);
IR::Instr * InlineFunctionCommon(IR::Instr *callInstr, bool originalCallTargetOpndIsJITOpt, StackSym* originalCallTargetStackSym, const FunctionJITTimeInfo *funcInfo, Func *inlinee, IR::Instr *instrNext,
IR::RegOpnd * returnValueOpnd, IR::Instr *inlineBailoutChecksBeforeInstr, const StackSym *symCallerThis, uint recursiveInlineDepth, bool safeThis = false, bool isApplyTarget = false);
IR::Instr * SimulateCallForGetterSetter(IR::Instr *accessorInstr, IR::Instr* insertInstr, IR::PropertySymOpnd* methodOpnd, bool isGetter);
IR::Instr * InlineApply(IR::Instr *callInstr, const FunctionJITTimeInfo * applyData, const FunctionJITTimeInfo * inlinerData, const StackSym *symThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth, uint argsCount);
- IR::Instr * InlineApplyBuiltInTargetWithArray(IR::Instr *callInstr, const FunctionJITTimeInfo * applyInfo, const FunctionJITTimeInfo * builtInInfo);
+ IR::Instr * InlineApplyBuiltInTargetWithArray(IR::Instr *callInstr, const FunctionJITTimeInfo * applyInfo, const FunctionJITTimeInfo * builtInInfo, bool * pIsInlined);
IR::Instr * InlineApplyWithArgumentsObject(IR::Instr * callInstr, IR::Instr * argsObjectArgInstr, const FunctionJITTimeInfo * inlineeInfo);
IR::Instr * InlineApplyWithoutArrayArgument(IR::Instr *callInstr, const FunctionJITTimeInfo * applyInfo, const FunctionJITTimeInfo * applyTargetInfo);
bool InlineApplyScriptTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo** pInlineeData, const FunctionJITTimeInfo * applyFuncInfo,
const StackSym *symThis, IR::Instr ** returnInstr, uint recursiveInlineDepth, bool isArrayOpndArgumentsObject, uint argsCount);
void GetArgInstrsForCallAndApply(IR::Instr* callInstr, IR::Instr** implicitThisArgOut, IR::Instr** explicitThisArgOut, IR::Instr** argumentsOrArrayArgOut, uint &argOutCount);
- _Success_(return != false) bool TryGetApplyAndTargetLdInstrs(IR::Instr * callInstr, _Outptr_result_nullonfailure_ IR::Instr ** applyLdInstr, _Outptr_result_nullonfailure_ IR::Instr ** applyTargetLdInstr);
- IR::Instr * InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, const FunctionJITTimeInfo * inlinerData, const StackSym *symThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth);
+ _Success_(return != false) bool TryGetCallApplyAndTargetLdInstrs(IR::Instr * callInstr, _Outptr_result_nullonfailure_ IR::Instr ** callApplyLdInstr, _Outptr_result_nullonfailure_ IR::Instr ** callApplyTargetLdInstr);
+ IR::Instr * InlineCall(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, const FunctionJITTimeInfo * inlinerData, const StackSym *symThis, bool* pIsInlined, uint callSiteId, uint recursiveInlineDepth, bool isCallInstanceFunction);
bool InlineCallTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo** pInlineeData, const FunctionJITTimeInfo *callFuncInfo,
- const StackSym *symThis, IR::Instr ** returnInstr, uint recursiveInlineDepth);
+ const StackSym *symThis, IR::Instr ** returnInstr, uint recursiveInlineDepth, bool isCallInstanceFunction);
+ bool InlineCallBuiltInTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo* inlineeData, const FunctionJITTimeInfo *callFuncInfo, IR::Instr * callTargetLdInstr, Js::OpCode inlineOpcode,
+ const StackSym *symThis, uint recursiveInlineDepth, bool isCallInstanceFunction, IR::Instr ** returnInstr);
+ void AdjustArgoutsForCallTargetInlining(IR::Instr* callInstr, IR::Instr** pExplicitThisArgOut, bool isCallInstanceFunction);
+ bool TryGetCallApplyInlineeData(const FunctionJITTimeInfo* inlinerData, IR::Instr * callInstr, IR::Instr * callApplyLdInstr, IR::Instr * callApplyTargetLdInstr, const FunctionJITTimeInfo ** inlineeData, Js::InlineCacheIndex * inlineCacheIndex,
+ IR::Instr ** callbackDefInstr, bool isCallInstanceFunction, CallApplyTargetSourceType* targetType);
bool InlConstFoldArg(IR::Instr *instr, __in_ecount_opt(callerArgOutCount) IR::Instr *callerArgOuts[], Js::ArgSlot callerArgOutCount);
bool InlConstFold(IR::Instr *instr, IntConstType *pValue, __in_ecount_opt(callerArgOutCount) IR::Instr *callerArgOuts[], Js::ArgSlot callerArgOutCount);
IR::Instr * InlineCallApplyTarget_Shared(IR::Instr *callInstr, bool originalCallTargetOpndIsJITOpt, StackSym* originalCallTargetStackSym, const FunctionJITTimeInfo*const inlineeData,
- uint inlineCacheIndex, bool safeThis, bool isApplyTarget, bool isCallTarget, uint recursiveInlineDepth);
+ uint inlineCacheIndex, bool safeThis, bool isApplyTarget, CallApplyTargetSourceType targetType, IR::Instr * inlineeDefInstr, uint recursiveInlineDepth, IR::Instr * funcObjCheckInsertInstr);
bool SkipCallApplyScriptTargetInlining_Shared(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo* inlineeData, bool isApplyTarget, bool isCallTarget);
- bool TryGetFixedMethodsForBuiltInAndTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo* inlineeData, const FunctionJITTimeInfo *builtInFuncInfo,
- IR::Instr* builtInLdInstr, IR::Instr* targetLdInstr, bool& safeThis, bool isApplyTarget);
+ IR::Instr * TryGetFixedMethodsForBuiltInAndTarget(IR::Instr *callInstr, const FunctionJITTimeInfo* inlinerData, const FunctionJITTimeInfo* inlineeData, const FunctionJITTimeInfo *builtInFuncInfo,
+ IR::Instr* builtInLdInstr, IR::Instr* targetLdInstr, bool& safeThis, bool isApplyTarget, bool isCallback);
- IR::Instr * InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, Js::OpCode inlineCallOpCode, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint profileId, uint recursiveInlineDepth);
+ bool CanInlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, Js::OpCode inlineCallOpCode, const FunctionJITTimeInfo * inlinerData, Js::BuiltinFunction builtInId, bool isCallApplyTarget);
+ IR::Instr * InlineBuiltInFunction(IR::Instr *callInstr, const FunctionJITTimeInfo * inlineeData, Js::OpCode inlineCallOpCode, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, bool* pIsInlined, uint profileId, uint recursiveInlineDepth, IR::Instr * funcObjCheckInsertInstr);
IR::Instr * InlineFunc(IR::Instr *callInstr, const FunctionJITTimeInfo *const inlineeData, const uint profileId);
bool SplitConstructorCall(IR::Instr *const newObjInstr, const bool isInlined, const bool doneFixedMethodFld, IR::Instr** createObjInstrOut = nullptr, IR::Instr** callCtorInstrOut = nullptr) const;
bool SplitConstructorCallCommon(IR::Instr *const newObjInstr, IR::Opnd *const lastArgOpnd, const Js::OpCode newObjOpCode,
const bool isInlined, const bool doneFixedMethodFld, IR::Instr** createObjInstrOut, IR::Instr** callCtorInstrOut) const;
IR::Instr * InlinePolymorphicFunction(IR::Instr *callInstr, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, const Js::ProfileId profileId, bool* pIsInlined, uint recursiveInlineDepth, bool triedUsingFixedMethods = false);
IR::Instr * InlinePolymorphicFunctionUsingFixedMethods(IR::Instr *callInstr, const FunctionJITTimeInfo * inlinerData, const StackSym *symCallerThis, const Js::ProfileId profileId, IR::PropertySymOpnd* methodValueOpnd, bool* pIsInlined, uint recursiveInlineDepth);
- IR::Instr * TryGetCallbackDefInstr(IR::Instr * callInstr);
+
+ IR::RegOpnd * GetCallbackFunctionOpnd(IR::Instr * callInstr);
+ IR::Instr * TryGetCallbackDefInstr(StackSym * callbackSym);
+ IR::Instr * TryGetCallbackDefInstrForCallInstr(IR::Instr * callInstr);
+ IR::Instr * TryGetCallbackDefInstrForCallApplyTarget(IR::Instr * callApplyLdInstr);
+ IR::Instr * TryGetCallbackDefInstrForCallInstanceFunction(IR::Instr * callInstr);
+ bool TryGetCallApplyCallbackTargetInlineeData(const FunctionJITTimeInfo* inlinerData, IR::Instr * callInstr, IR::Instr * callApplyLdInstr, const FunctionJITTimeInfo ** inlineeData, IR::Instr ** callbackDefInstr, bool isCallInstanceFunction);
IR::Instr * InlineSpread(IR::Instr *spreadCall);
@@ -102,7 +119,7 @@ class Inline
void SetupInlineeFrame(Func *inlinee, IR::Instr *inlineeStart, Js::ArgSlot actualCount, IR::Opnd *functionObject);
void FixupExtraActualParams(IR::Instr * instr, IR::Instr *argOuts[], IR::Instr *argOutsExtra[], uint index, uint actualCount, Js::ProfileId callSiteId);
void RemoveExtraFixupArgouts(IR::Instr* instr, uint argoutRemoveCount, Js::ProfileId callSiteId);
- IR::Instr* PrepareInsertionPoint(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind = IR::BailOutOnInlineFunction);
+ IR::Instr* PrepareInsertionPoint(IR::Instr *callInstr, const FunctionJITTimeInfo *funcInfo, IR::Instr *insertBeforeInstr);
IR::ByteCodeUsesInstr* EmitFixedMethodOrFunctionObjectChecksForBuiltIns(IR::Instr *callInstr, IR::Instr * funcObjCheckInsertInstr, const FunctionJITTimeInfo * inlineeInfo, bool isPolymorphic, bool isBuiltIn, bool isCtor, bool isInlined);
Js::ArgSlot MapActuals(IR::Instr *callInstr, __out_ecount(maxParamCount) IR::Instr *argOuts[], Js::ArgSlot formalCount, Func *inlinee, Js::ProfileId callSiteId, bool *stackArgsArgOutExpanded, IR::Instr *argOutsExtra[] = nullptr, Js::ArgSlot maxParamCount = Js::InlineeCallInfo::MaxInlineeArgoutCount);
uint32 CountActuals(IR::Instr *callIntr);
@@ -145,11 +162,8 @@ class Inline
void SetInlineeFrameStartSym(Func *inlinee, uint actualCount);
void CloneCallSequence(IR::Instr* callInstr, IR::Instr* clonedCallInstr);
- void InsertObjectCheck(IR::Instr *callInstr, IR::Instr* insertBeforeInstr, IR::Instr*bailOutInstr);
- void InsertFunctionTypeIdCheck(IR::Instr *callInstr, IR::Instr* insertBeforeInstr, IR::Instr*bailOutInstr);
- void InsertJsFunctionCheck(IR::Instr *callInstr, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind);
- void InsertFunctionInfoCheck(IR::Instr *callInstr, IR::Instr *insertBeforeInstr, IR::Instr* bailoutInstr, const FunctionJITTimeInfo *funcInfo);
- void InsertFunctionObjectCheck(IR::Instr *callInstr, IR::Instr *insertBeforeInstr, IR::Instr* bailoutInstr, const FunctionJITTimeInfo *funcInfo);
+ void InsertJsFunctionCheck(IR::Instr * callInstr, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind);
+ void InsertFunctionObjectCheck(IR::RegOpnd * funcOpnd, IR::Instr *insertBeforeInstr, IR::Instr* bailoutInstr, const FunctionJITTimeInfo *funcInfo);
void TryResetObjTypeSpecFldInfoOn(IR::PropertySymOpnd* propertySymOpnd);
void TryDisableRuntimePolymorphicCacheOn(IR::PropertySymOpnd* propertySymOpnd);
diff --git a/lib/Backend/InlineeFrameInfo.cpp b/lib/Backend/InlineeFrameInfo.cpp
index 4ac71f94bc3..86d1d2cd7cb 100644
--- a/lib/Backend/InlineeFrameInfo.cpp
+++ b/lib/Backend/InlineeFrameInfo.cpp
@@ -76,8 +76,7 @@ bool BailoutConstantValue::IsEqual(const BailoutConstantValue & bailoutConstValu
return false;
}
-
-void InlineeFrameInfo::AllocateRecord(Func* func, intptr_t functionBodyAddr)
+void InlineeFrameInfo::AllocateRecord(Func* inlinee, intptr_t functionBodyAddr)
{
uint constantCount = 0;
@@ -100,7 +99,7 @@ void InlineeFrameInfo::AllocateRecord(Func* func, intptr_t functionBodyAddr)
// update the record
if (!this->record)
{
- this->record = InlineeFrameRecord::New(func->GetNativeCodeDataAllocator(), (uint)arguments->Count(), constantCount, functionBodyAddr, this);
+ this->record = InlineeFrameRecord::New(inlinee->GetNativeCodeDataAllocator(), (uint)arguments->Count(), constantCount, functionBodyAddr, this);
}
uint i = 0;
@@ -131,7 +130,7 @@ void InlineeFrameInfo::AllocateRecord(Func* func, intptr_t functionBodyAddr)
{
// Constants
Assert(constantIndex < constantCount);
- this->record->constants[constantIndex] = value.constValue.ToVar(func);
+ this->record->constants[constantIndex] = value.constValue.ToVar(inlinee);
this->record->argOffsets[i] = constantIndex;
constantIndex++;
}
@@ -150,10 +149,10 @@ void InlineeFrameInfo::AllocateRecord(Func* func, intptr_t functionBodyAddr)
#endif
this->record->functionOffset = offset;
}
- else
+ else if (inlinee->m_hasInlineArgsOpt)
{
Assert(constantIndex < constantCount);
- this->record->constants[constantIndex] = function.constValue.ToVar(func);
+ this->record->constants[constantIndex] = function.constValue.ToVar(inlinee);
this->record->functionOffset = constantIndex;
}
}
@@ -162,10 +161,14 @@ void InlineeFrameRecord::PopulateParent(Func* func)
{
Assert(this->parent == nullptr);
Assert(!func->IsTopFunc());
- if (func->GetParentFunc()->m_hasInlineArgsOpt)
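+ // Walk up the inliner chain and attach to the record of the nearest ancestor that still has frame info;
+ // inline frames without frame info are skipped.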
+ for (Func* currFunc = func; !currFunc->IsTopFunc(); currFunc = currFunc->GetParentFunc())
{
- this->parent = func->GetParentFunc()->frameInfo->record;
- Assert(this->parent != nullptr);
+ if (currFunc->GetParentFunc()->frameInfo)
+ {
+ this->parent = currFunc->GetParentFunc()->frameInfo->record;
+ Assert(this->parent != nullptr);
+ return;
+ }
}
}
@@ -211,9 +214,9 @@ void InlineeFrameRecord::Restore(Js::FunctionBody* functionBody, InlinedFrameLay
BAILOUT_VERBOSE_TRACE(functionBody, _u("Restore function object: "));
// No deepCopy needed for just the function
Js::Var varFunction = this->Restore(this->functionOffset, /*isFloat64*/ false, /*isInt32*/ false, layout, functionBody, boxValues);
- Assert(Js::ScriptFunction::Is(varFunction));
+ Assert(Js::VarIs<Js::ScriptFunction>(varFunction));
- Js::ScriptFunction* function = Js::ScriptFunction::FromVar(varFunction);
+ Js::ScriptFunction* function = Js::VarTo<Js::ScriptFunction>(varFunction);
BAILOUT_VERBOSE_TRACE(functionBody, _u("Inlinee: %s [%d.%d] \n"), function->GetFunctionBody()->GetDisplayName(), function->GetFunctionBody()->GetSourceContextId(), function->GetFunctionBody()->GetLocalFunctionId());
inlinedFrame->function = function;
@@ -230,7 +233,7 @@ void InlineeFrameRecord::Restore(Js::FunctionBody* functionBody, InlinedFrameLay
#if DBG
if (boxValues && !Js::TaggedNumber::Is(var))
{
- Js::RecyclableObject *const recyclableObject = Js::RecyclableObject::FromVar(var);
+ Js::RecyclableObject *const recyclableObject = Js::VarTo<Js::RecyclableObject>(var);
Assert(!ThreadContext::IsOnStack(recyclableObject));
}
#endif
@@ -272,22 +275,49 @@ void InlineeFrameRecord::RestoreFrames(Js::FunctionBody* functionBody, InlinedFr
inlineDepth++;
currentFrame = currentFrame->Next();
}
-
// Align the inline depth of the record with the frame that needs to be restored
while (currentRecord && currentRecord->inlineDepth != inlineDepth)
{
currentRecord = currentRecord->parent;
}
+ int currentDepth = inlineDepth;
- while (currentRecord)
+ // Return if there is nothing to restore
+ if (!currentRecord)
{
- currentRecord->Restore(functionBody, currentFrame, callstack, boxValues);
- currentRecord = currentRecord->parent;
- currentFrame = currentFrame->Next();
+ return;
}
- // Terminate the inlined stack
- currentFrame->callInfo.Count = 0;
+ // We have InlineeFrameRecords for optimized frames and parents (i.e. inlinees) of optimized frames
+ // InlineeFrameRecords for unoptimized frames don't have values to restore and have argCount 0
+ while (currentRecord && (currentRecord->argCount != 0 || currentRecord->parent))
+ {
+ // There is nothing to restore for unoptimized frames
+ if (currentRecord->argCount != 0)
+ {
+ currentRecord->Restore(functionBody, currentFrame, callstack, boxValues);
+ }
+ currentRecord = currentRecord->parent;
+
+ // Walk stack frames forward to the depth of the next record
+ if (currentRecord)
+ {
+ while (currentDepth != currentRecord->inlineDepth)
+ {
+ currentFrame = currentFrame->Next();
+ currentDepth++;
+ }
+ }
+ }
+
+ // If we don't have any more InlineeFrameRecords, the innermost inlinee was an optimized frame
+ if (!currentRecord)
+ {
+ // We determine the innermost inlinee by frame->Next()->callInfo.Count == 0
+ // Optimized frames don't have this set when entering the inlinee in the JITed code, so we must do
+ // this for them now
+ currentFrame->Next()->callInfo.Count = 0;
+ }
}
Js::Var InlineeFrameRecord::Restore(int offset, bool isFloat64, bool isInt32, Js::JavascriptCallStackLayout * layout, Js::FunctionBody* functionBody, bool boxValue) const
diff --git a/lib/Backend/InliningDecider.cpp b/lib/Backend/InliningDecider.cpp
index 2f21acd5cfa..b658759f3f8 100644
--- a/lib/Backend/InliningDecider.cpp
+++ b/lib/Backend/InliningDecider.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) 2021 ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
@@ -101,6 +102,17 @@ Js::FunctionInfo * InliningDecider::GetCallSiteCallbackInfo(Js::FunctionBody *co
return profileData->GetCallbackInfo(inliner, profiledCallSiteId);
}
+Js::FunctionInfo * InliningDecider::GetCallApplyTargetInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId)
+{
+ Assert(inliner != nullptr);
+ Assert(profiledCallSiteId < inliner->GetProfiledCallSiteCount());
+
+ Js::DynamicProfileInfo * profileData = inliner->GetAnyDynamicProfileInfo();
+ Assert(profileData != nullptr);
+
+ return profileData->GetCallApplyTargetInfo(inliner, profiledCallSiteId);
+}
+
uint16 InliningDecider::GetConstantArgInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId)
{
Assert(inliner);
@@ -147,6 +159,16 @@ Js::FunctionInfo * InliningDecider::InlineCallback(Js::FunctionBody *const inlin
return nullptr;
}
+Js::FunctionInfo * InliningDecider::InlineCallApplyTarget(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId, uint recursiveInlineDepth)
+{
+ Js::FunctionInfo * functionInfo = GetCallApplyTargetInfo(inliner, profiledCallSiteId);
+ if (functionInfo)
+ {
+ return Inline(inliner, functionInfo, false, false, false, GetConstantArgInfo(inliner, profiledCallSiteId), profiledCallSiteId, recursiveInlineDepth, true);
+ }
+ return functionInfo;
+}
+
uint InliningDecider::InlinePolymorphicCallSite(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId,
Js::FunctionBody** functionBodyArray, uint functionBodyArrayLength, bool* canInlineArray, uint recursiveInlineDepth)
{
@@ -484,12 +506,14 @@ bool InliningDecider::GetBuiltInInfoCommon(
*inlineCandidateOpCode = Js::OpCode::InlineArrayPop;
break;
+ case Js::JavascriptBuiltInFunction::JavascriptArray_At:
case Js::JavascriptBuiltInFunction::JavascriptArray_Concat:
case Js::JavascriptBuiltInFunction::JavascriptArray_Reverse:
case Js::JavascriptBuiltInFunction::JavascriptArray_Shift:
case Js::JavascriptBuiltInFunction::JavascriptArray_Slice:
case Js::JavascriptBuiltInFunction::JavascriptArray_Splice:
+ case Js::JavascriptBuiltInFunction::JavascriptString_At:
case Js::JavascriptBuiltInFunction::JavascriptString_Link:
goto CallDirectCommon;
@@ -516,6 +540,7 @@ bool InliningDecider::GetBuiltInInfoCommon(
case Js::JavascriptBuiltInFunction::JavascriptArray_Includes:
case Js::JavascriptBuiltInFunction::JavascriptObject_HasOwnProperty:
+ case Js::JavascriptBuiltInFunction::JavascriptObject_HasOwn:
case Js::JavascriptBuiltInFunction::JavascriptArray_IsArray:
*returnType = ValueType::Boolean;
goto CallDirectCommon;
@@ -553,6 +578,9 @@ bool InliningDecider::GetBuiltInInfoCommon(
case Js::JavascriptBuiltInFunction::JavascriptFunction_Call:
*inlineCandidateOpCode = Js::OpCode::InlineFunctionCall;
break;
+ case Js::JavascriptBuiltInFunction::EngineInterfaceObject_CallInstanceFunction:
+ *inlineCandidateOpCode = Js::OpCode::InlineCallInstanceFunction;
+ break;
// The following are not currently inlined, but are tracked for their return type
// TODO: Add more built-ins that return objects. May consider tracking all built-ins.
@@ -649,12 +677,6 @@ bool InliningDecider::GetBuiltInInfoCommon(
*returnType = ValueType::GetObject(ObjectType::CharArray);
break;
-#ifdef ENABLE_DOM_FAST_PATH
- case Js::JavascriptBuiltInFunction::DOMFastPathGetter:
- *inlineCandidateOpCode = Js::OpCode::DOMFastPathGetter;
- break;
-#endif
-
default:
return false;
}
diff --git a/lib/Backend/InliningDecider.h b/lib/Backend/InliningDecider.h
index 2a15a64da24..0d14a6e7350 100644
--- a/lib/Backend/InliningDecider.h
+++ b/lib/Backend/InliningDecider.h
@@ -32,6 +32,8 @@ class InliningDecider
Js::FunctionInfo *GetCallSiteFuncInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId, bool* isConstructorCall, bool* isPolymorphicCall);
Js::FunctionInfo * InlineCallback(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId, uint recursiveInlineDepth);
Js::FunctionInfo * GetCallSiteCallbackInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId);
+ Js::FunctionInfo * InlineCallApplyTarget(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId, uint recursiveInlineDepth);
+ Js::FunctionInfo * GetCallApplyTargetInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId);
uint16 GetConstantArgInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId);
bool HasCallSiteInfo(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId);
uint InlinePolymorphicCallSite(Js::FunctionBody *const inliner, const Js::ProfileId profiledCallSiteId, Js::FunctionBody** functionBodyArray, uint functionBodyArrayLength, bool* canInlineArray, uint recursiveInlineDepth = 0);
@@ -135,6 +137,12 @@ class InliningDecider
{ \
Output::Flush(); \
}
+#define INLINE_CALLBACKS_TRACE(...) \
+ if (PHASE_TESTTRACE(Js::InlineCallbacksPhase, this->topFunc) || PHASE_TRACE(Js::InlineCallbacksPhase, this->topFunc)) \
+ { \
+ Output::Print(__VA_ARGS__); \
+ Output::Flush(); \
+ }
#else
#define INLINE_VERBOSE_TRACE(...)
#define POLYMORPHIC_INLINE_TESTTRACE(...)
@@ -143,4 +151,6 @@ class InliningDecider
#define INLINE_FLUSH()
#define INLINE_TESTTRACE(...)
#define INLINE_TESTTRACE_VERBOSE(...)
+#define INLINE_TRACE_AND_TESTTRACE(...)
+#define INLINE_CALLBACKS_TRACE(...)
#endif
diff --git a/lib/Backend/InterpreterThunkEmitter.cpp b/lib/Backend/InterpreterThunkEmitter.cpp
index e7f9bcbcb03..7d459f47a5d 100644
--- a/lib/Backend/InterpreterThunkEmitter.cpp
+++ b/lib/Backend/InterpreterThunkEmitter.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
@@ -157,19 +158,31 @@ constexpr BYTE Epilog[] = {
};
#elif defined(_M_ARM64)
+#ifdef _WIN32
constexpr BYTE FunctionInfoOffset = 24;
constexpr BYTE FunctionProxyOffset = 28;
constexpr BYTE DynamicThunkAddressOffset = 32;
constexpr BYTE ThunkAddressOffset = 36;
+#else
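+// The non-Windows ARM64 prologue below saves only fp/lr (16 bytes), so the patchable ldr offsets start at 8 rather than 24.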
+constexpr BYTE FunctionInfoOffset = 8;
+constexpr BYTE FunctionProxyOffset = 12;
+constexpr BYTE DynamicThunkAddressOffset = 16;
+constexpr BYTE ThunkAddressOffset = 20;
+#endif
//TODO: saravind :Implement Range Check for ARM64
constexpr BYTE InterpreterThunk[InterpreterThunkEmitter::InterpreterThunkSize] = {
+#ifdef _WIN32
0xFD, 0x7B, 0xBB, 0xA9, //stp fp, lr, [sp, #-80]! ;Prologue
0xFD, 0x03, 0x00, 0x91, //mov fp, sp ;update frame pointer to the stack pointer
0xE0, 0x07, 0x01, 0xA9, //stp x0, x1, [sp, #16] ;Prologue again; save all registers
0xE2, 0x0F, 0x02, 0xA9, //stp x2, x3, [sp, #32]
0xE4, 0x17, 0x03, 0xA9, //stp x4, x5, [sp, #48]
0xE6, 0x1F, 0x04, 0xA9, //stp x6, x7, [sp, #64]
+#else
+ 0xFD, 0x7B, 0xBF, 0xA9, //stp fp, lr, [sp, #-16]! ;Prologue
+ 0xFD, 0x03, 0x00, 0x91, //mov fp, sp ;update frame pointer to the stack pointer
+#endif
0x02, 0x00, 0x40, 0xF9, //ldr x2, [x0, #0x00] ;offset will be replaced with Offset of FunctionInfo
0x40, 0x00, 0x40, 0xF9, //ldr x0, [x2, #0x00] ;offset will be replaced with Offset of FunctionProxy
0x03, 0x00, 0x40, 0xF9, //ldr x3, [x0, #0x00] ;offset will be replaced with offset of DynamicInterpreterThunk
@@ -191,7 +204,11 @@ constexpr BYTE Call[] = {
};
constexpr BYTE Epilog[] = {
+#ifdef _WIN32
0xfd, 0x7b, 0xc5, 0xa8, // ldp fp, lr, [sp], #80
+#else
+ 0xfd, 0x7b, 0xc1, 0xa8, // ldp fp, lr, [sp], #16
+#endif
0xc0, 0x03, 0x5f, 0xd6 // ret
};
#else // x86
diff --git a/lib/Backend/InterpreterThunkEmitter.h b/lib/Backend/InterpreterThunkEmitter.h
index 2420ca7a5b3..10c4fc4d769 100644
--- a/lib/Backend/InterpreterThunkEmitter.h
+++ b/lib/Backend/InterpreterThunkEmitter.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
@@ -68,7 +69,11 @@ class InterpreterThunkEmitter
#elif defined(_M_ARM)
static constexpr size_t InterpreterThunkSize = 72;
#elif defined(_M_ARM64)
+#ifdef _WIN32
static constexpr size_t InterpreterThunkSize = 64;
+#else
+ static constexpr size_t InterpreterThunkSize = 48;
+#endif
#else
static constexpr size_t InterpreterThunkSize = 56;
#endif
diff --git a/lib/Backend/JITTimeFunctionBody.cpp b/lib/Backend/JITTimeFunctionBody.cpp
index 23b3da8f978..eab25a41890 100644
--- a/lib/Backend/JITTimeFunctionBody.cpp
+++ b/lib/Backend/JITTimeFunctionBody.cpp
@@ -162,6 +162,7 @@ JITTimeFunctionBody::InitializeJITFunctionData(
}
}
+ jitBody->yieldReg = functionBody->GetYieldRegister();
jitBody->localFrameDisplayReg = functionBody->GetLocalFrameDisplayRegister();
jitBody->localClosureReg = functionBody->GetLocalClosureRegister();
jitBody->envReg = functionBody->GetEnvRegister();
@@ -174,6 +175,7 @@ JITTimeFunctionBody::InitializeJITFunctionData(
}
jitBody->envDepth = functionBody->GetEnvDepth();
jitBody->profiledCallSiteCount = functionBody->GetProfiledCallSiteCount();
+ jitBody->profiledCallApplyCallSiteCount = functionBody->GetProfiledCallApplyCallSiteCount();
jitBody->inParamCount = functionBody->GetInParamsCount();
jitBody->thisRegisterForEventHandler = functionBody->GetThisRegisterForEventHandler();
jitBody->funcExprScopeRegister = functionBody->GetFuncExprScopeRegister();
@@ -251,6 +253,12 @@ JITTimeFunctionBody::InitializeJITFunctionData(
jitBody->functionSlotsInCachedScopeCount = slotIdInCachedScopeToNestedIndexArray->count;
jitBody->slotIdInCachedScopeToNestedIndexArray = slotIdInCachedScopeToNestedIndexArray->elements;
}
+ Js::ProfileId * callSiteToCallApplyCallSiteArray = functionBody->GetCallSiteToCallApplyCallSiteArrayWithLock();
+ if (callSiteToCallApplyCallSiteArray)
+ {
+ jitBody->callSiteToCallApplyCallSiteArrayCount = jitBody->profiledCallSiteCount;
+ jitBody->callSiteToCallApplyCallSiteArray = callSiteToCallApplyCallSiteArray;
+ }
#ifdef ASMJS_PLAT
if (functionBody->GetIsAsmJsFunction())
{
@@ -394,6 +402,12 @@ JITTimeFunctionBody::GetLocalFrameDisplayReg() const
    return static_cast<Js::RegSlot>(m_bodyData.localFrameDisplayReg);
}
+Js::RegSlot
+JITTimeFunctionBody::GetYieldReg() const
+{
+ return static_cast<Js::RegSlot>(m_bodyData.yieldReg);
+}
+
Js::RegSlot
JITTimeFunctionBody::GetLocalClosureReg() const
{
@@ -809,6 +823,12 @@ JITTimeFunctionBody::NeedScopeObjectForArguments(bool hasNonSimpleParams) const
&& !dontNeedScopeObject;
}
+bool
+JITTimeFunctionBody::RegIsConstant(Js::RegSlot reg) const
+{
+ return reg > 0 && reg < this->GetConstCount();
+}
+
bool
JITTimeFunctionBody::GetDoScopeObjectCreation() const
{
@@ -1052,6 +1072,22 @@ JITTimeFunctionBody::HasPropIdToFormalsMap() const
return m_bodyData.propertyIdsForRegSlotsCount > 0 && GetFormalsPropIdArray() != nullptr;
}
+Js::ProfileId
+JITTimeFunctionBody::GetCallApplyCallSiteIdForCallSiteId(Js::ProfileId callSiteId) const
+{
+ AssertOrFailFast(callSiteId < m_bodyData.profiledCallSiteCount);
+ Js::ProfileId callApplyId = Js::Constants::NoProfileId;
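+ // callSiteToCallApplyCallSiteArray maps a profiled call site to the call/apply call site it feeds;
+ // NoProfileId means this call site does not feed one.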
+ if (m_bodyData.callSiteToCallApplyCallSiteArray)
+ {
+ callApplyId = m_bodyData.callSiteToCallApplyCallSiteArray[callSiteId];
+ AssertOrFailFast(
+ callApplyId == Js::Constants::NoProfileId ||
+ callApplyId < m_bodyData.profiledCallApplyCallSiteCount);
+ }
+
+ return callApplyId;
+}
+
bool
JITTimeFunctionBody::IsRegSlotFormal(Js::RegSlot reg) const
{
diff --git a/lib/Backend/JITTimeFunctionBody.h b/lib/Backend/JITTimeFunctionBody.h
index 2cd9b99d72b..0f0ea51786b 100644
--- a/lib/Backend/JITTimeFunctionBody.h
+++ b/lib/Backend/JITTimeFunctionBody.h
@@ -37,6 +37,7 @@ class JITTimeFunctionBody
uint GetInlineCacheCount() const;
uint GetRecursiveCallSiteCount() const;
uint GetForInLoopDepth() const;
+ Js::RegSlot GetYieldReg() const;
Js::RegSlot GetLocalFrameDisplayReg() const;
Js::RegSlot GetLocalClosureReg() const;
Js::RegSlot GetEnvReg() const;
@@ -102,6 +103,7 @@ class JITTimeFunctionBody
void EnsureConsistentConstCount() const;
bool HasComputedName() const;
bool HasHomeObj() const;
+ bool RegIsConstant(Js::RegSlot reg) const;
const byte * GetByteCodeBuffer() const;
StatementMapIDL * GetFullStatementMap() const;
@@ -177,6 +179,7 @@ class JITTimeFunctionBody
bool HasProfileInfo() const;
bool IsRegSlotFormal(Js::RegSlot reg) const;
bool HasPropIdToFormalsMap() const;
+ Js::ProfileId GetCallApplyCallSiteIdForCallSiteId(Js::ProfileId callSiteId) const;
static bool LoopContains(const JITLoopHeaderIDL * loop1, const JITLoopHeaderIDL * loop2);
diff --git a/lib/Backend/JITTimeWorkItem.cpp b/lib/Backend/JITTimeWorkItem.cpp
index 040d92a0366..4cd9c5f1ec0 100644
--- a/lib/Backend/JITTimeWorkItem.cpp
+++ b/lib/Backend/JITTimeWorkItem.cpp
@@ -41,9 +41,7 @@ JITTimeWorkItem::IsLoopBody() const
bool
JITTimeWorkItem::IsJitInDebugMode() const
{
- // TODO (michhol): flags?
- return Js::Configuration::Global.EnableJitInDebugMode()
- && m_workItemData->isJitInDebugMode;
+ return m_workItemData->isJitInDebugMode;
}
intptr_t
diff --git a/lib/Backend/JitTransferData.cpp b/lib/Backend/JitTransferData.cpp
index 3c99866b441..bb50778ae59 100644
--- a/lib/Backend/JitTransferData.cpp
+++ b/lib/Backend/JitTransferData.cpp
@@ -115,4 +115,4 @@ void JitTransferData::Cleanup()
}
midl_user_free(entries);
}
-}
\ No newline at end of file
+}
diff --git a/lib/Backend/JitTransferData.h b/lib/Backend/JitTransferData.h
index 0daf45e4a07..2cc8179b73f 100644
--- a/lib/Backend/JitTransferData.h
+++ b/lib/Backend/JitTransferData.h
@@ -111,4 +111,4 @@ class JitTransferData
void Cleanup();
private:
void EnsureJitTimeTypeRefs(Recycler* recycler);
-};
\ No newline at end of file
+};
diff --git a/lib/Backend/JnHelperMethodList.h b/lib/Backend/JnHelperMethodList.h
index 4c37b00cab8..ffc960a97eb 100644
--- a/lib/Backend/JnHelperMethodList.h
+++ b/lib/Backend/JnHelperMethodList.h
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
-// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
+// Copyright (C) Microsoft. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#ifndef HELPERCALL
@@ -29,6 +30,7 @@ HELPERCALLCHK(ScrFunc_OP_NewScFunc, Js::ScriptFunction::OP_NewScFunc, AttrCanNot
HELPERCALLCHK(ScrFunc_OP_NewScFuncHomeObj, Js::ScriptFunction::OP_NewScFuncHomeObj, AttrCanNotBeReentrant)
HELPERCALLCHK(ScrFunc_OP_NewScGenFunc, Js::JavascriptGeneratorFunction::OP_NewScGenFunc, AttrCanNotBeReentrant)
HELPERCALLCHK(ScrFunc_OP_NewScGenFuncHomeObj, Js::JavascriptGeneratorFunction::OP_NewScGenFuncHomeObj, AttrCanNotBeReentrant)
+HELPERCALLCHK(ScrFunc_OP_NewClassConstructor, Js::ScriptFunction::OP_NewClassConstructor, AttrCanNotBeReentrant)
HELPERCALLCHK(ScrFunc_CheckAlignment, Js::JavascriptFunction::CheckAlignment, AttrCanNotBeReentrant)
HELPERCALLCHK(ScrObj_LdHandlerScope, Js::JavascriptOperators::OP_LdHandlerScope, 0)
HELPERCALLCHK(ScrObj_LdFrameDisplay, Js::JavascriptOperators::OP_LdFrameDisplay, AttrCanNotBeReentrant)
@@ -43,7 +45,9 @@ HELPERCALLCHK(ScrObj_OP_IsInst, Js::JavascriptOperators::OP_IsInst, AttrCanThrow
HELPERCALLCHK(Op_IsIn, Js::JavascriptOperators::IsIn, AttrCanThrow)
HELPERCALLCHK(Op_IsObject, (BOOL (*) (Js::Var))Js::JavascriptOperators::IsObject, AttrCanNotBeReentrant)
+HELPERCALLCHK(Op_IsObjectOrNull, (BOOL (*) (Js::Var))Js::JavascriptOperators::IsObjectOrNull, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_IsClassConstructor, Js::JavascriptOperators::IsClassConstructor, AttrCanNotBeReentrant)
+HELPERCALLCHK(Op_IsConstructor, (bool (*)(Js::Var))Js::JavascriptOperators::IsConstructor, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_IsBaseConstructorKind, Js::JavascriptOperators::IsBaseConstructorKind, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_LoadHeapArguments, Js::JavascriptOperators::LoadHeapArguments, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_LoadHeapArgsCached, Js::JavascriptOperators::LoadHeapArgsCached, AttrCanNotBeReentrant)
@@ -60,12 +64,12 @@ HELPERCALLCHK(OP_CloneInnerScopeSlots, Js::JavascriptOperators::OP_CloneScopeSlo
HELPERCALLCHK(OP_CloneBlockScope, Js::JavascriptOperators::OP_CloneBlockScope, AttrCanNotBeReentrant)
HELPERCALLCHK(LdThis, Js::JavascriptOperators::OP_GetThis, AttrCanThrow)
HELPERCALLCHK(LdThisNoFastPath, Js::JavascriptOperators::OP_GetThisNoFastPath, 0)
-HELPERCALLCHK(StrictLdThis, Js::JavascriptOperators::OP_StrictGetThis, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_LdElemUndef, Js::JavascriptOperators::OP_LoadUndefinedToElement, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_LdElemUndefDynamic, Js::JavascriptOperators::OP_LoadUndefinedToElementDynamic, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_LdElemUndefScoped, Js::JavascriptOperators::OP_LoadUndefinedToElementScoped, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_EnsureNoRootProperty, Js::JavascriptOperators::OP_EnsureNoRootProperty, AttrCanThrow | AttrCanNotBeReentrant)
HELPERCALLCHK(Op_EnsureNoRootRedeclProperty, Js::JavascriptOperators::OP_EnsureNoRootRedeclProperty, AttrCanThrow | AttrCanNotBeReentrant)
+HELPERCALLCHK(Op_EnsureCanDeclGloFunc, Js::JavascriptOperators::OP_EnsureCanDeclGloFunc, AttrCanThrow | AttrCanNotBeReentrant)
HELPERCALLCHK(Op_EnsureNoRedeclPropertyScoped, Js::JavascriptOperators::OP_ScopedEnsureNoRedeclProperty, AttrCanThrow | AttrCanNotBeReentrant)
HELPERCALLCHK(Op_ToSpreadedFunctionArgument, Js::JavascriptOperators::OP_LdCustomSpreadIteratorList, AttrCanThrow)
@@ -73,8 +77,8 @@ HELPERCALLCHK(Op_ConvObject, Js::JavascriptOperators::ToObject, AttrCanThrow | A
HELPERCALLCHK(Op_NewUnscopablesWrapperObject, Js::JavascriptOperators::ToUnscopablesWrapperObject, AttrCanThrow | AttrCanNotBeReentrant)
HELPERCALLCHK(SetComputedNameVar, Js::JavascriptOperators::OP_SetComputedNameVar, AttrCanNotBeReentrant)
HELPERCALLCHK(Op_UnwrapWithObj, Js::JavascriptOperators::OP_UnwrapWithObj, AttrCanNotBeReentrant)
-HELPERCALLCHK(Op_ConvNumber_Full, Js::JavascriptOperators::ToNumber, AttrCanThrow)
-HELPERCALLCHK(Op_ConvNumberInPlace, Js::JavascriptOperators::ToNumberInPlace, AttrCanThrow)
+HELPERCALLCHK(Op_ConvNumber_Full, Js::JavascriptOperators::ToNumeric, AttrCanThrow)
+HELPERCALLCHK(Op_ConvNumberInPlace, Js::JavascriptOperators::ToNumericInPlace, AttrCanThrow)
HELPERCALLCHK(Op_ConvNumber_Helper, Js::JavascriptConversion::ToNumber_Helper, 0)
HELPERCALLCHK(Op_ConvFloat_Helper, Js::JavascriptConversion::ToFloat_Helper, 0)
HELPERCALLCHK(Op_ConvNumber_FromPrimitive, Js::JavascriptConversion::ToNumber_FromPrimitive, 0)
@@ -254,6 +258,17 @@ HELPERCALLCHK(Op_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic, ((void (*)(
HELPERCALLCHK(Op_PatchPutRootValueNoLocalFastPath, ((void (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutRootValueNoLocalFastPath), AttrCanThrow)
HELPERCALLCHK(Op_PatchPutRootValueNoLocalFastPathPolymorphic, ((void (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutRootValueNoLocalFastPath), AttrCanThrow)
+HELPERCALLCHK(Op_PatchInitValueCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::RecyclableObject*, Js::PropertyId, Js::Var))Js::JavascriptOperators::PatchInitValueCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchInitValuePolymorphicCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::RecyclableObject*, Js::PropertyId, Js::Var))Js::JavascriptOperators::PatchInitValueCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueWithThisPtrCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueWithThisPtrCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValuePolymorphicCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueWithThisPtrPolymorphicCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueWithThisPtrCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueNoLocalFastPathCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueNoLocalFastPathCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueWithThisPtrNoLocalFastPathCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueWithThisPtrNoLocalFastPathCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueNoLocalFastPathPolymorphicCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueNoLocalFastPathCantChangeType), AttrCanThrow)
+HELPERCALLCHK(Op_PatchPutValueWithThisPtrNoLocalFastPathPolymorphicCantChangeType, ((bool (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueWithThisPtrNoLocalFastPathCantChangeType), AttrCanThrow)
+
HELPERCALLCHK(Op_PatchInitValueCheckLayout, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::RecyclableObject*, Js::PropertyId, Js::Var))Js::JavascriptOperators::PatchInitValueCheckLayout), AttrCanThrow)
HELPERCALLCHK(Op_PatchInitValuePolymorphicCheckLayout, ((bool (*)(Js::FunctionBody *const, Js::PolymorphicInlineCache *const, const Js::InlineCacheIndex, Js::RecyclableObject*, Js::PropertyId, Js::Var))Js::JavascriptOperators::PatchInitValueCheckLayout), AttrCanThrow)
HELPERCALLCHK(Op_PatchPutValueCheckLayout, ((bool (*)(Js::FunctionBody *const, Js::InlineCache *const, const Js::InlineCacheIndex, Js::Var, Js::PropertyId, Js::Var, Js::PropertyOperationFlags))Js::JavascriptOperators::PatchPutValueCheckLayout), AttrCanThrow)
@@ -338,7 +353,7 @@ HELPERCALLCHK(NewScObjectNoArgNoCtor, Js::JavascriptOperators::NewScObjectNoArgN
HELPERCALLCHK(UpdateNewScObjectCache, Js::JavascriptOperators::UpdateNewScObjectCache, AttrCanNotBeReentrant)
HELPERCALLCHK(EnsureObjectLiteralType, Js::JavascriptOperators::EnsureObjectLiteralType, AttrCanNotBeReentrant)
-HELPERCALLCHK(OP_InitClass, Js::JavascriptOperators::OP_InitClass, AttrCanThrow)
+HELPERCALLCHK(Op_NewClassProto, Js::JavascriptOperators::OP_NewClassProto, AttrCanNotBeReentrant)
HELPERCALLCHK(OP_ClearAttributes, Js::JavascriptOperators::OP_ClearAttributes, AttrCanThrow | AttrCanNotBeReentrant)
@@ -424,6 +439,7 @@ HELPERCALLCHK(ProfiledLdRootFld, Js::ProfilingHelpers::ProfiledLdRootFld_Jit, 0)
HELPERCALLCHK(ProfiledLdRootMethodFld, Js::ProfilingHelpers::ProfiledLdRootMethodFld_Jit, 0)
HELPERCALLCHK(ProfiledStFld, Js::ProfilingHelpers::ProfiledStFld_Jit, 0)
HELPERCALLCHK(ProfiledStSuperFld, Js::ProfilingHelpers::ProfiledStSuperFld_Jit, 0)
+HELPERCALLCHK(ProfiledStSuperFld_Strict, Js::ProfilingHelpers::ProfiledStSuperFld_Strict_Jit, 0)
HELPERCALLCHK(ProfiledStFld_Strict, Js::ProfilingHelpers::ProfiledStFld_Strict_Jit, 0)
HELPERCALLCHK(ProfiledStRootFld, Js::ProfilingHelpers::ProfiledStRootFld_Jit, 0)
HELPERCALLCHK(ProfiledStRootFld_Strict, Js::ProfilingHelpers::ProfiledStRootFld_Strict_Jit, 0)
@@ -434,7 +450,6 @@ HELPERCALLCHK(SimpleProfileCall_DefaultInlineCacheIndex, Js::SimpleJitHelpers::P
HELPERCALLCHK(SimpleProfileCall, Js::SimpleJitHelpers::ProfileCall, AttrCanNotBeReentrant)
HELPERCALLCHK(SimpleProfileReturnTypeCall, Js::SimpleJitHelpers::ProfileReturnTypeCall, AttrCanNotBeReentrant)
//HELPERCALLCHK(SimpleProfiledLdLen, Js::SimpleJitHelpers::ProfiledLdLen_A, AttrCanThrow) //Can throw because it mirrors OP_GetProperty
-HELPERCALLCHK(SimpleProfiledStrictLdThis, Js::SimpleJitHelpers::ProfiledStrictLdThis, AttrCanNotBeReentrant)
HELPERCALLCHK(SimpleProfiledLdThis, Js::SimpleJitHelpers::ProfiledLdThis, AttrCanNotBeReentrant)
HELPERCALLCHK(SimpleProfiledSwitch, Js::SimpleJitHelpers::ProfiledSwitch, AttrCanNotBeReentrant)
HELPERCALLCHK(SimpleProfiledDivide, Js::SimpleJitHelpers::ProfiledDivide, AttrCanThrow)
@@ -449,7 +464,7 @@ HELPERCALLCHK(SimpleRecordLoopImplicitCallFlags, Js::SimpleJitHelpers::RecordLoo
HELPERCALLCHK(ScriptAbort, Js::JavascriptOperators::ScriptAbort, AttrCanThrow | AttrCanNotBeReentrant)
-HELPERCALLCHK(NoSaveRegistersBailOutForElidedYield, BailOutRecord::BailOutForElidedYield, 0)
+HELPERCALLCHK(NoSaveRegistersBailOutForElidedYield, BailOutRecord::BailOutForElidedYield, AttrCanNotBeReentrant)
// We don't want these functions to be valid iCall targets because they can be used to disclose stack addresses
// which CFG cannot defend against. Instead, return these addresses in GetNonTableMethodAddress
@@ -461,6 +476,7 @@ HELPERCALL(SaveAllRegistersNoSse2AndBranchBailOut, nullptr, AttrCanNotBeReentran
#endif
//Helpers for inlining built-ins
+HELPERCALLCHK(Array_At, Js::JavascriptArray::EntryAt, 0)
HELPERCALLCHK(Array_Concat, Js::JavascriptArray::EntryConcat, 0)
HELPERCALLCHK(Array_IndexOf, Js::JavascriptArray::EntryIndexOf, 0)
HELPERCALLCHK(Array_Includes, Js::JavascriptArray::EntryIncludes, 0)
@@ -470,7 +486,7 @@ HELPERCALLCHK(Array_VarPush, Js::JavascriptArray::Push, 0)
HELPERCALLCHK(Array_NativeIntPush, Js::JavascriptNativeIntArray::Push, 0)
HELPERCALLCHK(Array_NativeFloatPush, Js::JavascriptNativeFloatArray::Push, 0)
HELPERCALLCHK(Array_VarPop, Js::JavascriptArray::Pop, 0)
-HELPERCALLCHK(Array_NativePopWithNoDst, Js::JavascriptNativeArray::PopWithNoDst, AttrCanNotBeReentrant)
+HELPERCALLCHK(Array_NativePopWithNoDst, Js::JavascriptNativeArray::PopWithNoDst, 0)
HELPERCALLCHK(Array_NativeIntPop, Js::JavascriptNativeIntArray::Pop, AttrCanNotBeReentrant)
HELPERCALLCHK(Array_NativeFloatPop, Js::JavascriptNativeFloatArray::Pop, AttrCanNotBeReentrant)
HELPERCALLCHK(Array_Reverse, Js::JavascriptArray::EntryReverse, 0)
@@ -480,6 +496,7 @@ HELPERCALLCHK(Array_Splice, Js::JavascriptArray::EntrySplice, 0)
HELPERCALLCHK(Array_Unshift, Js::JavascriptArray::EntryUnshift, 0)
HELPERCALLCHK(Array_IsArray, Js::JavascriptArray::EntryIsArray, 0)
+HELPERCALL(String_At, Js::JavascriptString::EntryAt, 0)
HELPERCALL(String_Concat, Js::JavascriptString::EntryConcat, 0)
HELPERCALL(String_CharCodeAt, Js::JavascriptString::EntryCharCodeAt, 0)
HELPERCALL(String_CharAt, Js::JavascriptString::EntryCharAt, 0)
@@ -501,13 +518,14 @@ HELPERCALL(String_ToLocaleUpperCase, Js::JavascriptString::EntryToLocaleUpperCas
HELPERCALL(String_ToLowerCase, Js::JavascriptString::EntryToLowerCase, 0)
HELPERCALL(String_ToUpperCase, Js::JavascriptString::EntryToUpperCase, 0)
HELPERCALL(String_Trim, Js::JavascriptString::EntryTrim, 0)
-HELPERCALL(String_TrimLeft, Js::JavascriptString::EntryTrimLeft, 0)
-HELPERCALL(String_TrimRight, Js::JavascriptString::EntryTrimRight, 0)
+HELPERCALL(String_TrimLeft, Js::JavascriptString::EntryTrimStart, 0)
+HELPERCALL(String_TrimRight, Js::JavascriptString::EntryTrimEnd, 0)
HELPERCALL(String_GetSz, Js::JavascriptString::GetSzHelper, 0)
HELPERCALL(String_PadStart, Js::JavascriptString::EntryPadStart, 0)
HELPERCALL(String_PadEnd, Js::JavascriptString::EntryPadEnd, 0)
HELPERCALLCHK(GlobalObject_ParseInt, Js::GlobalObject::EntryParseInt, 0)
HELPERCALLCHK(Object_HasOwnProperty, Js::JavascriptObject::EntryHasOwnProperty, 0)
+HELPERCALLCHK(Object_HasOwn, Js::JavascriptObject::EntryHasOwn, 0)
HELPERCALL(RegExp_SplitResultUsed, Js::RegexHelper::RegexSplitResultUsed, 0)
HELPERCALL(RegExp_SplitResultUsedAndMayBeTemp, Js::RegexHelper::RegexSplitResultUsedAndMayBeTemp, 0)
@@ -529,15 +547,28 @@ HELPERCALL(EnsureFunctionProxyDeferredPrototypeType, &Js::FunctionProxy::EnsureF
HELPERCALL(SpreadArrayLiteral, Js::JavascriptArray::SpreadArrayArgs, 0)
HELPERCALL(SpreadCall, Js::JavascriptFunction::EntrySpreadCall, 0)
-HELPERCALLCHK(LdHomeObj, Js::JavascriptOperators::OP_LdHomeObj, AttrCanNotBeReentrant)
-HELPERCALLCHK(LdFuncObj, Js::JavascriptOperators::OP_LdFuncObj, AttrCanNotBeReentrant)
-HELPERCALLCHK(SetHomeObj, Js::JavascriptOperators::OP_SetHomeObj, AttrCanNotBeReentrant)
-HELPERCALLCHK(LdHomeObjProto, Js::JavascriptOperators::OP_LdHomeObjProto, AttrCanNotBeReentrant)
-HELPERCALLCHK(LdFuncObjProto, Js::JavascriptOperators::OP_LdFuncObjProto, AttrCanNotBeReentrant)
+HELPERCALL(SpreadObjectLiteral, Js::JavascriptObject::SpreadObjectLiteral, 0)
+HELPERCALL(Restify, Js::JavascriptObject::Restify, 0)
+HELPERCALL(NewPropIdArrForCompProps, Js::InterpreterStackFrame::OP_NewPropIdArrForCompProps, AttrCanNotBeReentrant)
+HELPERCALL(StPropIdArrFromVar, Js::InterpreterStackFrame::OP_StPropIdArrFromVar, 0)
+
+
+HELPERCALLCHK(LdHomeObj, Js::JavascriptOperators::OP_LdHomeObj, AttrCanNotBeReentrant)
+HELPERCALLCHK(LdFuncObj, Js::JavascriptOperators::OP_LdFuncObj, AttrCanNotBeReentrant)
+HELPERCALLCHK(SetHomeObj, Js::JavascriptOperators::OP_SetHomeObj, AttrCanNotBeReentrant)
+HELPERCALLCHK(LdHomeObjProto, Js::JavascriptOperators::OP_LdHomeObjProto, AttrCanNotBeReentrant)
+HELPERCALLCHK(LdFuncObjProto, Js::JavascriptOperators::OP_LdFuncObjProto, AttrCanNotBeReentrant)
-HELPERCALLCHK(ImportCall, Js::JavascriptOperators::OP_ImportCall, 0)
+HELPERCALLCHK(ImportCall, Js::JavascriptOperators::OP_ImportCall, 0)
+HELPERCALLCHK(LdImportMeta, Js::JavascriptOperators::OP_LdImportMeta, 0)
+HELPERCALLCHK(NewAsyncFromSyncIterator, Js::JavascriptOperators::OP_NewAsyncFromSyncIterator, AttrCanNotBeReentrant)
+HELPERCALLCHK(NewAwaitObject, Js::JavascriptOperators::OP_NewAwaitObject, AttrCanNotBeReentrant)
-HELPERCALLCHK(ResumeYield, Js::JavascriptOperators::OP_ResumeYield, AttrCanThrow)
+HELPERCALL(CreateInterpreterStackFrameForGenerator, Js::InterpreterStackFrame::CreateInterpreterStackFrameForGenerator, AttrCanNotBeReentrant)
+
+#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
+HELPERCALL(OutputGeneratorBailInTrace, Js::JavascriptGenerator::OutputBailInTrace, AttrCanNotBeReentrant)
+#endif
#if DBG
HELPERCALL(IntRangeCheckFailure, Js::JavascriptNativeOperators::IntRangeCheckFailure, AttrCanNotBeReentrant)
diff --git a/lib/Backend/LinearScan.cpp b/lib/Backend/LinearScan.cpp
index 2cf603a01a7..ed0c8026eb4 100644
--- a/lib/Backend/LinearScan.cpp
+++ b/lib/Backend/LinearScan.cpp
@@ -1,5 +1,6 @@
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
+// Copyright (c) ChakraCore Project Contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
@@ -146,8 +147,6 @@ LinearScan::RegAlloc()
}
m_bailOutRecordCount = 0;
- IR::Instr * insertBailInAfter = nullptr;
- BailOutInfo * bailOutInfoForBailIn = nullptr;
bool endOfBasicBlock = true;
FOREACH_INSTR_EDITING(instr, instrNext, currentInstr)
{
@@ -212,7 +211,32 @@ LinearScan::RegAlloc()
continue;
}
- if (instr->HasBailOutInfo())
+#if DBG
+ // Since not all call instructions are forwarded to ChangeToHelperCall, we might have
+ // missed allocating bailout records for them. Additionally, some instructions might
+ // end up being lowered differently, so the lazy bailout is not on a CALL instruction
+ // anymore. Use this opportunity to detect them.
+ // Note that the dump for the instruction will also be printed with -ForcePostLowerGlobOptInstrString
+ if (instr->HasBailOutInfo() && instr->GetBailOutInfo()->bailOutRecord == nullptr)
+ {
+ if (CONFIG_FLAG(ForcePostLowerGlobOptInstrString))
+ {
+ // The instruction has already been lowered, find the start to get the globopt dump
+ IR::Instr *curr = instr;
+ while (curr->globOptInstrString == nullptr)
+ {
+ curr = curr->m_prev;
+ }
+
+ instr->Dump();
+ curr->DumpGlobOptInstrString();
+ }
+
+ AssertMsg(false, "Lazy bailout: bailOutRecord not allocated");
+ }
+#endif
+
+ if (instr->HasBailOutInfo() && !instr->HasLazyBailOut())
{
if (this->currentRegion)
{
@@ -224,12 +248,6 @@ LinearScan::RegAlloc()
}
this->FillBailOutRecord(instr);
- if (instr->GetBailOutKind() == IR::BailOutForGeneratorYield)
- {
- Assert(instr->m_next->IsLabelInstr());
- insertBailInAfter = instr->m_next;
- bailOutInfoForBailIn = instr->GetBailOutInfo();
- }
}
this->SetSrcRegs(instr);
@@ -249,6 +267,8 @@ LinearScan::RegAlloc()
this->KillImplicitRegs(instr);
+ this->ProcessLazyBailOut(instr);
+
this->AllocateNewLifetimes(instr);
this->SetDstReg(instr);
@@ -268,13 +288,11 @@ LinearScan::RegAlloc()
endOfBasicBlock = true;
}
- if (insertBailInAfter == instr)
+ if (instr->IsGeneratorBailInInstr())
{
- instrNext = linearScanMD.GenerateBailInForGeneratorYield(instr, bailOutInfoForBailIn);
- insertBailInAfter = nullptr;
- bailOutInfoForBailIn = nullptr;
+ instrNext = this->bailIn.GenerateBailIn(instr->AsGeneratorBailInInstr());
}
- }NEXT_INSTR_EDITING;
+ } NEXT_INSTR_EDITING;
if (func->hasBailout)
{
@@ -1152,7 +1170,6 @@ struct FillBailOutState
FillBailOutState(JitArenaAllocator * allocator) : constantList(allocator) {}
};
-
void
LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutState * state, IR::Instr * instr)
{
@@ -1175,7 +1192,7 @@ LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutStat
else
{
Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
- Assert(lifetime && lifetime->start < instr->GetNumber() && instr->GetNumber() <= lifetime->end);
+ Assert(instr->HasLazyBailOut() || lifetime && lifetime->start < instr->GetNumber() && instr->GetNumber() <= lifetime->end);
if (instr->GetBailOutKind() == IR::BailOutOnException)
{
// Apart from the exception object sym, lifetimes for all other syms that need to be restored at this bailout,
@@ -1185,10 +1202,28 @@ LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutStat
}
this->PrepareForUse(lifetime);
- if (lifetime->isSpilled ||
+
+ if (instr->HasLazyBailOut() && instr->GetBailOutInfo()->GetClearedUseOfDstId() == stackSym->m_id)
+ {
+ // Force the value of the bytecode-upward-exposed destination symbol of a call instruction
+ // with lazy bailout to be restored from `rax`.
+ // In globopt we clear the bit in the bytecode-upward-exposed set for the destination symbol
+ // of call instructions with lazy bailout to get past the assert that the register hasn't
+ // been initialized yet.
+ // Since the value is actually in rax, during FillBailOutRecord we can always force
+ // the bailout to restore that symbol from rax.
+#ifdef _M_X64
+ *offset = this->SaveSymbolToReg(RegRAX, state, stackSym);
+#elif _M_IX86
+ *offset = this->SaveSymbolToReg(RegEAX, state, stackSym);
+#else
+ AssertMsg(false, "Lazy bailout for ARM is not yet supported");
+#endif
+ }
+ else if (lifetime->isSpilled ||
((instr->GetBailOutKind() == IR::BailOutOnException) && (stackSym != this->currentRegion->GetExceptionObjectSym()))) // BailOutOnException must restore from memory
{
- Assert(stackSym->IsAllocated());
+ Assert(stackSym->IsAllocated() || lifetime->isDeadStore);
#ifdef MD_GROW_LOCALS_AREA_UP
*offset = -((int)stackSym->m_offset + BailOutInfo::StackSymBias);
#else
@@ -1198,22 +1233,28 @@ LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutStat
}
else
{
- Assert(lifetime->reg != RegNOREG);
- Assert(state->registerSaveSyms[lifetime->reg - 1] == nullptr ||
- state->registerSaveSyms[lifetime->reg - 1] == stackSym);
- AssertMsg((stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] == TyFloat64 ||
- !(stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] != TyFloat64,
- "Trying to save float64 sym into non-float64 reg or non-float64 sym into float64 reg");
-
- // Save the register value to the register save space using the reg enum value as index
- state->registerSaveSyms[lifetime->reg - 1] = stackSym;
- *offset = LinearScanMD::GetRegisterSaveIndex(lifetime->reg);
-
- state->registerSaveCount++;
+ *offset = this->SaveSymbolToReg(lifetime->reg, state, stackSym);
}
}
}
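+// Records that stackSym's value lives in `reg` at this bailout point and returns the
+// register save space index to be stored as the restore offset in the bailout record.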
+int
+LinearScan::SaveSymbolToReg(RegNum reg, FillBailOutState * state, StackSym * stackSym)
+{
+ Assert(reg != RegNOREG);
+ Assert(state->registerSaveSyms[reg - 1] == nullptr ||
+ state->registerSaveSyms[reg - 1] == stackSym);
+ AssertMsg((stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[reg] == TyFloat64 ||
+ !(stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[reg] != TyFloat64,
+ "Trying to save float64 sym into non-float64 reg or non-float64 sym into float64 reg");
+
+ // Save the register value to the register save space using the reg enum value as index
+ state->registerSaveSyms[reg - 1] = stackSym;
+ state->registerSaveCount++;
+
+ return LinearScanMD::GetRegisterSaveIndex(reg);
+}
+
struct FuncBailOutData
{
Func * func;
@@ -1282,11 +1323,11 @@ LinearScan::EnsureGlobalBailOutRecordTable(Func *func)
Func *topFunc = func->GetTopFunc();
bool isTopFunc = (func == topFunc);
uint32 inlineeID = isTopFunc ? 0 : func->m_inlineeId;
- NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
GlobalBailOutRecordDataTable *globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID];
if (globalBailOutRecordDataTable == nullptr)
{
+ NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID] = NativeCodeDataNew(allocator, GlobalBailOutRecordDataTable);
globalBailOutRecordDataTable->entryPointInfo = (Js::EntryPointInfo*)func->GetWorkItem()->GetJITTimeInfo()->GetEntryPointInfoAddr();
globalBailOutRecordDataTable->length = globalBailOutRecordDataTable->size = 0;
@@ -1300,7 +1341,14 @@ LinearScan::EnsureGlobalBailOutRecordTable(Func *func)
globalBailOutRecordDataTable->firstActualStackOffset = -1;
globalBailOutRecordDataTable->registerSaveSpace = (Js::Var*)func->GetThreadContextInfo()->GetBailOutRegisterSaveSpaceAddr();
globalBailOutRecordDataTable->globalBailOutRecordDataRows = nullptr;
- if (func->GetJITFunctionBody()->GetForInLoopDepth() != 0)
+
+ if (func->GetJITFunctionBody()->IsCoroutine())
+ {
+ // Don't restore for-in enumerators for generators because they are
+ // already on the generator's interpreter frame
+ globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = -1;
+ }
+ else if (func->GetJITFunctionBody()->GetForInLoopDepth() != 0)
{
#ifdef MD_GROW_LOCALS_AREA_UP
Assert(func->GetForInEnumeratorArrayOffset() >= 0);
@@ -1417,9 +1465,8 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
memset(state.registerSaveSyms, 0, sizeof(state.registerSaveSyms));
// Fill in the constants
- FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues, constantValuesIterator)
+ FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues->constantValues, constantValuesIterator)
{
- AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "constant prop syms unexpected for bail-in for generator yield");
StackSym * stackSym = value.Key();
if(stackSym->HasArgSlotNum())
{
@@ -1460,9 +1507,8 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
NEXT_SLISTBASE_ENTRY_EDITING;
// Fill in the copy prop syms
- FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues.copyPropSyms, copyPropSymsIter)
+ FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues->copyPropSyms, copyPropSymsIter)
{
- AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "copy prop syms unexpected for bail-in for generator yield");
StackSym * stackSym = copyPropSyms.Key();
if(stackSym->HasArgSlotNum())
{
@@ -1513,9 +1559,9 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
}
NEXT_BITSET_IN_SPARSEBV;
- if (bailOutInfo->usedCapturedValues.argObjSyms)
+ if (bailOutInfo->usedCapturedValues->argObjSyms)
{
- FOREACH_BITSET_IN_SPARSEBV(id, bailOutInfo->usedCapturedValues.argObjSyms)
+ FOREACH_BITSET_IN_SPARSEBV(id, bailOutInfo->usedCapturedValues->argObjSyms)
{
StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
Assert(stackSym != nullptr);
@@ -1705,7 +1751,7 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
uint outParamOffsetIndex = outParamStart + argSlot;
if (!sym->m_isBailOutReferenced && !sym->IsArgSlotSym())
{
- FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, constantValue, &bailOutInfo->usedCapturedValues.constantValues, iterator)
+ FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, constantValue, &bailOutInfo->usedCapturedValues->constantValues, iterator)
{
if (constantValue.Key()->m_id == sym->m_id)
{
@@ -1731,13 +1777,13 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
continue;
}
- FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSym, &bailOutInfo->usedCapturedValues.copyPropSyms, iter)
+ FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSym, &bailOutInfo->usedCapturedValues->copyPropSyms, iter)
{
if (copyPropSym.Key()->m_id == sym->m_id)
{
StackSym * copyStackSym = copyPropSym.Value();
- BVSparse<JitArenaAllocator>* argObjSyms = bailOutInfo->usedCapturedValues.argObjSyms;
+ BVSparse<JitArenaAllocator>* argObjSyms = bailOutInfo->usedCapturedValues->argObjSyms;
if (argObjSyms && argObjSyms->Test(copyStackSym->m_id))
{
outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
@@ -1845,7 +1891,7 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
Assert(LowererMD::IsAssign(instrDef));
}
- if (bailOutInfo->usedCapturedValues.argObjSyms && bailOutInfo->usedCapturedValues.argObjSyms->Test(sym->m_id))
+ if (bailOutInfo->usedCapturedValues->argObjSyms && bailOutInfo->usedCapturedValues->argObjSyms->Test(sym->m_id))
{
//foo.apply(this,arguments) case and we bailout when the apply is overridden. We need to restore the arguments object.
outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
@@ -1921,7 +1967,10 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
instr->m_func = this->func;
}
- linearScanMD.GenerateBailOut(instr, state.registerSaveSyms, _countof(state.registerSaveSyms));
+ if (!instr->HasLazyBailOut())
+ {
+ linearScanMD.GenerateBailOut(instr, state.registerSaveSyms, _countof(state.registerSaveSyms));
+ }
// generate the constant table
Js::Var * constants = NativeCodeDataNewArrayNoFixup(allocator, Js::Var, state.constantList.Count());
@@ -3164,7 +3213,7 @@ LinearScan::InsertStore(IR::Instr *instr, StackSym *sym, RegNum reg)
}
// LinearScan::InsertLoad
-void
+IR::Instr*
LinearScan::InsertLoad(IR::Instr *instr, StackSym *sym, RegNum reg)
{
IR::Opnd *src;
@@ -3236,6 +3285,8 @@ LinearScan::InsertLoad(IR::Instr *instr, StackSym *sym, RegNum reg)
}
}
#endif
+
+ return load;
}
uint8
@@ -3287,14 +3338,26 @@ LinearScan::KillImplicitRegs(IR::Instr *instr)
this->RecordLoopUse(nullptr, LowererMDArch::GetRegIMulHighDestLower());
return;
}
+
+ if (instr->m_opcode == Js::OpCode::Yield)
+ {
+ this->bailIn.SpillRegsForBailIn();
+ return;
+ }
#endif
this->TrackInlineeArgLifetimes(instr);
- // Don't care about kills on bailout calls as we are going to exit anyways
- // Also, for bailout scenarios we have already handled the inlinee frame spills
+ // Don't care about kills on bailout calls (e.g. call SaveAllRegAndBailOut) as we are going to exit anyway.
+ // Note that those are different from normal helper calls with LazyBailOut, which are not guaranteed to exit.
+ // Also, for bailout scenarios we have already handled the inlinee frame spills.
+ //
+ // Lazy bailout:
+ // Also make sure that call instructions that previously did not have bailouts are still processed the same way in RegAlloc.
+ // Previously only `call SaveAllRegistersAndBailOut` could have bailout info, but now other calls may have lazy bailouts too,
+ // which would otherwise keep them from being processed the same way as before (such as when computing lifetimes across calls).
Assert(LowererMD::IsCall(instr) || !instr->HasBailOutInfo());
- if (!LowererMD::IsCall(instr) || instr->HasBailOutInfo())
+ if (!LowererMD::IsCall(instr) || (instr->HasBailOutInfo() && !instr->HasLazyBailOut()))
{
return;
}
@@ -3405,20 +3468,30 @@ void LinearScan::TrackInlineeArgLifetimes(IR::Instr* instr)
});
if (this->currentBlock->inlineeStack.Count() > 0)
{
- Assert(instr->m_func->inlineDepth == this->currentBlock->inlineeStack.Last()->inlineDepth + 1);
+ Assert(instr->m_func->inlineDepth > this->currentBlock->inlineeStack.Last()->inlineDepth);
}
this->currentBlock->inlineeStack.Add(instr->m_func);
}
- else
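+ // Even if this inlinee does not have inline-args opt itself, keep it on the inlinee stack
+ // when its parent does; its cached InlineeFrameInfo is installed later at InlineeEnd.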
+ else if (instr->m_func->GetParentFunc()->m_hasInlineArgsOpt)
{
- Assert(this->currentBlock->inlineeStack.Count() == 0);
+ Assert(!instr->m_func->frameInfo);
+ Assert(instr->m_func->cachedInlineeFrameInfo);
+
+ Assert(this->currentBlock->inlineeStack.Empty() || instr->m_func->inlineDepth == this->currentBlock->inlineeStack.Last()->inlineDepth + 1);
+
+ this->currentBlock->inlineeStack.Add(instr->m_func);
}
}
else if (instr->m_opcode == Js::OpCode::InlineeEnd || instr->HasBailOnNoProfile())
{
- if (instr->m_func->m_hasInlineArgsOpt)
+ if (instr->m_func->m_hasInlineArgsOpt || (instr->m_func->GetParentFunc() && instr->m_func->GetParentFunc()->m_hasInlineArgsOpt))
{
- instr->m_func->frameInfo->AllocateRecord(this->func, instr->m_func->GetJITFunctionBody()->GetAddr());
+ if (!instr->m_func->m_hasInlineArgsOpt)
+ {
+ Assert(instr->m_func->cachedInlineeFrameInfo);
+ instr->m_func->frameInfo = instr->m_func->cachedInlineeFrameInfo;
+ }
+ instr->m_func->frameInfo->AllocateRecord(instr->m_func, instr->m_func->GetJITFunctionBody()->GetAddr());
if(this->currentBlock->inlineeStack.Count() == 0)
{
@@ -3431,25 +3504,28 @@ void LinearScan::TrackInlineeArgLifetimes(IR::Instr* instr)
Func* func = this->currentBlock->inlineeStack.RemoveAtEnd();
Assert(func == instr->m_func);
- instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
- Lifetime* lifetime = this->currentBlock->inlineeFrameLifetimes.RemoveAtEnd();
+ if (instr->m_func->m_hasInlineArgsOpt)
+ {
+ instr->m_func->frameInfo->IterateSyms([=](StackSym* sym) {
+ Lifetime* lifetime = this->currentBlock->inlineeFrameLifetimes.RemoveAtEnd();
- uint* value;
- if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
- {
- *value = *value - 1;
- if (*value == 0)
+ uint* value;
+ if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
{
- bool removed = this->currentBlock->inlineeFrameSyms.Remove(sym->m_id);
- Assert(removed);
+ *value = *value - 1;
+ if (*value == 0)
+ {
+ bool removed = this->currentBlock->inlineeFrameSyms.Remove(sym->m_id);
+ Assert(removed);
+ }
}
- }
- else
- {
- Assert(UNREACHED);
- }
- Assert(sym->scratch.linearScan.lifetime == lifetime);
- }, /*reverse*/ true);
+ else
+ {
+ Assert(UNREACHED);
+ }
+ Assert(sym->scratch.linearScan.lifetime == lifetime);
+ }, /*reverse*/ true);
+ }
}
}
}
@@ -3974,6 +4050,13 @@ LinearScan::InsertSecondChanceCompensation(Lifetime ** branchRegContent, Lifetim
continue;
}
+ // Allows us to properly insert compensation code for symbols whose lifetimes start after the generator jump table.
+ // The GeneratorBailInLabel has two incoming edges: one from the normal flow and one straight from the generator jump table.
+ if (!branchLifetime && lifetime && lifetime->start > branchInstr->GetNumber() && labelInstr->m_opcode == Js::OpCode::GeneratorBailInLabel)
+ {
+ continue;
+ }
+
if (branchLifetime && branchLifetime->isSpilled && !branchLifetime->sym->IsConst() && branchLifetime->end > labelInstr->GetNumber())
{
// The lifetime was in a reg at the branch and is now spilled. We need a store on this path.
@@ -4000,6 +4083,20 @@ LinearScan::InsertSecondChanceCompensation(Lifetime ** branchRegContent, Lifetim
{
if (insertionInstr->m_prev->AsLabelInstr()->isOpHelper && !insertionInstr->AsLabelInstr()->isOpHelper)
{
+ // Ignore the assertion error for cases where we insert an "airlock" helper block
+ // for a Branch instruction's helper path that:
+ // 1) ends up being empty
+ // 2) comes after a helper block from another instruction
+ // 3) is followed by a non-helper block
+ //
+ // Currently we would mark this block as a non-helper, but that would make
+ // this block reachable only through helper blocks, thus failing the assert
+#if DBG
+ if (insertionInstr->m_prev->AsLabelInstr()->isOpHelper)
+ {
+ insertionInstr->m_prev->AsLabelInstr()->m_noHelperAssert = true;
+ }
+#endif
insertionInstr->m_prev->AsLabelInstr()->isOpHelper = false;
}
}
@@ -4808,3 +4905,535 @@ IR::Instr* LinearScan::InsertLea(IR::RegOpnd *dst, IR::Opnd *src, IR::Instr *con
return instrRet;
}
+
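+// Handles an instruction tagged with lazy bailout: ensures the function has a slot for the
+// lazy bailout record, restores the cleared use of the destination symbol if needed, and
+// fills the bailout record now that KillImplicitRegs has run for this instruction.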
+void
+LinearScan::ProcessLazyBailOut(IR::Instr *instr)
+{
+ if (instr->HasLazyBailOut())
+ {
+ // No lazy bailout for functions with try/catch for now
+ Assert(!this->func->HasTry());
+
+ this->func->EnsureLazyBailOutRecordSlot();
+
+ if (instr->GetBailOutInfo()->NeedsToRestoreUseOfDst())
+ {
+ Assert(instr->OnlyHasLazyBailOut());
+ instr->GetBailOutInfo()->RestoreUseOfDst();
+ }
+
+ // FillBailOutRecord on lazy bailout must be called after KillImplicitRegs
+ //
+ // s1(rax) = ...
+ // s2 = call s1(rax)
+ // ...
+ // use of s1
+ //
+ // s1 in this case needs to be spilled due to the call.
+ // If we fill the bailout record similarly to normal bailouts,
+ // we wouldn't have the correct value of s1 because rax would have already
+ // been replaced by the result of the call.
+ // Therefore we have to capture its value after the call and after KillImplicitRegs.
+ this->FillBailOutRecord(instr);
+ }
+}
+
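+// regs[0] (rax/eax/r0) is used to hold the generator's InterpreterStackFrame pointer during
+// bail-in; regs[1] (rcx/ecx/r1) serves as a temporary register for the individual restores.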
+LinearScan::GeneratorBailIn::GeneratorBailIn(Func* func, LinearScan* linearScan) :
+ func { func },
+ linearScan { linearScan },
+ jitFnBody { func->GetJITFunctionBody() },
+ initializedRegs { func->m_alloc },
+ regs {
+#if defined(_M_X64)
+ RegRAX, RegRCX
+#elif defined(_M_IX86)
+ RegEAX, RegECX
+#elif defined(_M_ARM64)
+ RegR0, RegR1
+#endif
+ },
+ interpreterFrameRegOpnd { IR::RegOpnd::New(nullptr, regs[0], TyMachPtr, func) },
+ tempRegOpnd { IR::RegOpnd::New(nullptr, regs[1], TyVar, func) }
+{
+ // The yield register holds the evaluated value of the expression passed as
+ // the parameter to .next(); this can be obtained from the generator object itself,
+ // so there is no need to restore it.
+ this->initializedRegs.Set(this->jitFnBody->GetYieldReg());
+
+ // The environment is loaded before the resume jump table. At the bail-in point it can either
+ // still be in a register or have already been spilled. If it's in a register we're good. If it has
+ // been spilled, the register allocator should have inserted compensation code before the bail-in
+ // block, so we are still fine there.
+ this->initializedRegs.Set(this->jitFnBody->GetEnvReg());
+
+ this->bailInSymbols = JitAnew(this->func->m_alloc, SListBase);
+}
+
+LinearScan::GeneratorBailIn::~GeneratorBailIn()
+{
+ this->bailInSymbols->Clear(this->func->m_alloc);
+ this->bailInSymbols->Reset();
+ JitAdelete(this->func->m_alloc, this->bailInSymbols);
+}
+
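+// Called when a Yield is encountered during register allocation: spill whatever lifetimes
+// currently occupy the registers reserved for the bail-in sequence and drop them from the
+// temp-reg set so the generated bail-in code can use them freely.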
+void LinearScan::GeneratorBailIn::SpillRegsForBailIn()
+{
+ for (int i = 0; i < GeneratorBailIn::regNum; i++)
+ {
+ this->linearScan->SpillReg(this->regs[i]);
+ this->linearScan->tempRegs.Clear(this->regs[i]);
+ this->linearScan->RecordLoopUse(nullptr, this->regs[i]);
+ }
+}
+
+// Note: Comments refer to rax/rcx for x64. For x86, we use their equivalents: eax/ecx
+// Restores the live stack locations followed by the live registers from
+// the interpreter's register slots.
+// Calls RecordDef on each live register that is restored.
+//
+// Generates the following code:
+//
+// MOV rax, param0
+// MOV rax, [rax + JavascriptGenerator::GetFrameOffset()]
+//
+// for each live stack location, sym
+//
+// MOV rcx, [rax + regslot offset]
+// MOV sym(stack location), rcx
+//
+// for each live register, sym (rax is restored last if it is live)
+//
+// MOV sym(register), [rax + regslot offset]
+//
+IR::Instr* LinearScan::GeneratorBailIn::GenerateBailIn(IR::GeneratorBailInInstr* bailInInstr)
+{
+ BailOutInfo* bailOutInfo = bailInInstr->yieldInstr->GetBailOutInfo();
+
+ Assert(!bailOutInfo->capturedValues || bailOutInfo->capturedValues->constantValues.Empty());
+ Assert(!bailOutInfo->capturedValues || bailOutInfo->capturedValues->copyPropSyms.Empty());
+ Assert(!bailOutInfo->liveLosslessInt32Syms || bailOutInfo->liveLosslessInt32Syms->IsEmpty());
+ Assert(!bailOutInfo->liveFloat64Syms || bailOutInfo->liveFloat64Syms->IsEmpty());
+
+ IR::Instr* instrAfter = bailInInstr->m_next;
+
+ // 1) Load the generator object that was passed as one of the arguments to the jitted frame
+ LinearScan::InsertMove(this->interpreterFrameRegOpnd, this->CreateGeneratorObjectOpnd(), instrAfter);
+
+ // 2) Get the InterpreterStackFrame pointer into rax
+ IR::IndirOpnd* generatorFrameOpnd = IR::IndirOpnd::New(this->interpreterFrameRegOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, this->func);
+ LinearScan::InsertMove(this->interpreterFrameRegOpnd, generatorFrameOpnd, instrAfter);
+
+ // 3) Copy JavaScript's `arguments` object, which is stored in the interpreter frame, into the jit's stack slot if needed
+ // See BailOutRecord::RestoreValues
+ if (this->func->HasArgumentSlot())
+ {
+ IR::IndirOpnd* generatorArgumentsOpnd = IR::IndirOpnd::New(this->interpreterFrameRegOpnd, Js::InterpreterStackFrame::GetOffsetOfArguments(), TyMachPtr, this->func);
+ LinearScan::InsertMove(this->tempRegOpnd, generatorArgumentsOpnd, instrAfter);
+ LinearScan::InsertMove(LowererMD::CreateStackArgumentsSlotOpnd(this->func), this->tempRegOpnd, instrAfter);
+ }
+
+ BailInInsertionPoint insertionPoint
+ {
+ nullptr, /* raxRestoreInstr */
+ instrAfter, /* instrInsertStackSym */
+ instrAfter /* instrInsertRegSym */
+ };
+
+ // 4) Restore symbols
+ // - We don't need to restore argObjSyms because StackArgs is currently not enabled
+ // Commented out here in case we do want to enable it in the future:
+ // this->InsertRestoreSymbols(bailOutInfo->capturedValues->argObjSyms, insertionPoint, saveInitializedReg);
+ //
+ // - We move all argout symbols right before the call so we don't need to restore argouts either
+
+ this->BuildBailInSymbolList(
+ *bailOutInfo->byteCodeUpwardExposedUsed,
+ bailInInstr->upwardExposedUses,
+ bailInInstr->capturedValues
+ );
+
+ this->InsertRestoreSymbols(
+ *bailOutInfo->byteCodeUpwardExposedUsed,
+ bailInInstr->upwardExposedUses,
+ bailInInstr->capturedValues,
+ insertionPoint
+ );
+ Assert(!this->func->IsStackArgsEnabled());
+
+#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
+ if (PHASE_TRACE(Js::Phase::BailInPhase, this->func))
+ {
+ IR::Instr* insertBailInTraceBefore = instrAfter;
+ Assert(insertBailInTraceBefore->m_opcode == Js::OpCode::GeneratorOutputBailInTraceLabel);
+ this->InsertBailInTrace(bailOutInfo->byteCodeUpwardExposedUsed, insertBailInTraceBefore->m_next);
+ }
+#endif
+
+ return instrAfter;
+}
+
+void LinearScan::GeneratorBailIn::BuildBailInSymbolList(
+ const BVSparse<JitArenaAllocator>& byteCodeUpwardExposedUses,
+ const BVSparse<JitArenaAllocator>& upwardExposedUses,
+ const CapturedValues& capturedValues
+)
+{
+ this->bailInSymbols->Clear(this->func->m_alloc);
+
+ // Make sure that all symbols in `upwardExposedUses` can be restored.
+ // The idea is to first assume that we cannot restore any of the symbols.
+ // Then we use the information in `byteCodeUpwardExposedUses` and `capturedValues`
+ // which contain information about symbols in the bytecode, copy-prop'd symbols, and
+ // symbols with constant values. As we go through these lists, we clear the
+ // bits in `unrestorableSymbols` to indicate that they can be restored. At the
+ // end, the bitvector has to be empty.
+
+ // Assume all symbols cannot be restored.
+ BVSparse