diff --git a/.codecov.yml b/.codecov.yml
new file mode 100644
index 0000000000..895adc8dc9
--- /dev/null
+++ b/.codecov.yml
@@ -0,0 +1,11 @@
+codecov:
+  require_ci_to_pass: yes
+
+coverage:
+  precision: 2
+  round: down
+  status:
+    project:
+      default:
+        target: 99%    # the required coverage value
+        threshold: 1%  # the leniency in hitting the target 
diff --git a/.github/workflows/link-checker.yml b/.github/workflows/link-checker.yml
new file mode 100644
index 0000000000..be6f481978
--- /dev/null
+++ b/.github/workflows/link-checker.yml
@@ -0,0 +1,23 @@
+name: Link Checker
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  linkchecker:
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: lychee Link Checker
+        id: lychee
+        uses: lycheeverse/lychee-action@master
+        with:
+          args: --accept=200,403,429  "**/*.html" "**/*.md" "**/*.txt" --exclude "http://localhost*" "https://localhost" "https://odfe-node1:9200/" "https://community.tableau.com/docs/DOC-17978" ".*family.zzz" "https://pypi.python.org/pypi/opensearch-sql-cli/" "opensearch*" ".*@amazon.com" ".*email.com" "git@github.com" "http://timestamp.verisign.com/scripts/timstamp.dll"
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+      - name: Fail if there were link errors
+        run: exit ${{ steps.lychee.outputs.exit_code }}
diff --git a/.github/workflows/sql-test-and-build-workflow.yml b/.github/workflows/sql-test-and-build-workflow.yml
index f8708943d8..8ad294d853 100644
--- a/.github/workflows/sql-test-and-build-workflow.yml
+++ b/.github/workflows/sql-test-and-build-workflow.yml
@@ -35,6 +35,12 @@ jobs:
         mkdir -p opensearch-sql-builds
         cp -r ./plugin/build/distributions/*.zip opensearch-sql-builds/
 
+    # This step uses the codecov-action GitHub Action: https://github.com/codecov/codecov-action
+    - name: Upload SQL Coverage Report
+      uses: codecov/codecov-action@v1
+      with:
+        token: ${{ secrets.CODECOV_TOKEN }}
+
     - name: Upload Artifacts
       uses: actions/upload-artifact@v1
       with:
diff --git a/.github/workflows/sql-workbench-release-workflow.yml b/.github/workflows/sql-workbench-release-workflow.yml
index 668318e207..b22f7649b3 100644
--- a/.github/workflows/sql-workbench-release-workflow.yml
+++ b/.github/workflows/sql-workbench-release-workflow.yml
@@ -6,7 +6,7 @@ on:
       - 'v*'
 
 env: 
-  PLUGIN_NAME: queryWorkbenchDashboards
+  PLUGIN_NAME: query-workbench-dashboards
   OPENSEARCH_VERSION: '1.0'
   OPENSEARCH_PLUGIN_VERSION: 1.0.0.0-rc1
 
diff --git a/.github/workflows/sql-workbench-test-and-build-workflow.yml b/.github/workflows/sql-workbench-test-and-build-workflow.yml
index 059326b121..05a6541e34 100644
--- a/.github/workflows/sql-workbench-test-and-build-workflow.yml
+++ b/.github/workflows/sql-workbench-test-and-build-workflow.yml
@@ -3,7 +3,7 @@ name: SQL Workbench Test and Build
 on: [pull_request, push]
 
 env: 
-  PLUGIN_NAME: queryWorkbenchDashboards
+  PLUGIN_NAME: query-workbench-dashboards
   OPENSEARCH_VERSION: '1.0'
   OPENSEARCH_PLUGIN_VERSION: 1.0.0.0-rc1
 
diff --git a/ADMINS.md b/ADMINS.md
new file mode 100644
index 0000000000..81f320c5c0
--- /dev/null
+++ b/ADMINS.md
@@ -0,0 +1,7 @@
+## Admins
+
+| Admin           | GitHub ID                               | Affiliation |
+| --------------- | --------------------------------------- | ----------- |
+| Henri Yandell   | [hyandell](https://github.com/hyandell) | Amazon      |
+
+[This document](https://github.com/opensearch-project/.github/blob/main/ADMINS.md) explains what admins do in this repo and how they should do it. If you're interested in becoming a maintainer, see [MAINTAINERS](MAINTAINERS.md). If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md).
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c16674bb01..4d91732232 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,105 +1,4 @@
-# Contributing Guidelines
+## Contributing to this project
 
-Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
-documentation, we greatly value feedback and contributions from our community.
-
-Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
-information to effectively respond to your bug report or contribution.
-
-
-## Reporting Bugs/Feature Requests
-
-We welcome you to use the GitHub issue tracker to report bugs or suggest features.
-
-When filing an issue, please check [existing open](https://github.com/opensearch-project/sql/issues), or [recently closed](https://github.com/opensearch-project/sql/issues?q=is%3Aissue+is%3Aclosed), issues to make sure somebody else hasn't already
-reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
-
-* A reproducible test case or series of steps
-* The version of our code being used
-* Any modifications you've made relevant to the bug
-* Anything unusual about your environment or deployment
-
-## Sign your work
-OpenSearch is an open source product released under the Apache 2.0 license (see either [the Apache site](https://www.apache.org/licenses/LICENSE-2.0) or the [LICENSE.txt file](./LICENSE.txt)).  The Apache 2.0 license allows you to freely use, modify, distribute, and sell your own products that include Apache 2.0 licensed software.
-
-We respect intellectual property rights of others and we want to make sure all incoming contributions are correctly attributed and licensed. A Developer Certificate of Origin (DCO) is a lightweight mechanism to do that.
-
-The DCO is a declaration attached to every contribution made by every developer. In the commit message of the contribution, the developer simply adds a `Signed-off-by` statement and thereby agrees to the DCO, which you can find below or at [DeveloperCertificate.org](http://developercertificate.org/).
-
-```
-Developer's Certificate of Origin 1.1
-
-By making a contribution to this project, I certify that:
-
-(a) The contribution was created in whole or in part by me and I
-    have the right to submit it under the open source license
-    indicated in the file; or
-
-(b) The contribution is based upon previous work that, to the
-    best of my knowledge, is covered under an appropriate open
-    source license and I have the right under that license to
-    submit that work with modifications, whether created in whole
-    or in part by me, under the same open source license (unless
-    I am permitted to submit under a different license), as
-    Indicated in the file; or
-
-(c) The contribution was provided directly to me by some other
-    person who certified (a), (b) or (c) and I have not modified
-    it.
-
-(d) I understand and agree that this project and the contribution
-    are public and that a record of the contribution (including
-    all personal information I submit with it, including my
-    sign-off) is maintained indefinitely and may be redistributed
-    consistent with this project or the open source license(s)
-    involved.
- ```
-We require that every contribution to OpenSearch is signed with a Developer Certificate of Origin.  Additionally, please use your real name.  We do not accept anonymous contributors nor those utilizing pseudonyms.
-
-Each commit must include a DCO which looks like this
-
-```
-Signed-off-by: Jane Smith <jane.smith@email.com>
-```
-You may type this line on your own when writing your commit messages.  However, if your user.name and user.email are set in your git configs, you can use `-s` or `– – signoff` to add the `Signed-off-by` line to the end of the commit message.
-
-## Contributing via Pull Requests
-Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
-
-1. You are working against the latest source on the *develop* branch.
-2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
-3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
-
-To send us a pull request, please:
-
-1. Fork the repository.
-2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
-3. Ensure local tests pass; please add unit tests for all the new code paths introduced by your change.
-4. Commit to your fork using clear commit messages.
-5. Send us a pull request, answering any default questions in the pull request interface.
-6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
-
-GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
-[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
-
-## Developing
-
-For instructions for development in details, please refer to [Developer Guide](./DEVELOPER_GUIDE.rst).
-
-## Finding contributions to work on
-Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/opensearch-project/sql/issues?q=label%3A%22help+wanted%22) issues is a great place to start.
-
-
-## Code of Conduct
-
-This project has adopted an [Open Source Code of Conduct](./CODE_OF_CONDUCT.md).
-
-
-## Security issue notifications
-
-If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
-
-
-## Licensing
-
-See the [LICENSE](./LICENSE.txt) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
+OpenSearch is a community project that is built and maintained by people just like **you**. 
+[This document](https://github.com/opensearch-project/.github/blob/main/CONTRIBUTING.md) explains how you can contribute to this and related projects.
\ No newline at end of file
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index a6f0a6a252..cb4ea162d0 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -6,6 +6,6 @@
 | --------------- | --------- | ----------- |
 | Anirudha (Ani) Jadhav | [anirudha](https://github.com/anirudha) | Amazon |
 | Peng Huo | [penghuo](https://github.com/penghuo) | Amazon |
-| Chloe | [chloe-zh](https://github.com/chloe-zh) | Amazon |
 | Chen Dai | [dai-chen](https://github.com/dai-chen) | Amazon |
-| Harold Wang | [harold-wang](https://github.com/harold-wang) | Amazon |
+| Nick Knize | [nknize](https://github.com/nknize) | Amazon |
+| Charlotte Henkle | [CEHENKLE](https://github.com/CEHENKLE) | Amazon |
\ No newline at end of file
diff --git a/README.md b/README.md
index 98cab87858..bf76415459 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 
 [![Test and Build Workflow](https://github.com/opendistro-for-elasticsearch/sql/workflows/Java%20CI/badge.svg)](https://github.com/opendistro-for-elasticsearch/sql/actions)
 [![codecov](https://codecov.io/gh/opendistro-for-elasticsearch/sql/branch/develop/graph/badge.svg)](https://codecov.io/gh/opendistro-for-elasticsearch/sql)
-[![Documentation](https://img.shields.io/badge/api-reference-blue.svg)](https://docs-beta.opensearch.org/docs/sql/endpoints/)
+[![Documentation](https://img.shields.io/badge/api-reference-blue.svg)](https://docs-beta.opensearch.org/search-plugins/sql/endpoints/)
 [![Chat](https://img.shields.io/badge/chat-on%20forums-blue)](https://discuss.opendistrocommunity.dev/c/sql/)
 ![PRs welcome!](https://img.shields.io/badge/PRs-welcome!-success)
 
diff --git a/RELEASING.md b/RELEASING.md
new file mode 100644
index 0000000000..28e660d615
--- /dev/null
+++ b/RELEASING.md
@@ -0,0 +1 @@
+ This project follows the [OpenSearch release process](https://github.com/opensearch-project/.github/blob/main/RELEASING.md).
\ No newline at end of file
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000000..0b85ca04ed
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,3 @@
+## Reporting a Vulnerability
+
+If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. Please do **not** create a public GitHub issue.
\ No newline at end of file
diff --git a/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java b/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java
index ae34475c9a..3ff9b81ec5 100644
--- a/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java
+++ b/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java
@@ -56,7 +56,18 @@ public enum Key {
     /**
      * Legacy Common Settings.
      */
-    QUERY_SIZE_LIMIT("opendistro.query.size_limit");
+    QUERY_SIZE_LIMIT("opendistro.query.size_limit"),
+
+    /**
+     * Deprecated Settings.
+     */
+    SQL_NEW_ENGINE_ENABLED("opendistro.sql.engine.new.enabled"),
+    QUERY_ANALYSIS_ENABLED("opendistro.sql.query.analysis.enabled"),
+    QUERY_ANALYSIS_SEMANTIC_SUGGESTION("opendistro.sql.query.analysis.semantic.suggestion"),
+    QUERY_ANALYSIS_SEMANTIC_THRESHOLD("opendistro.sql.query.analysis.semantic.threshold"),
+    QUERY_RESPONSE_FORMAT("opendistro.sql.query.response.format"),
+    SQL_CURSOR_ENABLED("opendistro.sql.cursor.enabled"),
+    SQL_CURSOR_FETCH_SIZE("opendistro.sql.cursor.fetch_size");
 
     @Getter
     private final String keyValue;
diff --git a/core/build.gradle b/core/build.gradle
index 69acf5cef3..1c6c0c0481 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -51,6 +51,7 @@ dependencies {
     compile group: 'org.springframework', name: 'spring-beans', version: '5.2.5.RELEASE'
     compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.10'
     compile group: 'com.facebook.presto', name: 'presto-matching', version: '0.240'
+    compile group: 'org.apache.commons', name: 'commons-math3', version: '3.6.1'
     compile project(':common')
 
     testImplementation('org.junit.jupiter:junit-jupiter:5.6.2')
diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java
index 0f207c0374..d5c1538b77 100644
--- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java
+++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java
@@ -155,7 +155,8 @@ public Expression visitNot(Not node, AnalysisContext context) {
 
   @Override
   public Expression visitAggregateFunction(AggregateFunction node, AnalysisContext context) {
-    Optional<BuiltinFunctionName> builtinFunctionName = BuiltinFunctionName.of(node.getFuncName());
+    Optional<BuiltinFunctionName> builtinFunctionName =
+        BuiltinFunctionName.ofAggregation(node.getFuncName());
     if (builtinFunctionName.isPresent()) {
       Expression arg = node.getField().accept(this, context);
       Aggregator aggregator = (Aggregator) repository.compile(
diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java
index 31050afc87..560414592c 100644
--- a/core/src/main/java/org/opensearch/sql/expression/DSL.java
+++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java
@@ -500,6 +500,22 @@ public Aggregator count(Expression... expressions) {
     return aggregate(BuiltinFunctionName.COUNT, expressions);
   }
 
+  public Aggregator varSamp(Expression... expressions) {
+    return aggregate(BuiltinFunctionName.VARSAMP, expressions);
+  }
+
+  public Aggregator varPop(Expression... expressions) {
+    return aggregate(BuiltinFunctionName.VARPOP, expressions);
+  }
+
+  public Aggregator stddevSamp(Expression... expressions) {
+    return aggregate(BuiltinFunctionName.STDDEV_SAMP, expressions);
+  }
+
+  public Aggregator stddevPop(Expression... expressions) {
+    return aggregate(BuiltinFunctionName.STDDEV_POP, expressions);
+  }
+
   public RankingWindowFunction rowNumber() {
     return (RankingWindowFunction) repository.compile(
         BuiltinFunctionName.ROW_NUMBER.getName(), Collections.emptyList());
diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java
index a6be7378f7..640ae8a934 100644
--- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java
+++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java
@@ -35,6 +35,10 @@
 import static org.opensearch.sql.data.type.ExprCoreType.STRING;
 import static org.opensearch.sql.data.type.ExprCoreType.TIME;
 import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP;
+import static org.opensearch.sql.expression.aggregation.StdDevAggregator.stddevPopulation;
+import static org.opensearch.sql.expression.aggregation.StdDevAggregator.stddevSample;
+import static org.opensearch.sql.expression.aggregation.VarianceAggregator.variancePopulation;
+import static org.opensearch.sql.expression.aggregation.VarianceAggregator.varianceSample;
 
 import com.google.common.collect.ImmutableMap;
 import java.util.Collections;
@@ -68,6 +72,10 @@ public static void register(BuiltinFunctionRepository repository) {
     repository.register(count());
     repository.register(min());
     repository.register(max());
+    repository.register(varSamp());
+    repository.register(varPop());
+    repository.register(stddevSamp());
+    repository.register(stddevPop());
   }
 
   private static FunctionResolver avg() {
@@ -159,4 +167,48 @@ private static FunctionResolver max() {
             .build()
     );
   }
+
+  private static FunctionResolver varSamp() {
+    FunctionName functionName = BuiltinFunctionName.VARSAMP.getName();
+    return new FunctionResolver(
+        functionName,
+        new ImmutableMap.Builder<FunctionSignature, FunctionBuilder>()
+            .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)),
+                arguments -> varianceSample(arguments, DOUBLE))
+            .build()
+    );
+  }
+
+  private static FunctionResolver varPop() {
+    FunctionName functionName = BuiltinFunctionName.VARPOP.getName();
+    return new FunctionResolver(
+        functionName,
+        new ImmutableMap.Builder<FunctionSignature, FunctionBuilder>()
+            .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)),
+                arguments -> variancePopulation(arguments, DOUBLE))
+            .build()
+    );
+  }
+
+  private static FunctionResolver stddevSamp() {
+    FunctionName functionName = BuiltinFunctionName.STDDEV_SAMP.getName();
+    return new FunctionResolver(
+        functionName,
+        new ImmutableMap.Builder<FunctionSignature, FunctionBuilder>()
+            .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)),
+                arguments -> stddevSample(arguments, DOUBLE))
+            .build()
+    );
+  }
+
+  private static FunctionResolver stddevPop() {
+    FunctionName functionName = BuiltinFunctionName.STDDEV_POP.getName();
+    return new FunctionResolver(
+        functionName,
+        new ImmutableMap.Builder<FunctionSignature, FunctionBuilder>()
+            .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)),
+                arguments -> stddevPopulation(arguments, DOUBLE))
+            .build()
+    );
+  }
 }
diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java
index a1bf2b9961..346bd2d28c 100644
--- a/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java
+++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java
@@ -54,6 +54,8 @@ public class NamedAggregator extends Aggregator<AggregationState> {
 
   /**
    * NamedAggregator.
+   * The aggregator property {@link #condition} is inherited by the named aggregator
+   * to avoid errors introduced by property inconsistency.
    *
    * @param name name
    * @param delegated delegated
@@ -64,6 +66,7 @@ public NamedAggregator(
     super(delegated.getFunctionName(), delegated.getArguments(), delegated.returnType);
     this.name = name;
     this.delegated = delegated;
+    this.condition = delegated.condition;
   }
 
   @Override
diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java
new file mode 100644
index 0000000000..0cd8494449
--- /dev/null
+++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java
@@ -0,0 +1,110 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.expression.aggregation;
+
+import static org.opensearch.sql.data.model.ExprValueUtils.doubleValue;
+import static org.opensearch.sql.utils.ExpressionUtils.format;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;
+import org.opensearch.sql.common.utils.StringUtils;
+import org.opensearch.sql.data.model.ExprNullValue;
+import org.opensearch.sql.data.model.ExprValue;
+import org.opensearch.sql.data.type.ExprCoreType;
+import org.opensearch.sql.expression.Expression;
+import org.opensearch.sql.expression.function.BuiltinFunctionName;
+
+/**
+ * StandardDeviation Aggregator.
+ */
+public class StdDevAggregator extends Aggregator<StdDevAggregator.StdDevState> {
+
+  private final boolean isSampleStdDev;
+
+  /**
+   * Build Population Standard Deviation {@link StdDevAggregator}.
+   */
+  public static Aggregator stddevPopulation(List<Expression> arguments,
+                                              ExprCoreType returnType) {
+    return new StdDevAggregator(false, arguments, returnType);
+  }
+
+  /**
+   * Build Sample Standard Deviation {@link StdDevAggregator}.
+   */
+  public static Aggregator stddevSample(List<Expression> arguments,
+                                          ExprCoreType returnType) {
+    return new StdDevAggregator(true, arguments, returnType);
+  }
+
+  /**
+   * StdDevAggregator constructor.
+   *
+   * @param isSampleStdDev true for sample standard deviation aggregator, false for population
+   *                       standard deviation aggregator.
+   * @param arguments aggregator arguments.
+   * @param returnType aggregator return types.
+   */
+  public StdDevAggregator(
+      Boolean isSampleStdDev, List<Expression> arguments, ExprCoreType returnType) {
+    super(
+        isSampleStdDev
+            ? BuiltinFunctionName.STDDEV_SAMP.getName()
+            : BuiltinFunctionName.STDDEV_POP.getName(),
+        arguments,
+        returnType);
+    this.isSampleStdDev = isSampleStdDev;
+  }
+
+  @Override
+  public StdDevAggregator.StdDevState create() {
+    return new StdDevAggregator.StdDevState(isSampleStdDev);
+  }
+
+  @Override
+  protected StdDevAggregator.StdDevState iterate(ExprValue value,
+                                                 StdDevAggregator.StdDevState state) {
+    state.evaluate(value);
+    return state;
+  }
+
+  @Override
+  public String toString() {
+    return StringUtils.format(
+        "%s(%s)", isSampleStdDev ? "stddev_samp" : "stddev_pop", format(getArguments()));
+  }
+
+  protected static class StdDevState implements AggregationState {
+
+    private final StandardDeviation standardDeviation;
+
+    private final List<Double> values = new ArrayList<>();
+
+    public StdDevState(boolean isSampleStdDev) {
+      this.standardDeviation = new StandardDeviation(isSampleStdDev);
+    }
+
+    public void evaluate(ExprValue value) {
+      values.add(value.doubleValue());
+    }
+
+    @Override
+    public ExprValue result() {
+      return values.size() == 0
+          ? ExprNullValue.of()
+          : doubleValue(standardDeviation.evaluate(values.stream().mapToDouble(d -> d).toArray()));
+    }
+  }
+}
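// Illustrative sketch, not part of the patch: StdDevState delegates to the commons-math3
// StandardDeviation class added as a dependency in core/build.gradle above. The constructor
// flag selects bias correction: true = sample (n - 1) as used by stddev_samp, false =
// population (n) as used by stddev_pop. The class name StdDevSketch is hypothetical;
// the expected outputs match StdDevAggregatorTest later in this patch.
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class StdDevSketch {
  public static void main(String[] args) {
    double[] values = {1, 2, 3, 4};
    // Sample standard deviation (divides by n - 1).
    System.out.println(new StandardDeviation(true).evaluate(values));   // 1.2909944487358056
    // Population standard deviation (divides by n).
    System.out.println(new StandardDeviation(false).evaluate(values));  // 1.118033988749895
  }
}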
diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java
new file mode 100644
index 0000000000..bd9f0948f6
--- /dev/null
+++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java
@@ -0,0 +1,109 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.expression.aggregation;
+
+import static org.opensearch.sql.data.model.ExprValueUtils.doubleValue;
+import static org.opensearch.sql.utils.ExpressionUtils.format;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.commons.math3.stat.descriptive.moment.Variance;
+import org.opensearch.sql.common.utils.StringUtils;
+import org.opensearch.sql.data.model.ExprNullValue;
+import org.opensearch.sql.data.model.ExprValue;
+import org.opensearch.sql.data.type.ExprCoreType;
+import org.opensearch.sql.expression.Expression;
+import org.opensearch.sql.expression.function.BuiltinFunctionName;
+
+/**
+ * Variance Aggregator.
+ */
+public class VarianceAggregator extends Aggregator<VarianceAggregator.VarianceState> {
+
+  private final boolean isSampleVariance;
+
+  /**
+   * Build Population Variance {@link VarianceAggregator}.
+   */
+  public static Aggregator variancePopulation(List<Expression> arguments,
+                                                      ExprCoreType returnType) {
+    return new VarianceAggregator(false, arguments, returnType);
+  }
+
+  /**
+   * Build Sample Variance {@link VarianceAggregator}.
+   */
+  public static Aggregator varianceSample(List<Expression> arguments,
+                                                      ExprCoreType returnType) {
+    return new VarianceAggregator(true, arguments, returnType);
+  }
+
+  /**
+   * VarianceAggregator constructor.
+   *
+   * @param isSampleVariance true for sample variance aggregator, false for population variance
+   *     aggregator.
+   * @param arguments aggregator arguments.
+   * @param returnType aggregator return types.
+   */
+  public VarianceAggregator(
+      Boolean isSampleVariance, List<Expression> arguments, ExprCoreType returnType) {
+    super(
+        isSampleVariance
+            ? BuiltinFunctionName.VARSAMP.getName()
+            : BuiltinFunctionName.VARPOP.getName(),
+        arguments,
+        returnType);
+    this.isSampleVariance = isSampleVariance;
+  }
+
+  @Override
+  public VarianceState create() {
+    return new VarianceState(isSampleVariance);
+  }
+
+  @Override
+  protected VarianceState iterate(ExprValue value, VarianceState state) {
+    state.evaluate(value);
+    return state;
+  }
+
+  @Override
+  public String toString() {
+    return StringUtils.format(
+        "%s(%s)", isSampleVariance ? "var_samp" : "var_pop", format(getArguments()));
+  }
+
+  protected static class VarianceState implements AggregationState {
+
+    private final Variance variance;
+
+    private final List<Double> values = new ArrayList<>();
+
+    public VarianceState(boolean isSampleVariance) {
+      this.variance = new Variance(isSampleVariance);
+    }
+
+    public void evaluate(ExprValue value) {
+      values.add(value.doubleValue());
+    }
+
+    @Override
+    public ExprValue result() {
+      return values.size() == 0
+          ? ExprNullValue.of()
+          : doubleValue(variance.evaluate(values.stream().mapToDouble(d -> d).toArray()));
+    }
+  }
+}
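// Illustrative sketch, not part of the patch: VarianceState delegates to the commons-math3
// Variance class, whose constructor flag likewise selects sample (n - 1, var_samp) versus
// population (n, var_pop) variance. The class name VarianceSketch is hypothetical; the
// expected outputs match VarianceAggregatorTest later in this patch.
import org.apache.commons.math3.stat.descriptive.moment.Variance;

public class VarianceSketch {
  public static void main(String[] args) {
    double[] values = {1, 2, 3, 4};
    // Sample variance (divides by n - 1).
    System.out.println(new Variance(true).evaluate(values));   // 1.6666666666666667
    // Population variance (divides by n).
    System.out.println(new Variance(false).evaluate(values));  // 1.25
  }
}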
diff --git a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java
index 0210161abe..24e65d4b5d 100644
--- a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java
+++ b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java
@@ -12,6 +12,7 @@
 package org.opensearch.sql.expression.function;
 
 import com.google.common.collect.ImmutableMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Optional;
 import lombok.Getter;
@@ -126,6 +127,14 @@ public enum BuiltinFunctionName {
   COUNT(FunctionName.of("count")),
   MIN(FunctionName.of("min")),
   MAX(FunctionName.of("max")),
+  // sample variance
+  VARSAMP(FunctionName.of("var_samp")),
+  // population variance
+  VARPOP(FunctionName.of("var_pop")),
+  // sample standard deviation.
+  STDDEV_SAMP(FunctionName.of("stddev_samp")),
+  // population standard deviation.
+  STDDEV_POP(FunctionName.of("stddev_pop")),
 
   /**
    * Text Functions.
@@ -189,7 +198,28 @@ public enum BuiltinFunctionName {
     ALL_NATIVE_FUNCTIONS = builder.build();
   }
 
+  private static final Map<String, BuiltinFunctionName> AGGREGATION_FUNC_MAPPING =
+      new ImmutableMap.Builder<String, BuiltinFunctionName>()
+          .put("max", BuiltinFunctionName.MAX)
+          .put("min", BuiltinFunctionName.MIN)
+          .put("avg", BuiltinFunctionName.AVG)
+          .put("count", BuiltinFunctionName.COUNT)
+          .put("sum", BuiltinFunctionName.SUM)
+          .put("var_pop", BuiltinFunctionName.VARPOP)
+          .put("var_samp", BuiltinFunctionName.VARSAMP)
+          .put("variance", BuiltinFunctionName.VARPOP)
+          .put("std", BuiltinFunctionName.STDDEV_POP)
+          .put("stddev", BuiltinFunctionName.STDDEV_POP)
+          .put("stddev_pop", BuiltinFunctionName.STDDEV_POP)
+          .put("stddev_samp", BuiltinFunctionName.STDDEV_SAMP)
+          .build();
+
   public static Optional<BuiltinFunctionName> of(String str) {
     return Optional.ofNullable(ALL_NATIVE_FUNCTIONS.getOrDefault(FunctionName.of(str), null));
   }
+
+  public static Optional<BuiltinFunctionName> ofAggregation(String functionName) {
+    return Optional.ofNullable(
+        AGGREGATION_FUNC_MAPPING.getOrDefault(functionName.toLowerCase(Locale.ROOT), null));
+  }
 }
diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java
index 9b42c70e32..fc45f34ffe 100644
--- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java
+++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java
@@ -36,6 +36,7 @@
 import static org.opensearch.sql.ast.dsl.AstDSL.compare;
 import static org.opensearch.sql.ast.dsl.AstDSL.field;
 import static org.opensearch.sql.ast.dsl.AstDSL.filter;
+import static org.opensearch.sql.ast.dsl.AstDSL.filteredAggregate;
 import static org.opensearch.sql.ast.dsl.AstDSL.function;
 import static org.opensearch.sql.ast.dsl.AstDSL.intLiteral;
 import static org.opensearch.sql.ast.dsl.AstDSL.qualifiedName;
@@ -624,4 +625,43 @@ public void limit_offset() {
         )
     );
   }
+
+  /**
+   * SELECT COUNT(NAME) FILTER(WHERE age > 1) FROM test.
+   * This test verifies that the aggregator properties are preserved
+   * when wrapping it in a {@link org.opensearch.sql.expression.aggregation.NamedAggregator}.
+   */
+  @Test
+  public void named_aggregator_with_condition() {
+    assertAnalyzeEqual(
+        LogicalPlanDSL.project(
+            LogicalPlanDSL.aggregation(
+                LogicalPlanDSL.relation("schema"),
+                ImmutableList.of(
+                    DSL.named("count(string_value) filter(where integer_value > 1)",
+                        dsl.count(DSL.ref("string_value", STRING)).condition(dsl.greater(DSL.ref(
+                            "integer_value", INTEGER), DSL.literal(1))))
+                ),
+                emptyList()
+            ),
+            DSL.named("count(string_value) filter(where integer_value > 1)", DSL.ref(
+                "count(string_value) filter(where integer_value > 1)", INTEGER))
+        ),
+        AstDSL.project(
+            AstDSL.agg(
+                AstDSL.relation("schema"),
+                ImmutableList.of(
+                    alias("count(string_value) filter(where integer_value > 1)", filteredAggregate(
+                        "count", qualifiedName("string_value"), function(
+                            ">", qualifiedName("integer_value"), intLiteral(1))))),
+                emptyList(),
+                emptyList(),
+                emptyList()
+            ),
+            AstDSL.alias("count(string_value) filter(where integer_value > 1)", filteredAggregate(
+                "count", qualifiedName("string_value"), function(
+                    ">", qualifiedName("integer_value"), intLiteral(1))))
+        )
+    );
+  }
 }
diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java
index aa8d2b12de..8cb7288273 100644
--- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java
+++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java
@@ -292,6 +292,14 @@ public void aggregation_filter() {
     );
   }
 
+  @Test
+  public void variance_mapto_varPop() {
+    assertAnalyzeEqual(
+        dsl.varPop(DSL.ref("integer_value", INTEGER)),
+        AstDSL.aggregate("variance", qualifiedName("integer_value"))
+    );
+  }
+
   protected Expression analyze(UnresolvedExpression unresolvedExpression) {
     return expressionAnalyzer.analyze(unresolvedExpression, analysisContext);
   }
diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java
new file mode 100644
index 0000000000..ef085a81d3
--- /dev/null
+++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java
@@ -0,0 +1,182 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.expression.aggregation;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+import static org.opensearch.sql.data.model.ExprValueUtils.doubleValue;
+import static org.opensearch.sql.data.model.ExprValueUtils.integerValue;
+import static org.opensearch.sql.data.model.ExprValueUtils.missingValue;
+import static org.opensearch.sql.data.model.ExprValueUtils.nullValue;
+import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE;
+import static org.opensearch.sql.data.type.ExprCoreType.INTEGER;
+import static org.opensearch.sql.expression.DSL.ref;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.opensearch.sql.data.model.ExprValue;
+import org.opensearch.sql.data.model.ExprValueUtils;
+import org.opensearch.sql.expression.DSL;
+import org.opensearch.sql.expression.Expression;
+import org.opensearch.sql.storage.bindingtuple.BindingTuple;
+
+@ExtendWith(MockitoExtension.class)
+public class StdDevAggregatorTest extends AggregationTest {
+
+  @Mock
+  Expression expression;
+
+  @Mock
+  ExprValue tupleValue;
+
+  @Mock
+  BindingTuple tuple;
+
+  @Test
+  public void stddev_sample_field_expression() {
+    ExprValue result =
+        stddevSample(integerValue(1), integerValue(2), integerValue(3), integerValue(4));
+    assertEquals(1.2909944487358056, result.value());
+  }
+
+  @Test
+  public void stddev_population_field_expression() {
+    ExprValue result =
+        stddevPop(integerValue(1), integerValue(2), integerValue(3), integerValue(4));
+    assertEquals(1.118033988749895, result.value());
+  }
+
+  @Test
+  public void stddev_sample_arithmetic_expression() {
+    ExprValue result =
+        aggregation(
+            dsl.stddevSamp(dsl.multiply(ref("integer_value", INTEGER), DSL.literal(10))), tuples);
+    assertEquals(12.909944487358056, result.value());
+  }
+
+  @Test
+  public void stddev_population_arithmetic_expression() {
+    ExprValue result =
+        aggregation(
+            dsl.stddevPop(dsl.multiply(ref("integer_value", INTEGER), DSL.literal(10))), tuples);
+    assertEquals(11.180339887498949, result.value());
+  }
+
+  @Test
+  public void filtered_stddev_sample() {
+    ExprValue result =
+        aggregation(
+            dsl.stddevSamp(ref("integer_value", INTEGER))
+                .condition(dsl.greater(ref("integer_value", INTEGER), DSL.literal(1))),
+            tuples);
+    assertEquals(1.0, result.value());
+  }
+
+  @Test
+  public void filtered_stddev_population() {
+    ExprValue result =
+        aggregation(
+            dsl.stddevPop(ref("integer_value", INTEGER))
+                .condition(dsl.greater(ref("integer_value", INTEGER), DSL.literal(1))),
+            tuples);
+    assertEquals(0.816496580927726, result.value());
+  }
+
+  @Test
+  public void stddev_sample_with_missing() {
+    ExprValue result = stddevSample(integerValue(2), integerValue(1), missingValue());
+    assertEquals(0.7071067811865476, result.value());
+  }
+
+  @Test
+  public void stddev_population_with_missing() {
+    ExprValue result = stddevPop(integerValue(2), integerValue(1), missingValue());
+    assertEquals(0.5, result.value());
+  }
+
+  @Test
+  public void stddev_sample_with_null() {
+    ExprValue result = stddevSample(doubleValue(3d), doubleValue(4d), nullValue());
+    assertEquals(0.7071067811865476, result.value());
+  }
+
+  @Test
+  public void stddev_pop_with_null() {
+    ExprValue result = stddevPop(doubleValue(3d), doubleValue(4d), nullValue());
+    assertEquals(0.5, result.value());
+  }
+
+  @Test
+  public void stddev_sample_with_all_missing_or_null() {
+    ExprValue result = stddevSample(missingValue(), nullValue());
+    assertTrue(result.isNull());
+  }
+
+  @Test
+  public void stddev_pop_with_all_missing_or_null() {
+    ExprValue result = stddevPop(missingValue(), nullValue());
+    assertTrue(result.isNull());
+  }
+
+  @Test
+  public void stddev_sample_to_string() {
+    Aggregator aggregator = dsl.stddevSamp(ref("integer_value", INTEGER));
+    assertEquals("stddev_samp(integer_value)", aggregator.toString());
+  }
+
+  @Test
+  public void stddev_pop_to_string() {
+    Aggregator aggregator = dsl.stddevPop(ref("integer_value", INTEGER));
+    assertEquals("stddev_pop(integer_value)", aggregator.toString());
+  }
+
+  @Test
+  public void stddev_sample_nested_to_string() {
+    Aggregator avgAggregator =
+        dsl.stddevSamp(
+            dsl.multiply(
+                ref("integer_value", INTEGER), DSL.literal(ExprValueUtils.integerValue(10))));
+    assertEquals(
+        String.format("stddev_samp(*(%s, %d))", ref("integer_value", INTEGER), 10),
+        avgAggregator.toString());
+  }
+
+  private ExprValue stddevSample(ExprValue value, ExprValue... values) {
+    when(expression.valueOf(any())).thenReturn(value, values);
+    when(expression.type()).thenReturn(DOUBLE);
+    return aggregation(dsl.stddevSamp(expression), mockTuples(value, values));
+  }
+
+  private ExprValue stddevPop(ExprValue value, ExprValue... values) {
+    when(expression.valueOf(any())).thenReturn(value, values);
+    when(expression.type()).thenReturn(DOUBLE);
+    return aggregation(dsl.stddevPop(expression), mockTuples(value, values));
+  }
+
+  private List<ExprValue> mockTuples(ExprValue value, ExprValue... values) {
+    List<ExprValue> mockTuples = new ArrayList<>();
+    when(tupleValue.bindingTuples()).thenReturn(tuple);
+    mockTuples.add(tupleValue);
+    for (ExprValue exprValue : values) {
+      mockTuples.add(tupleValue);
+    }
+    return mockTuples;
+  }
+}
diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/VarianceAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/VarianceAggregatorTest.java
new file mode 100644
index 0000000000..09fb8b8012
--- /dev/null
+++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/VarianceAggregatorTest.java
@@ -0,0 +1,190 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.expression.aggregation;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+import static org.opensearch.sql.data.model.ExprValueUtils.doubleValue;
+import static org.opensearch.sql.data.model.ExprValueUtils.integerValue;
+import static org.opensearch.sql.data.model.ExprValueUtils.missingValue;
+import static org.opensearch.sql.data.model.ExprValueUtils.nullValue;
+import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE;
+import static org.opensearch.sql.data.type.ExprCoreType.INTEGER;
+import static org.opensearch.sql.expression.DSL.ref;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.opensearch.sql.data.model.ExprValue;
+import org.opensearch.sql.data.model.ExprValueUtils;
+import org.opensearch.sql.exception.ExpressionEvaluationException;
+import org.opensearch.sql.expression.DSL;
+import org.opensearch.sql.expression.Expression;
+import org.opensearch.sql.storage.bindingtuple.BindingTuple;
+
+@ExtendWith(MockitoExtension.class)
+public class VarianceAggregatorTest extends AggregationTest {
+
+  @Mock Expression expression;
+
+  @Mock ExprValue tupleValue;
+
+  @Mock BindingTuple tuple;
+
+  @Test
+  public void variance_sample_field_expression() {
+    ExprValue result =
+        varianceSample(integerValue(1), integerValue(2), integerValue(3), integerValue(4));
+    assertEquals(1.6666666666666667, result.value());
+  }
+
+  @Test
+  public void variance_population_field_expression() {
+    ExprValue result =
+        variancePop(integerValue(1), integerValue(2), integerValue(3), integerValue(4));
+    assertEquals(1.25, result.value());
+  }
+
+  @Test
+  public void variance_sample_arithmetic_expression() {
+    ExprValue result =
+        aggregation(
+            dsl.varSamp(dsl.multiply(ref("integer_value", INTEGER), DSL.literal(10))), tuples);
+    assertEquals(166.66666666666666, result.value());
+  }
+
+  @Test
+  public void variance_pop_arithmetic_expression() {
+    ExprValue result =
+        aggregation(
+            dsl.varPop(dsl.multiply(ref("integer_value", INTEGER), DSL.literal(10))), tuples);
+    assertEquals(125d, result.value());
+  }
+
+  @Test
+  public void filtered_variance_sample() {
+    ExprValue result =
+        aggregation(
+            dsl.varSamp(ref("integer_value", INTEGER))
+                .condition(dsl.greater(ref("integer_value", INTEGER), DSL.literal(1))),
+            tuples);
+    assertEquals(1.0, result.value());
+  }
+
+  @Test
+  public void filtered_variance_pop() {
+    ExprValue result =
+        aggregation(
+            dsl.varPop(ref("integer_value", INTEGER))
+                .condition(dsl.greater(ref("integer_value", INTEGER), DSL.literal(1))),
+            tuples);
+    assertEquals(0.6666666666666666, result.value());
+  }
+
+  @Test
+  public void variance_sample_with_missing() {
+    ExprValue result = varianceSample(integerValue(2), integerValue(1), missingValue());
+    assertEquals(0.5, result.value());
+  }
+
+  @Test
+  public void variance_population_with_missing() {
+    ExprValue result = variancePop(integerValue(2), integerValue(1), missingValue());
+    assertEquals(0.25, result.value());
+  }
+
+  @Test
+  public void variance_sample_with_null() {
+    ExprValue result = varianceSample(doubleValue(3d), doubleValue(4d), nullValue());
+    assertEquals(0.5, result.value());
+  }
+
+  @Test
+  public void variance_pop_with_null() {
+    ExprValue result = variancePop(doubleValue(3d), doubleValue(4d), nullValue());
+    assertEquals(0.25, result.value());
+  }
+
+  @Test
+  public void variance_sample_with_all_missing_or_null() {
+    ExprValue result = varianceSample(missingValue(), nullValue());
+    assertTrue(result.isNull());
+  }
+
+  @Test
+  public void variance_pop_with_all_missing_or_null() {
+    ExprValue result = variancePop(missingValue(), nullValue());
+    assertTrue(result.isNull());
+  }
+
+  @Test
+  public void valueOf() {
+    ExpressionEvaluationException exception =
+        assertThrows(
+            ExpressionEvaluationException.class,
+            () -> dsl.avg(ref("double_value", DOUBLE)).valueOf(valueEnv()));
+    assertEquals("can't evaluate on aggregator: avg", exception.getMessage());
+  }
+
+  @Test
+  public void variance_sample_to_string() {
+    Aggregator avgAggregator = dsl.varSamp(ref("integer_value", INTEGER));
+    assertEquals("var_samp(integer_value)", avgAggregator.toString());
+  }
+
+  @Test
+  public void variance_pop_to_string() {
+    Aggregator avgAggregator = dsl.varPop(ref("integer_value", INTEGER));
+    assertEquals("var_pop(integer_value)", avgAggregator.toString());
+  }
+
+  @Test
+  public void variance_sample_nested_to_string() {
+    Aggregator avgAggregator =
+        dsl.varSamp(
+            dsl.multiply(
+                ref("integer_value", INTEGER), DSL.literal(ExprValueUtils.integerValue(10))));
+    assertEquals(
+        String.format("var_samp(*(%s, %d))", ref("integer_value", INTEGER), 10),
+        avgAggregator.toString());
+  }
+
+  private ExprValue varianceSample(ExprValue value, ExprValue... values) {
+    when(expression.valueOf(any())).thenReturn(value, values);
+    when(expression.type()).thenReturn(DOUBLE);
+    return aggregation(dsl.varSamp(expression), mockTuples(value, values));
+  }
+
+  private ExprValue variancePop(ExprValue value, ExprValue... values) {
+    when(expression.valueOf(any())).thenReturn(value, values);
+    when(expression.type()).thenReturn(DOUBLE);
+    return aggregation(dsl.varPop(expression), mockTuples(value, values));
+  }
+
+  private List<ExprValue> mockTuples(ExprValue value, ExprValue... values) {
+    List<ExprValue> mockTuples = new ArrayList<>();
+    when(tupleValue.bindingTuples()).thenReturn(tuple);
+    mockTuples.add(tupleValue);
+    for (ExprValue exprValue : values) {
+      mockTuples.add(tupleValue);
+    }
+    return mockTuples;
+  }
+}
diff --git a/docs/user/admin/settings.rst b/docs/user/admin/settings.rst
index 96f93c6020..b5da4e28e2 100644
--- a/docs/user/admin/settings.rst
+++ b/docs/user/admin/settings.rst
@@ -16,6 +16,35 @@ Introduction
 
 When OpenSearch bootstraps, SQL plugin will register a few settings in OpenSearch cluster settings. Most of the settings are able to change dynamically so you can control the behavior of SQL plugin without need to bounce your cluster. You can update the settings by sending requests to either ``_cluster/settings`` or ``_plugins/_query/settings`` endpoint, though the examples are sending to the latter.
 
+Breaking Change
+===============
+opendistro.sql.engine.new.enabled
+---------------------------------
+The opendistro.sql.engine.new.enabled setting is deprecated and will be removed in a future release. From OpenSearch 1.0, the new engine is always enabled.
+
+opendistro.sql.query.analysis.enabled
+-------------------------------------
+The opendistro.sql.query.analysis.enabled setting is deprecated and will be removed in a future release. From OpenSearch 1.0, query analysis in the legacy engine is disabled.
+
+opendistro.sql.query.analysis.semantic.suggestion
+-------------------------------------------------
+The opendistro.sql.query.analysis.semantic.suggestion setting is deprecated and will be removed in a future release. From OpenSearch 1.0, the query analysis suggestion in the legacy engine is disabled.
+
+opendistro.sql.query.analysis.semantic.threshold
+------------------------------------------------
+The opendistro.sql.query.analysis.semantic.threshold setting is deprecated and will be removed in a future release. From OpenSearch 1.0, the query analysis threshold in the legacy engine is disabled.
+
+opendistro.sql.query.response.format
+------------------------------------
+The opendistro.sql.query.response.format setting is deprecated and will be removed in a future release. From OpenSearch 1.0, the query response format defaults to JDBC format. `You can change the format by using query parameters <../interfaces/protocol.rst>`_.
+
+opendistro.sql.cursor.enabled
+-----------------------------
+The opendistro.sql.cursor.enabled setting is deprecated and will be removed in a future release. From OpenSearch 1.0, the cursor feature is enabled by default.
+
+opendistro.sql.cursor.fetch_size
+--------------------------------
+The opendistro.sql.cursor.fetch_size setting is deprecated and will be removed in a future release. From OpenSearch 1.0, the fetch_size in the query body decides whether to create the cursor context. No cursor will be created if fetch_size = 0.
 
 plugins.sql.enabled
 ======================
diff --git a/docs/user/dql/aggregations.rst b/docs/user/dql/aggregations.rst
index 98b565e1ec..1d6d172981 100644
--- a/docs/user/dql/aggregations.rst
+++ b/docs/user/dql/aggregations.rst
@@ -135,6 +135,228 @@ Besides regular identifiers, ``COUNT`` aggregate function also accepts arguments
 2. ``COUNT(*)`` will count the number of all its input rows.
 3. ``COUNT(1)`` is same as ``COUNT(*)`` because any non-null literal will count.
 
+Aggregation Functions
+=====================
+
+COUNT
+-----
+
+Description
+>>>>>>>>>>>
+
+Usage: COUNT(expr). Returns a count of the number of expr values in the rows retrieved by a SELECT statement.
+
+Example::
+
+    os> SELECT gender, count(*) as countV FROM accounts GROUP BY gender;
+    fetched rows / total rows = 2/2
+    +----------+----------+
+    | gender   | countV   |
+    |----------+----------|
+    | F        | 1        |
+    | M        | 3        |
+    +----------+----------+
+
+SUM
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: SUM(expr). Returns the sum of expr.
+
+Example::
+
+    os> SELECT gender, sum(age) as sumV FROM accounts GROUP BY gender;
+    fetched rows / total rows = 2/2
+    +----------+--------+
+    | gender   | sumV   |
+    |----------+--------|
+    | F        | 28     |
+    | M        | 101    |
+    +----------+--------+
+
+AVG
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: AVG(expr). Returns the average value of expr.
+
+Example::
+
+    os> SELECT gender, avg(age) as avgV FROM accounts GROUP BY gender;
+    fetched rows / total rows = 2/2
+    +----------+--------------------+
+    | gender   | avgV               |
+    |----------+--------------------|
+    | F        | 28.0               |
+    | M        | 33.666666666666664 |
+    +----------+--------------------+
+
+MAX
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: MAX(expr). Returns the maximum value of expr.
+
+Example::
+
+    os> SELECT max(age) as maxV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------+
+    | maxV   |
+    |--------|
+    | 36     |
+    +--------+
+
+MIN
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: MIN(expr). Returns the minimum value of expr.
+
+Example::
+
+    os> SELECT min(age) as minV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------+
+    | minV   |
+    |--------|
+    | 28     |
+    +--------+
+
+VAR_POP
+-------
+
+Description
+>>>>>>>>>>>
+
+Usage: VAR_POP(expr). Returns the population variance of expr.
+
+Example::
+
+    os> SELECT var_pop(age) as varV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------+
+    | varV   |
+    |--------|
+    | 8.1875 |
+    +--------+
+
+VAR_SAMP
+--------
+
+Description
+>>>>>>>>>>>
+
+Usage: VAR_SAMP(expr). Returns the sample variance of expr.
+
+Example::
+
+    os> SELECT var_samp(age) as varV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | varV               |
+    |--------------------|
+    | 10.916666666666666 |
+    +--------------------+
+
+VARIANCE
+--------
+
+Description
+>>>>>>>>>>>
+
+Usage: VARIANCE(expr). Returns the population variance of expr. VARIANCE() is a synonym for the VAR_POP() function.
+
+Example::
+
+    os> SELECT variance(age) as varV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------+
+    | varV   |
+    |--------|
+    | 8.1875 |
+    +--------+
+
+STDDEV_POP
+----------
+
+Description
+>>>>>>>>>>>
+
+Usage: STDDEV_POP(expr). Returns the population standard deviation of expr.
+
+Example::
+
+    os> SELECT stddev_pop(age) as stddevV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | stddevV            |
+    |--------------------|
+    | 2.8613807855648994 |
+    +--------------------+
+
+STDDEV_SAMP
+-----------
+
+Description
+>>>>>>>>>>>
+
+Usage: STDDEV_SAMP(expr). Returns the sample standard deviation of expr.
+
+Example::
+
+    os> SELECT stddev_samp(age) as stddevV FROM accounts;
+    fetched rows / total rows = 1/1
+    +-------------------+
+    | stddevV           |
+    |-------------------|
+    | 3.304037933599835 |
+    +-------------------+
+
+STD
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: STD(expr). Returns the population standard deviation of expr. STD() is a synonym for the STDDEV_POP() function.
+
+Example::
+
+    os> SELECT stddev_pop(age) as stddevV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | stddevV            |
+    |--------------------|
+    | 2.8613807855648994 |
+    +--------------------+
+
+STDDEV
+------
+
+Description
+>>>>>>>>>>>
+
+Usage: STDDEV(expr). Returns the population standard deviation of expr. STDDEV() is a synonym for the STDDEV_POP() function.
+
+Example::
+
+    os> SELECT stddev(age) as stddevV FROM accounts;
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | stddevV            |
+    |--------------------|
+    | 2.8613807855648994 |
+    +--------------------+
+
 HAVING Clause
 =============
 
diff --git a/docs/user/dql/window.rst b/docs/user/dql/window.rst
index 6d71f0637a..feb2aaa44e 100644
--- a/docs/user/dql/window.rst
+++ b/docs/user/dql/window.rst
@@ -20,7 +20,7 @@ A window function consists of 2 pieces: a function and a window definition. A wi
 
 There are three categories of common window functions:
 
-1. **Aggregate Functions**: COUNT(), MIN(), MAX(), AVG() and SUM().
+1. **Aggregate Functions**: COUNT(), MIN(), MAX(), AVG(), SUM(), STDDEV_POP(), STDDEV_SAMP(), VAR_POP() and VAR_SAMP().
 2. **Ranking Functions**: ROW_NUMBER(), RANK(), DENSE_RANK(), PERCENT_RANK() and NTILE().
 3. **Analytic Functions**: CUME_DIST(), LAG() and LEAD().
 
@@ -146,6 +146,90 @@ Here is an example for ``SUM`` function::
     | M        | 39225     | 49091 |
     +----------+-----------+-------+
 
+STDDEV_POP
+----------
+
+Here is an example for ``STDDEV_POP`` function::
+
+    os> SELECT
+    ...   gender, balance,
+    ...   STDDEV_POP(balance) OVER(
+    ...     PARTITION BY gender ORDER BY balance
+    ... ) AS val
+    ... FROM accounts;
+    fetched rows / total rows = 4/4
+    +----------+-----------+--------------------+
+    | gender   | balance   | val                |
+    |----------+-----------+--------------------|
+    | F        | 32838     | 0.0                |
+    | M        | 4180      | 0.0                |
+    | M        | 5686      | 753.0              |
+    | M        | 39225     | 16177.091422406222 |
+    +----------+-----------+--------------------+
+
+STDDEV_SAMP
+-----------
+
+Here is an example for ``STDDEV_SAMP`` function::
+
+    os> SELECT
+    ...   gender, balance,
+    ...   STDDEV_SAMP(balance) OVER(
+    ...     PARTITION BY gender ORDER BY balance
+    ... ) AS val
+    ... FROM accounts;
+    fetched rows / total rows = 4/4
+    +----------+-----------+--------------------+
+    | gender   | balance   | val                |
+    |----------+-----------+--------------------|
+    | F        | 32838     | 0.0                |
+    | M        | 4180      | 0.0                |
+    | M        | 5686      | 1064.9028124669405 |
+    | M        | 39225     | 19812.809753624886 |
+    +----------+-----------+--------------------+
+
+VAR_POP
+-------
+
+Here is an example for ``VAR_POP`` function::
+
+    os> SELECT
+    ...   gender, balance,
+    ...   VAR_POP(balance) OVER(
+    ...     PARTITION BY gender ORDER BY balance
+    ... ) AS val
+    ... FROM accounts;
+    fetched rows / total rows = 4/4
+    +----------+-----------+--------------------+
+    | gender   | balance   | val                |
+    |----------+-----------+--------------------|
+    | F        | 32838     | 0.0                |
+    | M        | 4180      | 0.0                |
+    | M        | 5686      | 567009.0           |
+    | M        | 39225     | 261698286.88888893 |
+    +----------+-----------+--------------------+
+
+VAR_SAMP
+--------
+
+Here is an example for ``VAR_SAMP`` function::
+
+    os> SELECT
+    ...   gender, balance,
+    ...   VAR_SAMP(balance) OVER(
+    ...     PARTITION BY gender ORDER BY balance
+    ... ) AS val
+    ... FROM accounts;
+    fetched rows / total rows = 4/4
+    +----------+-----------+-------------------+
+    | gender   | balance   | val               |
+    |----------+-----------+-------------------|
+    | F        | 32838     | 0.0               |
+    | M        | 4180      | 0.0               |
+    | M        | 5686      | 1134018.0         |
+    | M        | 39225     | 392547430.3333334 |
+    +----------+-----------+-------------------+
+
 
 Ranking Functions
 =================
diff --git a/docs/user/ppl/cmd/stats.rst b/docs/user/ppl/cmd/stats.rst
index 3aca304fcd..f6dad255ef 100644
--- a/docs/user/ppl/cmd/stats.rst
+++ b/docs/user/ppl/cmd/stats.rst
@@ -38,6 +38,174 @@ stats <aggregation>... [by-clause]...
 * aggregation: mandatory. A aggregation function. The argument of aggregation must be field.
 * by-clause: optional. The one or more fields to group the results by. **Default**: If no <by-clause> is specified, the stats command returns only one row, which is the aggregation over the entire result set.
 
+
+Aggregation Functions
+=====================
+
+COUNT
+-----
+
+Description
+>>>>>>>>>>>
+
+Usage: Returns a count of the number of expr in the rows retrieved.
+
+Example::
+
+    os> source=accounts | stats count();
+    fetched rows / total rows = 1/1
+    +-----------+
+    | count()   |
+    |-----------|
+    | 4         |
+    +-----------+
+
+SUM
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: SUM(expr). Returns the sum of expr.
+
+Example::
+
+    os> source=accounts | stats sum(age) by gender;
+    fetched rows / total rows = 2/2
+    +------------+----------+
+    | sum(age)   | gender   |
+    |------------+----------|
+    | 28         | F        |
+    | 101        | M        |
+    +------------+----------+
+
+AVG
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: AVG(expr). Returns the average value of expr.
+
+Example::
+
+    os> source=accounts | stats avg(age) by gender;
+    fetched rows / total rows = 2/2
+    +--------------------+----------+
+    | avg(age)           | gender   |
+    |--------------------+----------|
+    | 28.0               | F        |
+    | 33.666666666666664 | M        |
+    +--------------------+----------+
+
+MAX
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: MAX(expr). Returns the maximum value of expr.
+
+Example::
+
+    os> source=accounts | stats max(age);
+    fetched rows / total rows = 1/1
+    +------------+
+    | max(age)   |
+    |------------|
+    | 36         |
+    +------------+
+
+MIN
+---
+
+Description
+>>>>>>>>>>>
+
+Usage: MIN(expr). Returns the minimum value of expr.
+
+Example::
+
+    os> source=accounts | stats min(age);
+    fetched rows / total rows = 1/1
+    +------------+
+    | min(age)   |
+    |------------|
+    | 28         |
+    +------------+
+
+VAR_SAMP
+--------
+
+Description
+>>>>>>>>>>>
+
+Usage: VAR_SAMP(expr). Returns the sample variance of expr.
+
+Example::
+
+    os> source=accounts | stats var_samp(age);
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | var_samp(age)      |
+    |--------------------|
+    | 10.916666666666666 |
+    +--------------------+
+
+VAR_POP
+-------
+
+Description
+>>>>>>>>>>>
+
+Usage: VAR_POP(expr). Returns the population variance of expr.
+
+Example::
+
+    os> source=accounts | stats var_pop(age);
+    fetched rows / total rows = 1/1
+    +----------------+
+    | var_pop(age)   |
+    |----------------|
+    | 8.1875         |
+    +----------------+
+
+STDDEV_SAMP
+-----------
+
+Description
+>>>>>>>>>>>
+
+Usage: STDDEV_SAMP(expr). Returns the sample standard deviation of expr.
+
+Example::
+
+    os> source=accounts | stats stddev_samp(age);
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | stddev_samp(age)   |
+    |--------------------|
+    | 3.304037933599835  |
+    +--------------------+
+
+STDDEV_POP
+----------
+
+Description
+>>>>>>>>>>>
+
+Usage: STDDEV_POP(expr). Returns the population standard deviation of expr.
+
+Example::
+
+    os> source=accounts | stats stddev_pop(age);
+    fetched rows / total rows = 1/1
+    +--------------------+
+    | stddev_pop(age)    |
+    |--------------------|
+    | 2.8613807855648994 |
+    +--------------------+
+
 Example 1: Calculate the count of events
 ========================================
 
diff --git a/doctest/build.gradle b/doctest/build.gradle
index 7e72435b35..5df8df6011 100644
--- a/doctest/build.gradle
+++ b/doctest/build.gradle
@@ -31,7 +31,7 @@ task bootstrap(type: Exec) {
 }
 
 //evaluationDependsOn(':')
-task startES(type: SpawnProcessTask) {
+task startOpenSearch(type: SpawnProcessTask) {
     command "${path}/gradlew -p ${plugin_path} runRestTestCluster"
     ready 'started'
 }
@@ -45,10 +45,10 @@ task doctest(type: Exec, dependsOn: ['bootstrap']) {
     }
 }
 
-task stopES(type: KillProcessTask)
+task stopOpenSearch(type: KillProcessTask)
 
-doctest.dependsOn startES
-doctest.finalizedBy stopES
+doctest.dependsOn startOpenSearch
+doctest.finalizedBy stopOpenSearch
 
 build.dependsOn doctest
 clean.dependsOn(cleanBootstrap)
@@ -56,7 +56,7 @@ clean.dependsOn(cleanBootstrap)
 testClusters {
     docTestCluster {
         plugin ':plugin'
-        testDistribution = 'archive'
+        testDistribution = 'integ_test'
     }
 }
 tasks.register("runRestTestCluster", RunTask) {
diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java
new file mode 100644
index 0000000000..3cbb222afe
--- /dev/null
+++ b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java
@@ -0,0 +1,40 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ *  The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ *
+ */
+
+package org.opensearch.sql.sql;
+
+import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK;
+import static org.opensearch.sql.util.MatcherUtils.rows;
+import static org.opensearch.sql.util.MatcherUtils.schema;
+import static org.opensearch.sql.util.MatcherUtils.verifyDataRows;
+import static org.opensearch.sql.util.MatcherUtils.verifySchema;
+
+import java.io.IOException;
+import org.json.JSONObject;
+import org.junit.Test;
+import org.opensearch.sql.legacy.SQLIntegTestCase;
+
+public class AggregationIT extends SQLIntegTestCase {
+  @Override
+  protected void init() throws Exception {
+    loadIndex(Index.BANK);
+  }
+
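+  // Verifies that a COUNT(*) with a FILTER clause is evaluated over a subquery-derived table
+  // and returns the number of matching documents.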
+  @Test
+  public void filteredAggregateWithSubquery() throws IOException {
+    JSONObject response = executeQuery(
+        "SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + TEST_INDEX_BANK
+            + ") AS a");
+    verifySchema(response, schema("COUNT(*)", null, "integer"));
+    verifyDataRows(response, rows(3));
+  }
+}
diff --git a/integ-test/src/test/resources/correctness/queries/aggregation.txt b/integ-test/src/test/resources/correctness/queries/aggregation.txt
index 6c6e5b73a1..45aa658783 100644
--- a/integ-test/src/test/resources/correctness/queries/aggregation.txt
+++ b/integ-test/src/test/resources/correctness/queries/aggregation.txt
@@ -5,4 +5,8 @@ SELECT SUM(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
 SELECT MAX(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
 SELECT MAX(timestamp) FROM opensearch_dashboards_sample_data_flights
 SELECT MIN(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
-SELECT MIN(timestamp) FROM opensearch_dashboards_sample_data_flights
\ No newline at end of file
+SELECT MIN(timestamp) FROM opensearch_dashboards_sample_data_flights
+SELECT VAR_POP(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
+SELECT VAR_SAMP(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
+SELECT STDDEV_POP(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
+SELECT STDDEV_SAMP(AvgTicketPrice) FROM opensearch_dashboards_sample_data_flights
\ No newline at end of file
diff --git a/integ-test/src/test/resources/correctness/queries/subquries.txt b/integ-test/src/test/resources/correctness/queries/subqueries.txt
similarity index 100%
rename from integ-test/src/test/resources/correctness/queries/subquries.txt
rename to integ-test/src/test/resources/correctness/queries/subqueries.txt
diff --git a/integ-test/src/test/resources/correctness/queries/window.txt b/integ-test/src/test/resources/correctness/queries/window.txt
index a8d134a254..c3f2715322 100644
--- a/integ-test/src/test/resources/correctness/queries/window.txt
+++ b/integ-test/src/test/resources/correctness/queries/window.txt
@@ -9,10 +9,18 @@ SELECT DistanceMiles, SUM(DistanceMiles) OVER () AS num FROM opensearch_dashboar
 SELECT DistanceMiles, AVG(DistanceMiles) OVER () AS num FROM opensearch_dashboards_sample_data_flights
 SELECT DistanceMiles, MAX(DistanceMiles) OVER () AS num FROM opensearch_dashboards_sample_data_flights
 SELECT DistanceMiles, MIN(DistanceMiles) OVER () AS num FROM opensearch_dashboards_sample_data_flights
+SELECT AvgTicketPrice, STDDEV_POP(AvgTicketPrice) OVER () AS num FROM opensearch_dashboards_sample_data_flights
+SELECT AvgTicketPrice, STDDEV_SAMP(AvgTicketPrice) OVER () AS num FROM opensearch_dashboards_sample_data_flights
+SELECT AvgTicketPrice, VAR_POP(AvgTicketPrice) OVER () AS num FROM opensearch_dashboards_sample_data_flights
+SELECT AvgTicketPrice, VAR_SAMP(AvgTicketPrice) OVER () AS num FROM opensearch_dashboards_sample_data_flights
 SELECT FlightDelayMin, DistanceMiles, SUM(DistanceMiles) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights
 SELECT FlightDelayMin, DistanceMiles, AVG(DistanceMiles) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights
 SELECT FlightDelayMin, DistanceMiles, MAX(DistanceMiles) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights
 SELECT FlightDelayMin, DistanceMiles, MIN(DistanceMiles) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights
+SELECT FlightDelayMin, AvgTicketPrice, STDDEV_POP(AvgTicketPrice) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights ORDER BY FlightDelayMin
+SELECT FlightDelayMin, AvgTicketPrice, STDDEV_SAMP(AvgTicketPrice) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights ORDER BY FlightDelayMin
+SELECT FlightDelayMin, AvgTicketPrice, VAR_POP(AvgTicketPrice) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights ORDER BY FlightDelayMin
+SELECT FlightDelayMin, AvgTicketPrice, VAR_SAMP(AvgTicketPrice) OVER (ORDER BY FlightDelayMin) AS num FROM opensearch_dashboards_sample_data_flights ORDER BY FlightDelayMin
 SELECT user, RANK() OVER (ORDER BY user) AS rnk FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, DENSE_RANK() OVER (ORDER BY user) AS rnk FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, COUNT(day_of_week_i) OVER (ORDER BY user) AS cnt FROM opensearch_dashboards_sample_data_ecommerce
@@ -20,6 +28,8 @@ SELECT user, SUM(day_of_week_i) OVER (ORDER BY user) AS num FROM opensearch_dash
 SELECT user, AVG(day_of_week_i) OVER (ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, MAX(day_of_week_i) OVER (ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, MIN(day_of_week_i) OVER (ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce
+SELECT user, STDDEV_POP(day_of_week_i) OVER (ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce ORDER BY user
+SELECT user, VAR_POP(day_of_week_i) OVER (ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce ORDER BY user
 SELECT user, RANK() OVER (ORDER BY user DESC) AS rnk FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, DENSE_RANK() OVER (ORDER BY user DESC) AS rnk FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, COUNT(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS cnt FROM opensearch_dashboards_sample_data_ecommerce
@@ -27,6 +37,8 @@ SELECT user, SUM(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS nu
 SELECT user, AVG(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS num FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, MAX(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS num FROM opensearch_dashboards_sample_data_ecommerce
 SELECT user, MIN(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS num FROM opensearch_dashboards_sample_data_ecommerce
+SELECT user, STDDEV_POP(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS num FROM opensearch_dashboards_sample_data_ecommerce ORDER BY user
+SELECT user, VAR_POP(day_of_week_i) OVER (PARTITION BY user ORDER BY order_id) AS num FROM opensearch_dashboards_sample_data_ecommerce ORDER BY user
 SELECT customer_gender, user, ROW_NUMBER() OVER (PARTITION BY customer_gender ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce
 SELECT customer_gender, user, RANK() OVER (PARTITION BY customer_gender ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce
 SELECT customer_gender, user, DENSE_RANK() OVER (PARTITION BY customer_gender ORDER BY user) AS num FROM opensearch_dashboards_sample_data_ecommerce
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java
index 313347aec1..001363b476 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java
@@ -63,7 +63,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
-import lombok.AllArgsConstructor;
+import lombok.Getter;
 import lombok.Setter;
 import org.opensearch.common.time.DateFormatters;
 import org.opensearch.sql.data.model.ExprBooleanValue;
@@ -86,11 +86,11 @@
 import org.opensearch.sql.opensearch.data.utils.Content;
 import org.opensearch.sql.opensearch.data.utils.ObjectContent;
 import org.opensearch.sql.opensearch.data.utils.OpenSearchJsonContent;
+import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser;
 
 /**
  * Construct ExprValue from OpenSearch response.
  */
-@AllArgsConstructor
 public class OpenSearchExprValueFactory {
   /**
    * The Mapping of Field and ExprType.
@@ -98,6 +98,10 @@ public class OpenSearchExprValueFactory {
   @Setter
   private Map<String, ExprType> typeMapping;
 
+  @Getter
+  @Setter
+  private OpenSearchAggregationResponseParser parser;
+
   private static final DateTimeFormatter DATE_TIME_FORMATTER =
       new DateTimeFormatterBuilder()
           .appendOptional(SQL_LITERAL_DATE_TIME_FORMAT)
@@ -131,6 +135,14 @@ public class OpenSearchExprValueFactory {
           .put(OPENSEARCH_BINARY, c -> new OpenSearchExprBinaryValue(c.stringValue()))
           .build();
 
+  /**
+   * Constructor of OpenSearchExprValueFactory.
+   */
+  public OpenSearchExprValueFactory(
+      Map<String, ExprType> typeMapping) {
+    this.typeMapping = typeMapping;
+  }
+
   /**
    * The struct construction has the following assumption. 1. The field has OpenSearch Object
    * data type. https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html 2. The
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParser.java
deleted file mode 100644
index bb029cddb0..0000000000
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParser.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-/*
- *
- *    Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- *    Licensed under the Apache License, Version 2.0 (the "License").
- *    You may not use this file except in compliance with the License.
- *    A copy of the License is located at
- *
- *        http://www.apache.org/licenses/LICENSE-2.0
- *
- *    or in the "license" file accompanying this file. This file is distributed
- *    on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- *    express or implied. See the License for the specific language governing
- *    permissions and limitations under the License.
- *
- */
-
-package org.opensearch.sql.opensearch.response;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import lombok.experimental.UtilityClass;
-import org.opensearch.search.aggregations.Aggregation;
-import org.opensearch.search.aggregations.Aggregations;
-import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation;
-import org.opensearch.search.aggregations.bucket.filter.Filter;
-import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation;
-
-/**
- * AggregationResponseParser.
- */
-@UtilityClass
-public class OpenSearchAggregationResponseParser {
-
-  /**
-   * Parse Aggregations as a list of field and value map.
-   *
-   * @param aggregations aggregations
-   * @return a list of field and value map
-   */
-  public static List<Map<String, Object>> parse(Aggregations aggregations) {
-    List<Aggregation> aggregationList = aggregations.asList();
-    ImmutableList.Builder<Map<String, Object>> builder = new ImmutableList.Builder<>();
-    Map<String, Object> noBucketMap = new HashMap<>();
-
-    for (Aggregation aggregation : aggregationList) {
-      if (aggregation instanceof CompositeAggregation) {
-        for (CompositeAggregation.Bucket bucket :
-            ((CompositeAggregation) aggregation).getBuckets()) {
-          builder.add(parse(bucket));
-        }
-      } else {
-        noBucketMap.putAll(parseInternal(aggregation));
-      }
-
-    }
-    // Todo, there is no better way to difference the with/without bucket from aggregations result.
-    return noBucketMap.isEmpty() ? builder.build() : Collections.singletonList(noBucketMap);
-  }
-
-  private static Map<String, Object> parse(CompositeAggregation.Bucket bucket) {
-    Map<String, Object> resultMap = new HashMap<>();
-    // The NodeClient return InternalComposite
-
-    // build <groupKey, value> pair
-    resultMap.putAll(bucket.getKey());
-
-    // build <aggKey, value> pair
-    for (Aggregation aggregation : bucket.getAggregations()) {
-      resultMap.putAll(parseInternal(aggregation));
-    }
-
-    return resultMap;
-  }
-
-  private static Map<String, Object> parseInternal(Aggregation aggregation) {
-    Map<String, Object> resultMap = new HashMap<>();
-    if (aggregation instanceof NumericMetricsAggregation.SingleValue) {
-      resultMap.put(
-          aggregation.getName(),
-          handleNanValue(((NumericMetricsAggregation.SingleValue) aggregation).value()));
-    } else if (aggregation instanceof Filter) {
-      // parse sub-aggregations for FilterAggregation response
-      List<Aggregation> aggList = ((Filter) aggregation).getAggregations().asList();
-      aggList.forEach(internalAgg -> {
-        Map<String, Object> intermediateMap = parseInternal(internalAgg);
-        resultMap.put(internalAgg.getName(), intermediateMap.get(internalAgg.getName()));
-      });
-    } else {
-      throw new IllegalStateException("unsupported aggregation type " + aggregation.getType());
-    }
-    return resultMap;
-  }
-
-  @VisibleForTesting
-  protected static Object handleNanValue(double value) {
-    return Double.isNaN(value) ? null : value;
-  }
-}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java
index fc7421aec3..156490d93a 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java
@@ -103,7 +103,7 @@ public boolean isAggregationResponse() {
    */
   public Iterator<ExprValue> iterator() {
     if (isAggregationResponse()) {
-      return OpenSearchAggregationResponseParser.parse(aggregations).stream().map(entry -> {
+      return exprValueFactory.getParser().parse(aggregations).stream().map(entry -> {
         ImmutableMap.Builder<String, ExprValue> builder = new ImmutableMap.Builder<>();
         for (Map.Entry<String, Object> value : entry.entrySet()) {
           builder.put(value.getKey(), exprValueFactory.construct(value.getKey(), value.getValue()));
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java
new file mode 100644
index 0000000000..00e8a5154c
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java
@@ -0,0 +1,51 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.opensearch.search.aggregations.Aggregations;
+import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation;
+
+/**
+ * Composite Aggregation Parser which includes the composite aggregation and its metric parsers.
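+ *
+ * <p>Illustrative sketch (assumes a {@code SearchResponse} named {@code searchResponse} whose
+ * composite buckets each carry a metric named "avg(age)"):
+ * <pre>{@code
+ *   OpenSearchAggregationResponseParser parser =
+ *       new CompositeAggregationParser(new SingleValueParser("avg(age)"));
+ *   List<Map<String, Object>> rows = parser.parse(searchResponse.getAggregations());
+ * }</pre>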
+ */
+public class CompositeAggregationParser implements OpenSearchAggregationResponseParser {
+
+  private final MetricParserHelper metricsParser;
+
+  public CompositeAggregationParser(MetricParser... metricParserList) {
+    metricsParser = new MetricParserHelper(Arrays.asList(metricParserList));
+  }
+
+  public CompositeAggregationParser(List<MetricParser> metricParserList) {
+    metricsParser = new MetricParserHelper(metricParserList);
+  }
+
+  @Override
+  public List<Map<String, Object>> parse(Aggregations aggregations) {
+    return ((CompositeAggregation) aggregations.asList().get(0))
+        .getBuckets().stream().map(this::parse).collect(Collectors.toList());
+  }
+
+  private Map<String, Object> parse(CompositeAggregation.Bucket bucket) {
+    Map<String, Object> resultMap = new HashMap<>();
+    resultMap.putAll(bucket.getKey());
+    resultMap.putAll(metricsParser.parse(bucket.getAggregations()));
+    return resultMap;
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java
new file mode 100644
index 0000000000..cfcba82c18
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java
@@ -0,0 +1,38 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import java.util.Map;
+import lombok.Builder;
+import lombok.Getter;
+import org.opensearch.search.aggregations.Aggregation;
+import org.opensearch.search.aggregations.bucket.filter.Filter;
+
+/**
+ * {@link Filter} Parser.
+ * The current use case is filter aggregation, e.g. avg(age) filter(balance>0). The filter parser
+ * itself does nothing and simply returns the result from the wrapped metricsParser.
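+ *
+ * <p>A minimal construction sketch (the metric name "avg(age)" is illustrative):
+ * <pre>{@code
+ *   MetricParser parser = FilterParser.builder()
+ *       .name("avg(age)")
+ *       .metricsParser(new SingleValueParser("avg(age)"))
+ *       .build();
+ * }</pre>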
+ */
+@Builder
+public class FilterParser implements MetricParser {
+
+  private final MetricParser metricsParser;
+
+  @Getter private final String name;
+
+  @Override
+  public Map<String, Object> parse(Aggregation aggregations) {
+    return metricsParser.parse(((Filter) aggregations).getAggregations().asList().get(0));
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java
new file mode 100644
index 0000000000..15f05e5b05
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java
@@ -0,0 +1,36 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import java.util.Map;
+import org.opensearch.search.aggregations.Aggregation;
+
+/**
+ * Metric Aggregation Parser.
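+ * Implementations in this package include {@link SingleValueParser}, {@link StatsParser}
+ * and {@link FilterParser}.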
+ */
+public interface MetricParser {
+
+  /**
+   * Get the name of the metric parser.
+   */
+  String getName();
+
+  /**
+   * Parse the {@link Aggregation}.
+   *
+   * @param aggregation {@link Aggregation}
+   * @return the map between metric name and metric value.
+   */
+  Map<String, Object> parse(Aggregation aggregation);
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java
new file mode 100644
index 0000000000..54b9305f49
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java
@@ -0,0 +1,56 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import lombok.RequiredArgsConstructor;
+import org.opensearch.search.aggregations.Aggregation;
+import org.opensearch.search.aggregations.Aggregations;
+import org.opensearch.sql.common.utils.StringUtils;
+
+/**
+ * Parse multiple metrics in one bucket.
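+ * Parsers are looked up by aggregation name, so every metric in the response must have a
+ * registered {@link MetricParser} whose name matches.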
+ */
+@RequiredArgsConstructor
+public class MetricParserHelper {
+
+  private final Map<String, MetricParser> metricParserMap;
+
+  public MetricParserHelper(List<MetricParser> metricParserList) {
+    metricParserMap =
+        metricParserList.stream().collect(Collectors.toMap(MetricParser::getName, m -> m));
+  }
+
+  /**
+   * Parse {@link Aggregations}.
+   *
+   * @param aggregations {@link Aggregations}
+   * @return the map between metric name and metric value.
+   */
+  public Map<String, Object> parse(Aggregations aggregations) {
+    Map<String, Object> resultMap = new HashMap<>();
+    for (Aggregation aggregation : aggregations) {
+      if (metricParserMap.containsKey(aggregation.getName())) {
+        resultMap.putAll(metricParserMap.get(aggregation.getName()).parse(aggregation));
+      } else {
+        throw new RuntimeException(StringUtils.format("couldn't parse field %s in aggregation "
+            + "response", aggregation.getName()));
+      }
+    }
+    return resultMap;
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java
new file mode 100644
index 0000000000..5756003523
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java
@@ -0,0 +1,41 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.opensearch.search.aggregations.Aggregations;
+
+/**
+ * No Bucket Aggregation Parser which includes only metric parsers.
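+ *
+ * <p>Sketch (assumes a metric-only response holding a single metric named "count(*)"):
+ * <pre>{@code
+ *   OpenSearchAggregationResponseParser parser =
+ *       new NoBucketAggregationParser(new SingleValueParser("count(*)"));
+ * }</pre>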
+ */
+public class NoBucketAggregationParser implements OpenSearchAggregationResponseParser {
+
+  private final MetricParserHelper metricsParser;
+
+  public NoBucketAggregationParser(MetricParser... metricParserList) {
+    metricsParser = new MetricParserHelper(Arrays.asList(metricParserList));
+  }
+
+  public NoBucketAggregationParser(List<MetricParser> metricParserList) {
+    metricsParser = new MetricParserHelper(metricParserList);
+  }
+
+  @Override
+  public List<Map<String, Object>> parse(Aggregations aggregations) {
+    return Collections.singletonList(metricsParser.parse(aggregations));
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java
new file mode 100644
index 0000000000..3a19747ef3
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java
@@ -0,0 +1,31 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import java.util.List;
+import java.util.Map;
+import org.opensearch.search.aggregations.Aggregations;
+
+/**
+ * OpenSearch Aggregation Response Parser.
+ */
+public interface OpenSearchAggregationResponseParser {
+
+  /**
+   * Parse the OpenSearch Aggregation Response.
+   * @param aggregations Aggregations.
+   * @return aggregation result.
+   */
+  List<Map<String, Object>> parse(Aggregations aggregations);
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java
new file mode 100644
index 0000000000..7536a24661
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java
@@ -0,0 +1,39 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import static org.opensearch.sql.opensearch.response.agg.Utils.handleNanValue;
+
+import java.util.Collections;
+import java.util.Map;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import org.opensearch.search.aggregations.Aggregation;
+import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation;
+
+/**
+ * {@link NumericMetricsAggregation.SingleValue} metric parser.
+ */
+@RequiredArgsConstructor
+public class SingleValueParser implements MetricParser {
+
+  @Getter private final String name;
+
+  @Override
+  public Map<String, Object> parse(Aggregation agg) {
+    return Collections.singletonMap(
+        agg.getName(),
+        handleNanValue(((NumericMetricsAggregation.SingleValue) agg).value()));
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java
new file mode 100644
index 0000000000..6cac2fbdc9
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java
@@ -0,0 +1,41 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import static org.opensearch.sql.opensearch.response.agg.Utils.handleNanValue;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.function.Function;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import org.opensearch.search.aggregations.Aggregation;
+import org.opensearch.search.aggregations.metrics.ExtendedStats;
+
+/**
+ * {@link ExtendedStats} metric parser.
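+ * For example, the sample variance of a metric named "var_samp(age)" can be extracted with
+ * {@code new StatsParser(ExtendedStats::getVarianceSampling, "var_samp(age)")} (illustrative).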
+ */
+@RequiredArgsConstructor
+public class StatsParser implements MetricParser {
+
+  private final Function<ExtendedStats, Double> valueExtractor;
+
+  @Getter private final String name;
+
+  @Override
+  public Map<String, Object> parse(Aggregation agg) {
+    return Collections.singletonMap(
+        agg.getName(), handleNanValue(valueExtractor.apply((ExtendedStats) agg)));
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java
new file mode 100644
index 0000000000..28b9d41e83
--- /dev/null
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java
@@ -0,0 +1,27 @@
+/*
+ *   Licensed under the Apache License, Version 2.0 (the "License").
+ *   You may not use this file except in compliance with the License.
+ *   A copy of the License is located at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   or in the "license" file accompanying this file. This file is distributed
+ *   on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ *   express or implied. See the License for the specific language governing
+ *   permissions and limitations under the License.
+ */
+
+package org.opensearch.sql.opensearch.response.agg;
+
+import lombok.experimental.UtilityClass;
+
+@UtilityClass
+public class Utils {
+  /**
+   * Utility to handle NaN values.
+   * @return null if the value is NaN, otherwise the value.
+   */
+  public static Object handleNanValue(double value) {
+    return Double.isNaN(value) ? null : value;
+  }
+}
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java
index eea8f50c99..7c774a804d 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java
@@ -13,6 +13,8 @@
 
 import static org.opensearch.common.unit.TimeValue.timeValueMinutes;
 
+import com.google.common.collect.ImmutableList;
+import java.util.List;
 import lombok.experimental.UtilityClass;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.unit.ByteSizeValue;
@@ -81,4 +83,108 @@ public class LegacyOpenDistroSettings {
       Setting.Property.Dynamic,
       Setting.Property.Deprecated);
 
+  /**
+   * Deprecated and will be removed in a future release.
+   * From OpenSearch 1.0, the new engine is always enabled.
+   */
+  public static final Setting<Boolean> SQL_NEW_ENGINE_ENABLED_SETTING = Setting.boolSetting(
+      LegacySettings.Key.SQL_NEW_ENGINE_ENABLED.getKeyValue(),
+      true,
+      Setting.Property.NodeScope,
+      Setting.Property.Dynamic,
+      Setting.Property.Deprecated);
+
+  /**
+   * Deprecated and will be removed in a future release.
+   * From OpenSearch 1.0, the query analysis in legacy engine is disabled.
+   */
+  public static final Setting<Boolean> QUERY_ANALYSIS_ENABLED_SETTING = Setting.boolSetting(
+      LegacySettings.Key.QUERY_ANALYSIS_ENABLED.getKeyValue(),
+      false,
+      Setting.Property.NodeScope,
+      Setting.Property.Dynamic,
+      Setting.Property.Deprecated);
+
+  /**
+   * Deprecated and will be removed in a future release.
+   * From OpenSearch 1.0, the query analysis suggestion in legacy engine is disabled.
+   */
+  public static final Setting<Boolean> QUERY_ANALYSIS_SEMANTIC_SUGGESTION_SETTING =
+      Setting.boolSetting(
+      LegacySettings.Key.QUERY_ANALYSIS_SEMANTIC_SUGGESTION.getKeyValue(),
+      false,
+      Setting.Property.NodeScope,
+      Setting.Property.Dynamic,
+      Setting.Property.Deprecated);
+
+  /**
+   * Deprecated and will be removed in a future release.
+   * From OpenSearch 1.0, the query analysis threshold in legacy engine is disabled.
+   */
+  public static final Setting<Integer> QUERY_ANALYSIS_SEMANTIC_THRESHOLD_SETTING =
+      Setting.intSetting(
+          LegacySettings.Key.QUERY_ANALYSIS_SEMANTIC_THRESHOLD.getKeyValue(),
+          200,
+          Setting.Property.NodeScope,
+          Setting.Property.Dynamic,
+          Setting.Property.Deprecated);
+
+  /**
+   * Deprecated and will be removed in a future release.
+   * From OpenSearch 1.0, the query response format is default to JDBC format.
+   */
+  public static final Setting<String> QUERY_RESPONSE_FORMAT_SETTING =
+      Setting.simpleString(
+          LegacySettings.Key.QUERY_RESPONSE_FORMAT.getKeyValue(),
+          "jdbc",
+          Setting.Property.NodeScope,
+          Setting.Property.Dynamic,
+          Setting.Property.Deprecated);
+
+  /**
+   * Deprecated and will be removed in a future release.
+   * From OpenSearch 1.0, the cursor feature is enabled by default.
+   */
+  public static final Setting<Boolean> SQL_CURSOR_ENABLED_SETTING =
+      Setting.boolSetting(
+          LegacySettings.Key.SQL_CURSOR_ENABLED.getKeyValue(),
+          true,
+          Setting.Property.NodeScope,
+          Setting.Property.Dynamic,
+          Setting.Property.Deprecated);
+  /**
+   * Deprecated and will be removed then.
+   * From OpenSearch 1.0, the fetch_size in query body will decide whether create the cursor
+   * context. No cursor will be created if the fetch_size = 0.
+   */
+  public static final Setting<Integer> SQL_CURSOR_FETCH_SIZE_SETTING =
+      Setting.intSetting(
+          LegacySettings.Key.SQL_CURSOR_FETCH_SIZE.getKeyValue(),
+          1000,
+          Setting.Property.NodeScope,
+          Setting.Property.Dynamic,
+          Setting.Property.Deprecated);
+
+  /**
+   * Used by the plugin to initialize the legacy settings.
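+   * A plugin typically returns these from its {@code getSettings()} override so that
+   * OpenSearch registers them on startup (illustrative; the wiring lives in the plugin module).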
+   */
+  public static List<Setting<?>> legacySettings() {
+    return new ImmutableList.Builder<Setting<?>>()
+        .add(SQL_ENABLED_SETTING)
+        .add(SQL_QUERY_SLOWLOG_SETTING)
+        .add(SQL_CURSOR_KEEPALIVE_SETTING)
+        .add(METRICS_ROLLING_WINDOW_SETTING)
+        .add(METRICS_ROLLING_INTERVAL_SETTING)
+        .add(PPL_ENABLED_SETTING)
+        .add(PPL_QUERY_MEMORY_LIMIT_SETTING)
+        .add(QUERY_SIZE_LIMIT_SETTING)
+        .add(SQL_NEW_ENGINE_ENABLED_SETTING)
+        .add(QUERY_ANALYSIS_ENABLED_SETTING)
+        .add(QUERY_ANALYSIS_SEMANTIC_SUGGESTION_SETTING)
+        .add(QUERY_ANALYSIS_SEMANTIC_THRESHOLD_SETTING)
+        .add(QUERY_RESPONSE_FORMAT_SETTING)
+        .add(SQL_CURSOR_ENABLED_SETTING)
+        .add(SQL_CURSOR_FETCH_SIZE_SETTING)
+        .build();
+  }
 }
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java
index 74e966637f..0198abe7a1 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java
@@ -32,6 +32,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
+import org.apache.commons.lang3.tuple.Pair;
 import org.opensearch.index.query.QueryBuilder;
 import org.opensearch.search.aggregations.AggregationBuilder;
 import org.opensearch.sql.common.setting.Settings;
@@ -43,6 +44,7 @@
 import org.opensearch.sql.opensearch.planner.logical.OpenSearchLogicalIndexScan;
 import org.opensearch.sql.opensearch.planner.logical.OpenSearchLogicalPlanOptimizerFactory;
 import org.opensearch.sql.opensearch.request.system.OpenSearchDescribeIndexRequest;
+import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser;
 import org.opensearch.sql.opensearch.storage.script.aggregation.AggregationQueryBuilder;
 import org.opensearch.sql.opensearch.storage.script.filter.FilterQueryBuilder;
 import org.opensearch.sql.opensearch.storage.script.sort.SortQueryBuilder;
@@ -163,7 +165,7 @@ public PhysicalPlan visitIndexAggregation(OpenSearchLogicalIndexAgg node,
       }
       AggregationQueryBuilder builder =
           new AggregationQueryBuilder(new DefaultExpressionSerializer());
-      List<AggregationBuilder> aggregationBuilder =
+      Pair<List<AggregationBuilder>, OpenSearchAggregationResponseParser> aggregationBuilder =
           builder.buildAggregationBuilder(node.getAggregatorList(),
               node.getGroupByList(), node.getSortList());
       context.pushDownAggregation(aggregationBuilder);
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java
index 99b11c21a4..57980f23b9 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java
@@ -40,6 +40,7 @@
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
 import lombok.ToString;
+import org.apache.commons.lang3.tuple.Pair;
 import org.opensearch.index.query.BoolQueryBuilder;
 import org.opensearch.index.query.QueryBuilder;
 import org.opensearch.index.query.QueryBuilders;
@@ -55,6 +56,7 @@
 import org.opensearch.sql.opensearch.request.OpenSearchQueryRequest;
 import org.opensearch.sql.opensearch.request.OpenSearchRequest;
 import org.opensearch.sql.opensearch.response.OpenSearchResponse;
+import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser;
 import org.opensearch.sql.storage.TableScanOperator;
 
 /**
@@ -138,12 +140,14 @@ public void pushDown(QueryBuilder query) {
 
   /**
    * Push down aggregation to DSL request.
-   * @param aggregationBuilderList aggregation query.
+   * @param aggregationBuilder pair of aggregation query and aggregation parser.
    */
-  public void pushDownAggregation(List<AggregationBuilder> aggregationBuilderList) {
+  public void pushDownAggregation(
+      Pair<List<AggregationBuilder>, OpenSearchAggregationResponseParser> aggregationBuilder) {
     SearchSourceBuilder source = request.getSourceBuilder();
-    aggregationBuilderList.forEach(aggregationBuilder -> source.aggregation(aggregationBuilder));
+    aggregationBuilder.getLeft().forEach(builder -> source.aggregation(builder));
     source.size(0);
+    request.getExprValueFactory().setParser(aggregationBuilder.getRight());
   }
 
   /**
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java
index a89ba042ee..403f99e593 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java
@@ -42,6 +42,7 @@
 import org.apache.commons.lang3.tuple.Pair;
 import org.opensearch.search.aggregations.AggregationBuilder;
 import org.opensearch.search.aggregations.AggregationBuilders;
+import org.opensearch.search.aggregations.AggregatorFactories;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.sql.ast.tree.Sort;
 import org.opensearch.sql.data.type.ExprType;
@@ -50,6 +51,10 @@
 import org.opensearch.sql.expression.NamedExpression;
 import org.opensearch.sql.expression.ReferenceExpression;
 import org.opensearch.sql.expression.aggregation.NamedAggregator;
+import org.opensearch.sql.opensearch.response.agg.CompositeAggregationParser;
+import org.opensearch.sql.opensearch.response.agg.MetricParser;
+import org.opensearch.sql.opensearch.response.agg.NoBucketAggregationParser;
+import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser;
 import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.BucketAggregationBuilder;
 import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.MetricAggregationBuilder;
 import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer;
@@ -82,25 +87,35 @@ public AggregationQueryBuilder(
     this.metricBuilder = new MetricAggregationBuilder(serializer);
   }
 
-  /**
-   * Build AggregationBuilder.
-   */
-  public List<AggregationBuilder> buildAggregationBuilder(
-      List<NamedAggregator> namedAggregatorList,
-      List<NamedExpression> groupByList,
-      List<Pair<Sort.SortOption, Expression>> sortList) {
+  /** Build AggregationBuilder. */
+  public Pair<List<AggregationBuilder>, OpenSearchAggregationResponseParser>
+      buildAggregationBuilder(
+          List<NamedAggregator> namedAggregatorList,
+          List<NamedExpression> groupByList,
+          List<Pair<Sort.SortOption, Expression>> sortList) {
+
+    final Pair<AggregatorFactories.Builder, List<MetricParser>> metrics =
+        metricBuilder.build(namedAggregatorList);
+
     if (groupByList.isEmpty()) {
       // no bucket
-      return ImmutableList
-          .copyOf(metricBuilder.build(namedAggregatorList).getAggregatorFactories());
+      return Pair.of(
+          ImmutableList.copyOf(metrics.getLeft().getAggregatorFactories()),
+          new NoBucketAggregationParser(metrics.getRight()));
     } else {
-      final GroupSortOrder groupSortOrder = new GroupSortOrder(sortList);
-      return Collections.singletonList(AggregationBuilders.composite("composite_buckets",
-          bucketBuilder
-              .build(groupByList.stream().sorted(groupSortOrder).map(expr -> Pair.of(expr,
-                  groupSortOrder.apply(expr))).collect(Collectors.toList())))
-          .subAggregations(metricBuilder.build(namedAggregatorList))
-          .size(AGGREGATION_BUCKET_SIZE));
+      GroupSortOrder groupSortOrder = new GroupSortOrder(sortList);
+      return Pair.of(
+          Collections.singletonList(
+              AggregationBuilders.composite(
+                      "composite_buckets",
+                      bucketBuilder.build(
+                          groupByList.stream()
+                              .sorted(groupSortOrder)
+                              .map(expr -> Pair.of(expr, groupSortOrder.apply(expr)))
+                              .collect(Collectors.toList())))
+                  .subAggregations(metrics.getLeft())
+                  .size(AGGREGATION_BUCKET_SIZE)),
+          new CompositeAggregationParser(metrics.getRight()));
     }
   }
 
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java
index f3807ae662..3d40258288 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java
@@ -30,31 +30,37 @@
 
 import static org.opensearch.sql.data.type.ExprCoreType.INTEGER;
 
+import java.util.ArrayList;
 import java.util.List;
+import org.apache.commons.lang3.tuple.Pair;
 import org.opensearch.search.aggregations.AggregationBuilder;
 import org.opensearch.search.aggregations.AggregationBuilders;
 import org.opensearch.search.aggregations.AggregatorFactories;
 import org.opensearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
+import org.opensearch.search.aggregations.metrics.ExtendedStats;
 import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.opensearch.sql.expression.Expression;
 import org.opensearch.sql.expression.ExpressionNodeVisitor;
 import org.opensearch.sql.expression.LiteralExpression;
 import org.opensearch.sql.expression.ReferenceExpression;
 import org.opensearch.sql.expression.aggregation.NamedAggregator;
+import org.opensearch.sql.opensearch.response.agg.FilterParser;
+import org.opensearch.sql.opensearch.response.agg.MetricParser;
+import org.opensearch.sql.opensearch.response.agg.SingleValueParser;
+import org.opensearch.sql.opensearch.response.agg.StatsParser;
 import org.opensearch.sql.opensearch.storage.script.filter.FilterQueryBuilder;
 import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer;
 
 /**
- * Build the Metric Aggregation from {@link NamedAggregator}.
+ * Build the Metric Aggregation and List of {@link MetricParser} from {@link NamedAggregator}.
  */
 public class MetricAggregationBuilder
-    extends ExpressionNodeVisitor<AggregationBuilder, Object> {
+    extends ExpressionNodeVisitor<Pair<AggregationBuilder, MetricParser>, Object> {
 
   private final AggregationBuilderHelper<ValuesSourceAggregationBuilder<?>> helper;
   private final FilterQueryBuilder filterBuilder;
 
-  public MetricAggregationBuilder(
-      ExpressionSerializer serializer) {
+  public MetricAggregationBuilder(ExpressionSerializer serializer) {
     this.helper = new AggregationBuilderHelper<>(serializer);
     this.filterBuilder = new FilterQueryBuilder(serializer);
   }
@@ -65,55 +71,117 @@ public MetricAggregationBuilder(
    * @param aggregatorList aggregator list
    * @return AggregatorFactories.Builder
    */
-  public AggregatorFactories.Builder build(List<NamedAggregator> aggregatorList) {
+  public Pair<AggregatorFactories.Builder, List<MetricParser>> build(
+      List<NamedAggregator> aggregatorList) {
     AggregatorFactories.Builder builder = new AggregatorFactories.Builder();
+    List<MetricParser> metricParserList = new ArrayList<>();
     for (NamedAggregator aggregator : aggregatorList) {
-      builder.addAggregator(aggregator.accept(this, null));
+      Pair<AggregationBuilder, MetricParser> pair = aggregator.accept(this, null);
+      builder.addAggregator(pair.getLeft());
+      metricParserList.add(pair.getRight());
     }
-    return builder;
+    return Pair.of(builder, metricParserList);
   }
 
   @Override
-  public AggregationBuilder visitNamedAggregator(NamedAggregator node,
-                                                 Object context) {
+  public Pair<AggregationBuilder, MetricParser> visitNamedAggregator(
+      NamedAggregator node, Object context) {
     Expression expression = node.getArguments().get(0);
     Expression condition = node.getDelegated().condition();
     String name = node.getName();
 
     switch (node.getFunctionName().getFunctionName()) {
       case "avg":
-        return make(AggregationBuilders.avg(name), expression, condition, name);
+        return make(
+            AggregationBuilders.avg(name),
+            expression,
+            condition,
+            name,
+            new SingleValueParser(name));
       case "sum":
-        return make(AggregationBuilders.sum(name), expression, condition, name);
+        return make(
+            AggregationBuilders.sum(name),
+            expression,
+            condition,
+            name,
+            new SingleValueParser(name));
       case "count":
         return make(
-            AggregationBuilders.count(name), replaceStarOrLiteral(expression), condition, name);
+            AggregationBuilders.count(name),
+            replaceStarOrLiteral(expression),
+            condition,
+            name,
+            new SingleValueParser(name));
       case "min":
-        return make(AggregationBuilders.min(name), expression, condition, name);
+        return make(
+            AggregationBuilders.min(name),
+            expression,
+            condition,
+            name,
+            new SingleValueParser(name));
       case "max":
-        return make(AggregationBuilders.max(name), expression, condition, name);
+        return make(
+            AggregationBuilders.max(name),
+            expression,
+            condition,
+            name,
+            new SingleValueParser(name));
+      case "var_samp":
+        return make(
+            AggregationBuilders.extendedStats(name),
+            expression,
+            condition,
+            name,
+            new StatsParser(ExtendedStats::getVarianceSampling, name));
+      case "var_pop":
+        return make(
+            AggregationBuilders.extendedStats(name),
+            expression,
+            condition,
+            name,
+            new StatsParser(ExtendedStats::getVariancePopulation, name));
+      case "stddev_samp":
+        return make(
+            AggregationBuilders.extendedStats(name),
+            expression,
+            condition,
+            name,
+            new StatsParser(ExtendedStats::getStdDeviationSampling, name));
+      case "stddev_pop":
+        return make(
+            AggregationBuilders.extendedStats(name),
+            expression,
+            condition,
+            name,
+            new StatsParser(ExtendedStats::getStdDeviationPopulation, name));
       default:
         throw new IllegalStateException(
             String.format("unsupported aggregator %s", node.getFunctionName().getFunctionName()));
     }
   }
 
-  private AggregationBuilder make(ValuesSourceAggregationBuilder<?> builder,
-                                  Expression expression, Expression condition, String name) {
+  private Pair<AggregationBuilder, MetricParser> make(
+      ValuesSourceAggregationBuilder<?> builder,
+      Expression expression,
+      Expression condition,
+      String name,
+      MetricParser parser) {
     ValuesSourceAggregationBuilder aggregationBuilder =
         helper.build(expression, builder::field, builder::script);
     if (condition != null) {
-      return makeFilterAggregation(aggregationBuilder, condition, name);
+      return Pair.of(
+          makeFilterAggregation(aggregationBuilder, condition, name),
+          FilterParser.builder().name(name).metricsParser(parser).build());
     }
-    return aggregationBuilder;
+    return Pair.of(aggregationBuilder, parser);
   }
 
   /**
-   * Replace star or literal with OpenSearch metadata field "_index". Because:
-   * 1) Analyzer already converts * to string literal, literal check here can handle
-   *    both COUNT(*) and COUNT(1).
-   * 2) Value count aggregation on _index counts all docs (after filter), therefore
-   *    it has same semantics as COUNT(*) or COUNT(1).
+   * Replace star or literal with OpenSearch metadata field "_index". Because: 1) Analyzer already
+   * converts * to string literal, literal check here can handle both COUNT(*) and COUNT(1). 2)
+   * Value count aggregation on _index counts all docs (after filter), therefore it has same
+   * semantics as COUNT(*) or COUNT(1).
+   *
    * @param countArg count function argument
    * @return Reference to _index if literal, otherwise return original argument expression
    */
@@ -126,16 +194,15 @@ private Expression replaceStarOrLiteral(Expression countArg) {
 
   /**
    * Make builder to build FilterAggregation for aggregations with filter in the bucket.
+   *
    * @param subAggBuilder AggregationBuilder instance which the filter is applied to.
    * @param condition Condition expression in the filter.
    * @param name Name of the FilterAggregation instance to build.
    * @return {@link FilterAggregationBuilder}.
    */
-  private FilterAggregationBuilder makeFilterAggregation(AggregationBuilder subAggBuilder,
-                                                         Expression condition, String name) {
-    return AggregationBuilders
-        .filter(name, filterBuilder.build(condition))
+  private FilterAggregationBuilder makeFilterAggregation(
+      AggregationBuilder subAggBuilder, Expression condition, String name) {
+    return AggregationBuilders.filter(name, filterBuilder.build(condition))
         .subAggregation(subAggBuilder);
   }
-
 }
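As a point of reference, here is a minimal, hypothetical sketch (not part of this change) of how the parsers named above fit together on the response side: every metric built by this class now comes back paired with a `MetricParser`, and those parsers turn the aggregation JSON back into rows. Constructor shapes are taken from the unit tests further down in this diff; the wrapper class, method names, and metric names are illustrative only.

```java
import static org.opensearch.sql.opensearch.response.AggregationResponseUtils.fromJson;

import java.util.List;
import java.util.Map;
import org.opensearch.search.aggregations.metrics.ExtendedStats;
import org.opensearch.sql.opensearch.response.agg.FilterParser;
import org.opensearch.sql.opensearch.response.agg.NoBucketAggregationParser;
import org.opensearch.sql.opensearch.response.agg.SingleValueParser;
import org.opensearch.sql.opensearch.response.agg.StatsParser;

// Hypothetical helper mirroring what the tests below exercise.
class MetricParserSketch {

  // Parse a response that holds a plain value metric ("max") and an
  // extended_stats-backed metric ("stddev"): the StatsParser picks one
  // statistic out of the extended_stats result via a method reference.
  static List<Map<String, Object>> parseMaxAndStdDev(String json) {
    NoBucketAggregationParser parser = new NoBucketAggregationParser(
        new SingleValueParser("max"),
        new StatsParser(ExtendedStats::getStdDeviation, "stddev"));
    return parser.parse(fromJson(json));
  }

  // Parse a filtered metric: the FilterParser unwraps the filter bucket and
  // delegates the inner value to a SingleValueParser, matching how make()
  // pairs a FilterAggregationBuilder with a FilterParser above.
  static List<Map<String, Object>> parseFiltered(String json) {
    NoBucketAggregationParser parser = new NoBucketAggregationParser(
        FilterParser.builder()
            .name("filtered")
            .metricsParser(new SingleValueParser("filtered"))
            .build());
    return parser.parse(fromJson(json));
  }
}
```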
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java
index c8ef830635..173b33575c 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java
@@ -55,9 +55,11 @@
 import org.opensearch.search.aggregations.bucket.terms.ParsedStringTerms;
 import org.opensearch.search.aggregations.bucket.terms.StringTerms;
 import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder;
+import org.opensearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
 import org.opensearch.search.aggregations.metrics.MaxAggregationBuilder;
 import org.opensearch.search.aggregations.metrics.MinAggregationBuilder;
 import org.opensearch.search.aggregations.metrics.ParsedAvg;
+import org.opensearch.search.aggregations.metrics.ParsedExtendedStats;
 import org.opensearch.search.aggregations.metrics.ParsedMax;
 import org.opensearch.search.aggregations.metrics.ParsedMin;
 import org.opensearch.search.aggregations.metrics.ParsedSum;
@@ -74,6 +76,8 @@ public class AggregationResponseUtils {
           .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c))
           .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c))
           .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c))
+          .put(ExtendedStatsAggregationBuilder.NAME,
+              (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c))
           .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c))
           .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c))
           .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c))
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java
index b49bec4d44..120d48b601 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java
@@ -34,6 +34,8 @@
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.opensearch.sql.opensearch.response.AggregationResponseUtils.fromJson;
+import static org.opensearch.sql.opensearch.response.agg.Utils.handleNanValue;
 
 import com.google.common.collect.ImmutableMap;
 import java.util.List;
@@ -41,6 +43,13 @@
 import org.junit.jupiter.api.DisplayNameGeneration;
 import org.junit.jupiter.api.DisplayNameGenerator;
 import org.junit.jupiter.api.Test;
+import org.opensearch.search.aggregations.metrics.ExtendedStats;
+import org.opensearch.sql.opensearch.response.agg.CompositeAggregationParser;
+import org.opensearch.sql.opensearch.response.agg.FilterParser;
+import org.opensearch.sql.opensearch.response.agg.NoBucketAggregationParser;
+import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser;
+import org.opensearch.sql.opensearch.response.agg.SingleValueParser;
+import org.opensearch.sql.opensearch.response.agg.StatsParser;
 
 @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
 class OpenSearchAggregationResponseParserTest {
@@ -55,7 +64,10 @@ void no_bucket_one_metric_should_pass() {
         + "    \"value\": 40\n"
         + "  }\n"
         + "}";
-    assertThat(parse(response), contains(entry("max", 40d)));
+    NoBucketAggregationParser parser = new NoBucketAggregationParser(
+        new SingleValueParser("max")
+    );
+    assertThat(parse(parser, response), contains(entry("max", 40d)));
   }
 
   /**
@@ -71,7 +83,11 @@ void no_bucket_two_metric_should_pass() {
         + "    \"value\": 20\n"
         + "  }\n"
         + "}";
-    assertThat(parse(response),
+    NoBucketAggregationParser parser = new NoBucketAggregationParser(
+        new SingleValueParser("max"),
+        new SingleValueParser("min")
+    );
+    assertThat(parse(parser, response),
         contains(entry("max", 40d,"min", 20d)));
   }
 
@@ -104,7 +120,10 @@ void one_bucket_one_metric_should_pass() {
         + "    ]\n"
         + "  }\n"
         + "}";
-    assertThat(parse(response),
+
+    OpenSearchAggregationResponseParser parser = new CompositeAggregationParser(
+            new SingleValueParser("avg"));
+    assertThat(parse(parser, response),
         containsInAnyOrder(ImmutableMap.of("type", "cost", "avg", 20d),
             ImmutableMap.of("type", "sale", "avg", 105d)));
   }
@@ -139,7 +158,9 @@ void two_bucket_one_metric_should_pass() {
         + "    ]\n"
         + "  }\n"
         + "}";
-    assertThat(parse(response),
+    OpenSearchAggregationResponseParser parser = new CompositeAggregationParser(
+        new SingleValueParser("avg"));
+    assertThat(parse(parser, response),
         containsInAnyOrder(ImmutableMap.of("type", "cost", "region", "us", "avg", 20d),
             ImmutableMap.of("type", "sale", "region", "uk", "avg", 130d)));
   }
@@ -147,81 +168,132 @@ void two_bucket_one_metric_should_pass() {
   @Test
   void unsupported_aggregation_should_fail() {
     String response = "{\n"
-        + "  \"date_histogram#max\": {\n"
+        + "  \"date_histogram#date_histogram\": {\n"
         + "    \"value\": 40\n"
         + "  }\n"
         + "}";
-    IllegalStateException exception =
-        assertThrows(IllegalStateException.class, () -> parse(response));
-    assertEquals("unsupported aggregation type date_histogram", exception.getMessage());
+    NoBucketAggregationParser parser = new NoBucketAggregationParser(
+        new SingleValueParser("max")
+    );
+    RuntimeException exception =
+        assertThrows(RuntimeException.class, () -> parse(parser, response));
+    assertEquals(
+        "couldn't parse field date_histogram in aggregation response", exception.getMessage());
   }
 
   @Test
   void nan_value_should_return_null() {
-    assertNull(OpenSearchAggregationResponseParser.handleNanValue(Double.NaN));
+    assertNull(handleNanValue(Double.NaN));
   }
 
   /**
    * SELECT AVG(age) FILTER(WHERE age > 37) as filtered FROM accounts.
    */
   @Test
   void filter_aggregation_should_pass() {
-    String response = "{\n" 
-            +     "    \"filter#filtered\" : {\n" 
-            +     "      \"doc_count\" : 3,\n" 
-            +     "      \"avg#filtered\" : {\n" 
-            +     "        \"value\" : 37.0\n" 
-            +     "      }\n" 
-            +     "    }\n" 
-            +     "  }";
-    assertThat(parse(response), contains(entry("filtered", 37.0)));
+    String response = "{\n"
+        +     "    \"filter#filtered\" : {\n"
+        +     "      \"doc_count\" : 3,\n"
+        +     "      \"avg#filtered\" : {\n"
+        +     "        \"value\" : 37.0\n"
+        +     "      }\n"
+        +     "    }\n"
+        +     "  }";
+    OpenSearchAggregationResponseParser parser =
+        new NoBucketAggregationParser(
+            FilterParser.builder()
+                .name("filtered")
+                .metricsParser(new SingleValueParser("filtered"))
+                .build());
+    assertThat(parse(parser, response), contains(entry("filtered", 37.0)));
   }
 
   /**
    * SELECT AVG(age) FILTER(WHERE age > 37) as filtered FROM accounts GROUP BY gender.
    */
   @Test
   void filter_aggregation_group_by_should_pass() {
-    String response = "{\n" 
-            + "  \"composite#composite_buckets\":{\n" 
-            + "    \"after_key\":{\n" 
-            + "      \"gender\":\"m\"\n" 
-            + "    },\n" 
-            + "    \"buckets\":[\n" 
-            + "      {\n" 
-            + "        \"key\":{\n" 
-            + "          \"gender\":\"f\"\n" 
-            + "        },\n" 
-            + "        \"doc_count\":3,\n" 
-            + "        \"filter#filter\":{\n" 
-            + "          \"doc_count\":1,\n" 
-            + "          \"avg#avg\":{\n" 
-            + "            \"value\":39.0\n" 
-            + "          }\n" 
-            + "        }\n" 
-            + "      },\n" 
-            + "      {\n" 
-            + "        \"key\":{\n" 
-            + "          \"gender\":\"m\"\n" 
-            + "        },\n" 
-            + "        \"doc_count\":4,\n" 
-            + "        \"filter#filter\":{\n" 
-            + "          \"doc_count\":2,\n" 
-            + "          \"avg#avg\":{\n" 
-            + "            \"value\":36.0\n" 
-            + "          }\n" 
-            + "        }\n" 
-            + "      }\n" 
-            + "    ]\n" 
-            + "  }\n" 
-            + "}";
-    assertThat(parse(response), containsInAnyOrder(
+    String response = "{\n"
+        + "  \"composite#composite_buckets\":{\n"
+        + "    \"after_key\":{\n"
+        + "      \"gender\":\"m\"\n"
+        + "    },\n"
+        + "    \"buckets\":[\n"
+        + "      {\n"
+        + "        \"key\":{\n"
+        + "          \"gender\":\"f\"\n"
+        + "        },\n"
+        + "        \"doc_count\":3,\n"
+        + "        \"filter#filter\":{\n"
+        + "          \"doc_count\":1,\n"
+        + "          \"avg#avg\":{\n"
+        + "            \"value\":39.0\n"
+        + "          }\n"
+        + "        }\n"
+        + "      },\n"
+        + "      {\n"
+        + "        \"key\":{\n"
+        + "          \"gender\":\"m\"\n"
+        + "        },\n"
+        + "        \"doc_count\":4,\n"
+        + "        \"filter#filter\":{\n"
+        + "          \"doc_count\":2,\n"
+        + "          \"avg#avg\":{\n"
+        + "            \"value\":36.0\n"
+        + "          }\n"
+        + "        }\n"
+        + "      }\n"
+        + "    ]\n"
+        + "  }\n"
+        + "}";
+    OpenSearchAggregationResponseParser parser = new CompositeAggregationParser(
+        FilterParser.builder()
+            .name("filter")
+            .metricsParser(new SingleValueParser("avg"))
+            .build()
+    );
+    assertThat(parse(parser, response), containsInAnyOrder(
         entry("gender", "f", "avg", 39.0),
         entry("gender", "m", "avg", 36.0)));
   }
 
-  public List<Map<String, Object>> parse(String json) {
-    return OpenSearchAggregationResponseParser.parse(AggregationResponseUtils.fromJson(json));
+  /**
+   * SELECT MAX(age) as maxField, STDDEV(age) as esField FROM accounts.
+   */
+  @Test
+  void no_bucket_max_and_extended_stats() {
+    String response = "{\n"
+        + "  \"extended_stats#esField\": {\n"
+        + "    \"count\": 2033,\n"
+        + "    \"min\": 0,\n"
+        + "    \"max\": 360,\n"
+        + "    \"avg\": 45.47958681751107,\n"
+        + "    \"sum\": 92460,\n"
+        + "    \"sum_of_squares\": 22059450,\n"
+        + "    \"variance\": 8782.295820390027,\n"
+        + "    \"variance_population\": 8782.295820390027,\n"
+        + "    \"variance_sampling\": 8786.61781636463,\n"
+        + "    \"std_deviation\": 93.71390409320287,\n"
+        + "    \"std_deviation_population\": 93.71390409320287,\n"
+        + "    \"std_deviation_sampling\": 93.73696078049805,\n"
+        + "    \"std_deviation_bounds\": {\n"
+        + "      \"upper\": 232.9073950039168,\n"
+        + "      \"lower\": -141.94822136889468,\n"
+        + "      \"upper_population\": 232.9073950039168,\n"
+        + "      \"lower_population\": -141.94822136889468,\n"
+        + "      \"upper_sampling\": 232.95350837850717,\n"
+        + "      \"lower_sampling\": -141.99433474348504\n"
+        + "    }\n"
+        + "  },\n"
+        + "  \"max#maxField\": {\n"
+        + "    \"value\": 360\n"
+        + "  }\n"
+        + "}";
+
+    NoBucketAggregationParser parser = new NoBucketAggregationParser(
+        new SingleValueParser("maxField"),
+        new StatsParser(ExtendedStats::getStdDeviation, "esField")
+    );
+    assertThat(parse(parser, response),
+        contains(entry("esField", 93.71390409320287, "maxField", 360D)));
+  }
+
+  public List<Map<String, Object>> parse(OpenSearchAggregationResponseParser parser, String json) {
+    return parser.parse(fromJson(json));
   }
 
   public Map<String, Object> entry(String name, Object value) {
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java
index 184312afa1..c9cde4f634 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java
@@ -42,8 +42,6 @@
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.MockedStatic;
-import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.search.SearchHit;
@@ -53,6 +51,7 @@
 import org.opensearch.sql.data.model.ExprTupleValue;
 import org.opensearch.sql.data.model.ExprValue;
 import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory;
+import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser;
 
 @ExtendWith(MockitoExtension.class)
 class OpenSearchResponseTest {
@@ -72,6 +71,9 @@ class OpenSearchResponseTest {
   @Mock
   private Aggregations aggregations;
 
+  @Mock
+  private OpenSearchAggregationResponseParser parser;
+
   private ExprTupleValue exprTupleValue1 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1",
       new ExprIntegerValue(1)));
 
@@ -147,26 +149,24 @@ void response_isnot_aggregation_when_aggregation_is_empty() {
 
   @Test
   void aggregation_iterator() {
-    try (
-        MockedStatic<OpenSearchAggregationResponseParser> mockedStatic = Mockito
-            .mockStatic(OpenSearchAggregationResponseParser.class)) {
-      when(OpenSearchAggregationResponseParser.parse(any()))
-          .thenReturn(Arrays.asList(ImmutableMap.of("id1", 1), ImmutableMap.of("id2", 2)));
-      when(searchResponse.getAggregations()).thenReturn(aggregations);
-      when(factory.construct(anyString(), any())).thenReturn(new ExprIntegerValue(1))
-          .thenReturn(new ExprIntegerValue(2));
-
-      int i = 0;
-      for (ExprValue hit : new OpenSearchResponse(searchResponse, factory)) {
-        if (i == 0) {
-          assertEquals(exprTupleValue1, hit);
-        } else if (i == 1) {
-          assertEquals(exprTupleValue2, hit);
-        } else {
-          fail("More search hits returned than expected");
-        }
-        i++;
+    when(parser.parse(any()))
+        .thenReturn(Arrays.asList(ImmutableMap.of("id1", 1), ImmutableMap.of("id2", 2)));
+    when(searchResponse.getAggregations()).thenReturn(aggregations);
+    when(factory.getParser()).thenReturn(parser);
+    when(factory.construct(anyString(), any()))
+        .thenReturn(new ExprIntegerValue(1))
+        .thenReturn(new ExprIntegerValue(2));
+
+    int i = 0;
+    for (ExprValue hit : new OpenSearchResponse(searchResponse, factory)) {
+      if (i == 0) {
+        assertEquals(exprTupleValue1, hit);
+      } else if (i == 1) {
+        assertEquals(exprTupleValue2, hit);
+      } else {
+        fail("More search hits returned than expected");
       }
+      i++;
     }
   }
 }
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java
index f8f45f90c8..f264e5df34 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java
@@ -33,6 +33,7 @@
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.opensearch.common.unit.TimeValue.timeValueMinutes;
+import static org.opensearch.sql.opensearch.setting.LegacyOpenDistroSettings.legacySettings;
 
 import java.util.List;
 import org.junit.jupiter.api.Test;
@@ -148,4 +149,10 @@ public void updateLegacySettingsFallback() {
     assertEquals(OpenSearchSettings.METRICS_ROLLING_WINDOW_SETTING.get(settings), 2000L);
     assertEquals(OpenSearchSettings.METRICS_ROLLING_INTERVAL_SETTING.get(settings), 100L);
   }
+
+  @Test
+  void legacySettingsShouldBeDeprecatedBeforeRemove() {
+    assertEquals(15, legacySettings().size());
+  }
 }
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java
index 2242298bed..62643baad2 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java
@@ -423,13 +423,18 @@ private String buildQuery(List<NamedAggregator> namedAggregatorList,
   }
 
   @SneakyThrows
-  private String buildQuery(List<NamedAggregator> namedAggregatorList,
-                            List<NamedExpression> groupByList,
-                            List<Pair<Sort.SortOption, Expression>> sortList) {
+  private String buildQuery(
+      List<NamedAggregator> namedAggregatorList,
+      List<NamedExpression> groupByList,
+      List<Pair<Sort.SortOption, Expression>> sortList) {
     ObjectMapper objectMapper = new ObjectMapper();
-    return objectMapper.readTree(
-        queryBuilder.buildAggregationBuilder(namedAggregatorList, groupByList, sortList).get(0)
-            .toString())
+    return objectMapper
+        .readTree(
+            queryBuilder
+                .buildAggregationBuilder(namedAggregatorList, groupByList, sortList)
+                .getLeft()
+                .get(0)
+                .toString())
         .toPrettyString();
   }
 
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java
index b956a2f5a0..95a2383475 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java
@@ -35,6 +35,10 @@
 import static org.opensearch.sql.expression.DSL.literal;
 import static org.opensearch.sql.expression.DSL.named;
 import static org.opensearch.sql.expression.DSL.ref;
+import static org.opensearch.sql.expression.aggregation.StdDevAggregator.stddevPopulation;
+import static org.opensearch.sql.expression.aggregation.StdDevAggregator.stddevSample;
+import static org.opensearch.sql.expression.aggregation.VarianceAggregator.variancePopulation;
+import static org.opensearch.sql.expression.aggregation.VarianceAggregator.varianceSample;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import java.util.Arrays;
@@ -53,6 +57,7 @@
 import org.opensearch.sql.expression.aggregation.MinAggregator;
 import org.opensearch.sql.expression.aggregation.NamedAggregator;
 import org.opensearch.sql.expression.aggregation.SumAggregator;
 import org.opensearch.sql.expression.function.FunctionName;
 import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer;
 
@@ -185,6 +190,74 @@ void should_build_max_aggregation() {
                     new MaxAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER)))));
   }
 
+  @Test
+  void should_build_varPop_aggregation() {
+    assertEquals(
+        "{\n"
+            + "  \"var_pop(age)\" : {\n"
+            + "    \"extended_stats\" : {\n"
+            + "      \"field\" : \"age\",\n"
+            + "      \"sigma\" : 2.0\n"
+            + "    }\n"
+            + "  }\n"
+            + "}",
+        buildQuery(
+            Arrays.asList(
+                named("var_pop(age)",
+                    variancePopulation(Arrays.asList(ref("age", INTEGER)), INTEGER)))));
+  }
+
+  @Test
+  void should_build_varSamp_aggregation() {
+    assertEquals(
+        "{\n"
+            + "  \"var_samp(age)\" : {\n"
+            + "    \"extended_stats\" : {\n"
+            + "      \"field\" : \"age\",\n"
+            + "      \"sigma\" : 2.0\n"
+            + "    }\n"
+            + "  }\n"
+            + "}",
+        buildQuery(
+            Arrays.asList(
+                named("var_samp(age)",
+                    varianceSample(Arrays.asList(ref("age", INTEGER)), INTEGER)))));
+  }
+
+  @Test
+  void should_build_stddevPop_aggregation() {
+    assertEquals(
+        "{\n"
+            + "  \"stddev_pop(age)\" : {\n"
+            + "    \"extended_stats\" : {\n"
+            + "      \"field\" : \"age\",\n"
+            + "      \"sigma\" : 2.0\n"
+            + "    }\n"
+            + "  }\n"
+            + "}",
+        buildQuery(
+            Arrays.asList(
+                named("stddev_pop(age)",
+                    stddevPopulation(Arrays.asList(ref("age", INTEGER)), INTEGER)))));
+  }
+
+  @Test
+  void should_build_stddevSamp_aggregation() {
+    assertEquals(
+        "{\n"
+            + "  \"stddev_samp(age)\" : {\n"
+            + "    \"extended_stats\" : {\n"
+            + "      \"field\" : \"age\",\n"
+            + "      \"sigma\" : 2.0\n"
+            + "    }\n"
+            + "  }\n"
+            + "}",
+        buildQuery(
+            Arrays.asList(
+                named("stddev_samp(age)",
+                    stddevSample(Arrays.asList(ref("age", INTEGER)), INTEGER)))));
+  }
+
   @Test
   void should_throw_exception_for_unsupported_aggregator() {
     when(aggregator.getFunctionName()).thenReturn(new FunctionName("unsupported_agg"));
@@ -211,7 +284,7 @@ void should_throw_exception_for_unsupported_exception() {
   private String buildQuery(List<NamedAggregator> namedAggregatorList) {
     ObjectMapper objectMapper = new ObjectMapper();
     return objectMapper.readTree(
-        aggregationBuilder.build(namedAggregatorList).toString())
+        aggregationBuilder.build(namedAggregatorList).getLeft().toString())
         .toPrettyString();
   }
 }
diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java
index 75b5184669..ad7d3f62ac 100644
--- a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java
+++ b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java
@@ -152,13 +152,7 @@ public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) {
   @Override
   public List<Setting<?>> getSettings() {
     return new ImmutableList.Builder<Setting<?>>()
-        .add(LegacyOpenDistroSettings.SQL_ENABLED_SETTING)
-        .add(LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING)
-        .add(LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING)
-        .add(LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING)
-        .add(LegacyOpenDistroSettings.PPL_ENABLED_SETTING)
-        .add(LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING)
-        .add(LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING)
+        .addAll(LegacyOpenDistroSettings.legacySettings())
         .addAll(OpenSearchSettings.pluginSettings())
         .build();
   }
diff --git a/ppl/src/main/antlr/OpenSearchPPLLexer.g4 b/ppl/src/main/antlr/OpenSearchPPLLexer.g4
index 3874a0a50e..cb665f6c88 100644
--- a/ppl/src/main/antlr/OpenSearchPPLLexer.g4
+++ b/ppl/src/main/antlr/OpenSearchPPLLexer.g4
@@ -151,8 +151,10 @@ STDEV:                              'STDEV';
 STDEVP:                             'STDEVP';
 SUM:                                'SUM';
 SUMSQ:                              'SUMSQ';
-VAR:                                'VAR';
-VARP:                               'VARP';
+VAR_SAMP:                           'VAR_SAMP';
+VAR_POP:                            'VAR_POP';
+STDDEV_SAMP:                        'STDDEV_SAMP';
+STDDEV_POP:                         'STDDEV_POP';
 PERCENTILE:                         'PERCENTILE';
 FIRST:                              'FIRST';
 LAST:                               'LAST';
diff --git a/ppl/src/main/antlr/OpenSearchPPLParser.g4 b/ppl/src/main/antlr/OpenSearchPPLParser.g4
index 77aecf5a44..d552ad0756 100644
--- a/ppl/src/main/antlr/OpenSearchPPLParser.g4
+++ b/ppl/src/main/antlr/OpenSearchPPLParser.g4
@@ -139,7 +139,7 @@ statsFunction
     ;
 
 statsFunctionName
-    : AVG | COUNT | SUM | MIN | MAX
+    : AVG | COUNT | SUM | MIN | MAX | VAR_SAMP | VAR_POP | STDDEV_SAMP | STDDEV_POP
     ;
 
 percentileAggFunction
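With the new tokens above, a PPL `stats` command accepts the sample and population variants directly, for example `source=accounts | stats var_samp(age) by gender` (index and field names here are illustrative; the parser tests below build exactly this shape of query).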
diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java
index 07ad97401e..71ef692abf 100644
--- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java
+++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java
@@ -335,6 +335,90 @@ public void testAggFuncCallExpr() {
         ));
   }
 
+  @Test
+  public void testVarAggregationShouldPass() {
+    assertEqual("source=t | stats var_samp(a) by b",
+        agg(
+            relation("t"),
+            exprList(
+                alias(
+                    "var_samp(a)",
+                    aggregate("var_samp", field("a"))
+                )
+            ),
+            emptyList(),
+            exprList(
+                alias(
+                    "b",
+                    field("b")
+                )),
+            defaultStatsArgs()
+        ));
+  }
+
+  @Test
+  public void testVarpAggregationShouldPass() {
+    assertEqual("source=t | stats var_pop(a) by b",
+        agg(
+            relation("t"),
+            exprList(
+                alias(
+                    "var_pop(a)",
+                    aggregate("var_pop", field("a"))
+                )
+            ),
+            emptyList(),
+            exprList(
+                alias(
+                    "b",
+                    field("b")
+                )),
+            defaultStatsArgs()
+        ));
+  }
+
+  @Test
+  public void testStdDevAggregationShouldPass() {
+    assertEqual("source=t | stats stddev_samp(a) by b",
+        agg(
+            relation("t"),
+            exprList(
+                alias(
+                    "stddev_samp(a)",
+                    aggregate("stddev_samp", field("a"))
+                )
+            ),
+            emptyList(),
+            exprList(
+                alias(
+                    "b",
+                    field("b")
+                )),
+            defaultStatsArgs()
+        ));
+  }
+
+  @Test
+  public void testStdDevPAggregationShouldPass() {
+    assertEqual("source=t | stats stddev_pop(a) by b",
+        agg(
+            relation("t"),
+            exprList(
+                alias(
+                    "stddev_pop(a)",
+                    aggregate("stddev_pop", field("a"))
+                )
+            ),
+            emptyList(),
+            exprList(
+                alias(
+                    "b",
+                    field("b")
+                )),
+            defaultStatsArgs()
+        ));
+  }
+
   @Test
   public void testPercentileAggFuncExpr() {
     assertEqual("source=t | stats percentile<1>(a)",
diff --git a/sql-cli/README.md b/sql-cli/README.md
index 9908bd096c..89f8ba5977 100644
--- a/sql-cli/README.md
+++ b/sql-cli/README.md
@@ -9,7 +9,7 @@
 
 The SQL CLI component in OpenSearch is a stand-alone Python application and can be launched by a 'wake' word `opensearchsql`. 
 
-It only supports [OpenSearch SQL Plugin](https://docs-beta.opensearch.org/docs/sql/)
+It only supports the [OpenSearch SQL Plugin](https://docs-beta.opensearch.org/search-plugins/sql/index/).
 You must have the OpenSearch SQL plugin installed to your OpenSearch instance to connect. 
 Users can run this CLI from MacOS and Linux, and connect to any valid OpenSearch end-point such as Amazon Elasticsearch Service (AES).
 
@@ -75,7 +75,7 @@ You can also configure the following connection properties:
 
 * `endpoint`: You do not need to specify an option, anything that follows the launch command `opensearchsql` is considered as the endpoint. If you do not provide an endpoint, by default, the SQL CLI connects to [http://localhost:9200](http://localhost:9200/).
 * `-u/-w`: Supports username and password for HTTP basic authentication, such as:
-    * OpenSearch with [OpenSearch Security Plugin](https://docs-beta.opensearch.org/docs/opensearch/install/plugins/) installed
+    * OpenSearch with [OpenSearch Security Plugin](https://docs-beta.opensearch.org/security-plugin/index/) installed
     * Amazon Elasticsearch Service domain with [Fine Grained Access Control](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/fgac.html) enabled
 * `--aws-auth`: Turns on AWS sigV4 authentication to connect to an Amazon Elasticsearch Service endpoint. Use with the AWS CLI (`aws configure`) to retrieve the local AWS configuration to authenticate and connect.
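For example, a secured local cluster could be reached with something like `opensearchsql https://localhost:9200 -u <username> -w <password>`; the endpoint and credentials are placeholders, and the same flags appear in the test plan below.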
 
diff --git a/sql-cli/development_guide.md b/sql-cli/development_guide.md
index 713c8ef60e..b49703e3a5 100644
--- a/sql-cli/development_guide.md
+++ b/sql-cli/development_guide.md
@@ -16,7 +16,7 @@ https uses 443 by default.
 - Prerequisites
     - Build the application
     - Start a local OpenSearch instance with 
-    [OpenSearch SQL plugin](https://docs-beta.opensearch.org/docs/sql/) installed
+    [OpenSearch SQL plugin](https://docs-beta.opensearch.org/search-plugins/sql/index/) installed
     and listening at http://localhost:9200.
 - Pytest
     - `pip install -r requirements-dev.txt` Install test frameworks including Pytest and mock.
diff --git a/sql-cli/setup.py b/sql-cli/setup.py
index e1b07133b7..1325db6766 100644
--- a/sql-cli/setup.py
+++ b/sql-cli/setup.py
@@ -56,7 +56,7 @@
     author_email="opensearch-infra@amazon.com",
     version=version,
     license="Apache 2.0",
-    url="https://docs-beta.opensearch.org/docs/sql/cli/",
+    url="https://docs-beta.opensearch.org/search-plugins/sql/cli/",
     packages=find_packages('src'),
     package_dir={'': 'src'},
     package_data={"opensearch_sql_cli": ["conf/clirc", "opensearch_literals/opensearch_literals.json"]},
diff --git a/sql-cli/tests/test_plan.md b/sql-cli/tests/test_plan.md
index 8b374b596f..a0b1ef9996 100644
--- a/sql-cli/tests/test_plan.md
+++ b/sql-cli/tests/test_plan.md
@@ -22,7 +22,7 @@
 
 * [ ] Test connection to a local OpenSearch instance
     * [ ] OpenSearch, no authentication
-    * [ ] OpenSearch, install [OpenSearch Security plugin](https://docs-beta.opensearch.org/docs/opensearch/install/plugins/) to enable authentication and SSL
+    * [ ] OpenSearch, install [OpenSearch Security plugin](https://docs-beta.opensearch.org/security-plugin/index/) to enable authentication and SSL
     * Run command like `opensearchsql <endpoint> -u <username> -w <password>` to connect to instance with authentication.
 * [ ] Test connection to [Amazon Elasticsearch domain](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-gsg.html) with
 [Fine Grained Access Control](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/fgac.html) enabled. 
diff --git a/sql-jdbc/docs/tableau.md b/sql-jdbc/docs/tableau.md
index f614a41a88..cb47e9e142 100644
--- a/sql-jdbc/docs/tableau.md
+++ b/sql-jdbc/docs/tableau.md
@@ -3,7 +3,7 @@
 ## Download and Installation
 
 * Download and install [Tableau Desktop](https://www.tableau.com/en-ca/products/desktop/download).
-* Install and configure [OpenSearch](https://docs-beta.opensearch.org/docs/install/).
+* Install and configure [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/).
 * Download the [OpenSearch JDBC Driver](https://github.com/opensearch-project/sql/blob/master/sql-jdbc/README.md#download-and-installation).
 
 ## Setup
diff --git a/sql-odbc/docs/dev/run_tests.md b/sql-odbc/docs/dev/run_tests.md
index d2fa6bf824..c745a6f19f 100644
--- a/sql-odbc/docs/dev/run_tests.md
+++ b/sql-odbc/docs/dev/run_tests.md
@@ -2,13 +2,13 @@
 
 ## Requirements
 
-* Latest version of [OpenSearch](https://docs-beta.opensearch.org/docs/install/)
+* Latest version of [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/)
 * [Required datasets loaded](#set-up-test-datasets)
 * [DSN configured](#set-up-dsn)
 
 ### Set up test datasets
 
-Loading a dataset requires an [OpenSearch](https://docs-beta.opensearch.org/docs/install/) service running with [OpenSearch Dashboards](https://docs-beta.opensearch.org/docs/opensearch-dashboards/). If either of these are missing, please refer to the documentation on how to set them up.
+Loading a dataset requires an [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/) service running with [OpenSearch Dashboards](https://docs-beta.opensearch.org/dashboards/index/). If either of these is missing, please refer to the documentation on how to set them up.
 
 Note, if you wish to work with SSL/TLS, you need to configure OpenSearch and OpenSearch Dashboards to support it. See the [build instructions](./BUILD_INSTRUCTIONS.md) for more info.
 
diff --git a/sql-odbc/docs/dev/sign_installers.md b/sql-odbc/docs/dev/sign_installers.md
deleted file mode 100644
index fc1880ca44..0000000000
--- a/sql-odbc/docs/dev/sign_installers.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# Signing Installers
-
-## Steps to sign odbc driver windows installer 
-
-- Get code signing certificate. Certificate must meet some [criteria](https://docs.microsoft.com/en-us/windows/win32/appxpkg/how-to-sign-a-package-using-signtool). Some options are listed below.
-  - Buy [Extended Validation (EV) Code Signing Certificate](https://docs.microsoft.com/en-us/windows-hardware/drivers/dashboard/get-a-code-signing-certificate#step-2-buy-a-new-code-signing-certificate)
-  - Create a [self-signed certificate](https://docs.microsoft.com/en-us/windows/msix/package/create-certificate-package-signing#create-a-self-signed-certificate) (For testing purpose only).
-- Install the certificate that you want to sign the file with.
-  - [Import-Certificate](https://docs.microsoft.com/en-us/powershell/module/pkiclient/import-certificate?view=win10-ps) can be used for this purpose
- 
-```
-// Import certificate as Trusted Publisher
-Import-Certificate -FilePath .\code_signing.crt -Cert Cert:\CurrentUser\TrustedPublisher
-
-// Import certificate as a Root certificate authority.
-Import-Certificate -FilePath .\code_signing.crt -Cert Cert:\CurrentUser\Root
-```
-
-- Sign the .msi file. 
-  - Sign installer using [SignTool](https://docs.microsoft.com/en-us/windows/msix/package/sign-app-package-using-signtool)
-
-```
-  signtool sign /sha1 <CertificateHash> '.\OpenSearch SQL ODBC Driver-<version>-Windows.msi' 
-```
-  
-  - Alternatively, [Set-AuthenticodeSignature](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.security/set-authenticodesignature?view=powershell-7) can be used for adding digital signature.
-
-```
-  Set-AuthenticodeSignature '.\OpenSearch SQL ODBC Driver-<version>-Windows.msi' -Certificate (Get-ChildItem Cert:\CurrentUser\My -CodeSigningCert) -TimestampServer "http://timestamp.verisign.com/scripts/timstamp.dll"
-```
-
-[Note](https://stackoverflow.com/questions/50956108/codesign-software-still-gives-a-warning-on-windows-10): If you have a standard code signing certificate, some time will be needed for your application to build trust. Microsoft affirms that an Extended Validation (EV) Code Signing Certificate allows to skip this period of trust building. According to Microsoft, extended validation certificates allow the developer to immediately establish reputation with SmartScreen. Otherwise, the users will see a warning like "Windows Defender Smartscreen prevented an unrecognized app from starting. Running this app might put your PC at risk.", with the two buttons: "Run anyway" and "Don't run". 
-
-
-## Steps to sign odbc driver macOS installer 
-
-- Get a [Developer ID Installer signing certificate](https://help.apple.com/xcode/mac/current/#/dev154b28f09)
-- Sign the installer package using `productsign`. Do not use `Developer ID Application certificate`.
-
-```
-productsign -sign "Developer ID Installer: Your Apple Account Name (**********)" "~/Desktop/OpenSearch SQL ODBC Driver-<version>-Darwin.pkg" "~/Desktop/signed-OpenSearch SQL ODBC Driver-<version>-Darwin.pkg"
-```
-
-- Test installer package using [spctl](http://www.manpagez.com/man/8/spctl/)
-```
-  spctl -a -v --type install "Desktop/OpenSearch SQL ODBC Driver-<version>-Darwin.pkg"
-```
-
-Reference: https://help.apple.com/xcode/mac/current/#/deve51ce7c3d
\ No newline at end of file
diff --git a/sql-odbc/docs/test/excel_connection.md b/sql-odbc/docs/test/excel_connection.md
index bc1ca1fbaa..562ac1c694 100644
--- a/sql-odbc/docs/test/excel_connection.md
+++ b/sql-odbc/docs/test/excel_connection.md
@@ -2,7 +2,7 @@
 
 ## Prerequisites
 * [Download and install](../../README.md) OpenSearch SQL ODBC Driver.
-* [Install and configure](https://docs-beta.opensearch.org/docs/install/) OpenSearch.
+* [Install and configure](https://docs-beta.opensearch.org/opensearch/install/index/) OpenSearch.
 * Open ODBC Data Source Administrator. Click on **System DSN** > **OpenSearch SQL ODBC DSN** > **Configure**.
 * Set all connection options & Click on **Test**. Connection test should return `Connection Successful`.
 
diff --git a/sql-odbc/docs/user/microsoft_excel_support.md b/sql-odbc/docs/user/microsoft_excel_support.md
index 292a2aadeb..1d53c114de 100644
--- a/sql-odbc/docs/user/microsoft_excel_support.md
+++ b/sql-odbc/docs/user/microsoft_excel_support.md
@@ -3,8 +3,8 @@
 ## Prerequisites
 
 * Microsoft Excel 2016 and higher
-* [OpenSearch](https://docs-beta.opensearch.org/docs/install/)
-* [OpenSearch SQL ODBC driver](https://docs-beta.opensearch.org/docs/sql/odbc/)
+* [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/)
+* [OpenSearch SQL ODBC driver](https://docs-beta.opensearch.org/search-plugins/sql/odbc/)
 * A preconfigured [User or System DSN](../../README.md)
 
 ## Test Successful Connection
diff --git a/sql-odbc/docs/user/microsoft_excel_support_mac.md b/sql-odbc/docs/user/microsoft_excel_support_mac.md
index 1d241cf816..7d0e852c54 100644
--- a/sql-odbc/docs/user/microsoft_excel_support_mac.md
+++ b/sql-odbc/docs/user/microsoft_excel_support_mac.md
@@ -3,8 +3,8 @@
 ## Prerequisites
 
 * Microsoft Excel 2016 and higher
-* [OpenSearch](https://docs-beta.opensearch.org/docs/install/)
-* [OpenSearch SQL ODBC driver](https://docs-beta.opensearch.org/docs/sql/odbc/)
+* [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/)
+* [OpenSearch SQL ODBC driver](https://docs-beta.opensearch.org/search-plugins/sql/odbc/)
 * A preconfigured [User or System DSN](mac_configure_dsn.md)
 
 ## Test Successful Connection
diff --git a/sql-odbc/docs/user/power_bi_support.md b/sql-odbc/docs/user/power_bi_support.md
index 26b169c1dc..72df78794b 100644
--- a/sql-odbc/docs/user/power_bi_support.md
+++ b/sql-odbc/docs/user/power_bi_support.md
@@ -2,8 +2,8 @@
 
 ## Prerequisites
 * Microsoft Power BI Desktop
-* [OpenSearch](https://docs-beta.opensearch.org/docs/install/)
-* [OpenSearch SQL ODBC driver](https://docs-beta.opensearch.org/docs/sql/odbc/)
+* [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/)
+* [OpenSearch SQL ODBC driver](https://docs-beta.opensearch.org/search-plugins/sql/odbc/)
 * [SqlOdbcPBIConnector.mez](../../src/PowerBIConnector/bin/Release/) 
 * Optional: [sqlodbc_import.pbids](../../src/PowerBIConnector/PBIDSExamples) to help with repeated connections to the same server 
 
@@ -86,7 +86,7 @@ It will take you straight to the **Navigator** window for selecting the tables f
 
 ## Troubleshooting 
 
-* If you get an following error, please install [OpenSearch SQL ODBC Driver](https://docs-beta.opensearch.org/docs/sql/odbc/).
+* If you get the following error, please install the [OpenSearch SQL ODBC Driver](https://docs-beta.opensearch.org/search-plugins/sql/odbc/).
 
 <img src="img/pbi_error_driver_not_installed.png" width="350">
 
diff --git a/sql-odbc/docs/user/tableau_support.md b/sql-odbc/docs/user/tableau_support.md
index 2b53c40ffc..94cdeae9a2 100644
--- a/sql-odbc/docs/user/tableau_support.md
+++ b/sql-odbc/docs/user/tableau_support.md
@@ -7,7 +7,7 @@ Connect an OpenSearch data source to Tableau Desktop via the Tableau Connector t
 ## Prerequisites
 
 * Download and Install [Tableau Desktop](https://www.tableau.com/products/desktop/download) 2020 and higher
-* Install and Configure [OpenSearch](https://docs-beta.opensearch.org/docs/install/)
+* Install and Configure [OpenSearch](https://docs-beta.opensearch.org/opensearch/install/index/)
 * Download and Install [OpenSearch SQL ODBC driver](../../README.md)
 * Download Tableau Connector for `SQL by OpenSearch for ES` ([opensearch_sql_odbc.taco](../../src/TableauConnector/opensearch_sql_odbc/opensearch_sql_odbc.taco)).
 Click on **Download** option for downloading `opensearch_sql_odbc.taco` file.
diff --git a/sql/src/main/antlr/OpenSearchSQLLexer.g4 b/sql/src/main/antlr/OpenSearchSQLLexer.g4
index 94f8e7c87a..426c77cf06 100644
--- a/sql/src/main/antlr/OpenSearchSQLLexer.g4
+++ b/sql/src/main/antlr/OpenSearchSQLLexer.g4
@@ -126,6 +126,13 @@ COUNT:                              'COUNT';
 MAX:                                'MAX';
 MIN:                                'MIN';
 SUM:                                'SUM';
+VAR_POP:                            'VAR_POP';
+VAR_SAMP:                           'VAR_SAMP';
+VARIANCE:                           'VARIANCE';
+STD:                                'STD';
+STDDEV:                             'STDDEV';
+STDDEV_POP:                         'STDDEV_POP';
+STDDEV_SAMP:                        'STDDEV_SAMP';
 
 
 // Common function Keywords
diff --git a/sql/src/main/antlr/OpenSearchSQLParser.g4 b/sql/src/main/antlr/OpenSearchSQLParser.g4
index 0ad08781bf..18c75b94ff 100644
--- a/sql/src/main/antlr/OpenSearchSQLParser.g4
+++ b/sql/src/main/antlr/OpenSearchSQLParser.g4
@@ -345,7 +345,7 @@ filterClause
     ;
 
 aggregationFunctionName
-    : AVG | COUNT | SUM | MIN | MAX
+    : AVG | COUNT | SUM | MIN | MAX | VAR_POP | VAR_SAMP | VARIANCE | STD | STDDEV | STDDEV_POP | STDDEV_SAMP
     ;
 
 mathematicalFunctionName
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java
index a3c8494e7a..e4e8028f05 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java
@@ -410,6 +410,27 @@ public void filteredAggregation() {
     );
   }
 
+  @Test
+  public void canBuildVarSamp() {
+    assertEquals(
+        aggregate("var_samp", qualifiedName("age")),
+        buildExprAst("var_samp(age)"));
+  }
+
+  @Test
+  public void canBuildVarPop() {
+    assertEquals(
+        aggregate("var_pop", qualifiedName("age")),
+        buildExprAst("var_pop(age)"));
+  }
+
+  @Test
+  public void canBuildVariance() {
+    assertEquals(
+        aggregate("variance", qualifiedName("age")),
+        buildExprAst("variance(age)"));
+  }
+
   private Node buildExprAst(String expr) {
     OpenSearchSQLLexer lexer = new OpenSearchSQLLexer(new CaseInsensitiveCharStream(expr));
     OpenSearchSQLParser parser = new OpenSearchSQLParser(new CommonTokenStream(lexer));
diff --git a/workbench/DEVELOPER_GUIDE.md b/workbench/DEVELOPER_GUIDE.md
new file mode 100644
index 0000000000..9d564a3cd1
--- /dev/null
+++ b/workbench/DEVELOPER_GUIDE.md
@@ -0,0 +1,55 @@
+## Developer Guide
+
+So you want to contribute code to this project? Excellent! We're glad you're here. Here's what you need to do.
+
+### Setup
+
+1. Download OpenSearch for the version that matches the [OpenSearch Dashboards version specified in package.json](./package.json#L8).
+1. Download and install the most recent version of [OpenSearch SQL plugin](https://github.com/opensearch-project/sql).
+1. Download the OpenSearch Dashboards source code for the [version specified in package.json](./package.json#L8) you want to set up.
+
+   See the [OpenSearch Dashboards contributing guide](https://github.com/opensearch-project/OpenSearch-Dashboards/blob/main/CONTRIBUTING.md) to get started.
+   
+1. Change your node version to the version specified in `.node-version` inside the OpenSearch Dashboards root directory.
+1. cd into the OpenSearch Dashboards source code directory.
+1. Check out this package from version control into the `plugins` directory.
+```
+git clone git@github.com:opensearch-project/sql.git plugins --no-checkout
+cd plugins
+echo 'workbench/*' >> .git/info/sparse-checkout
+git config core.sparseCheckout true
+git checkout main
+```
+7. Run `yarn osd bootstrap` inside `OpenSearch-Dashboards/plugins/workbench`.
+
+Ultimately, your directory structure should look like this:
+
+```md
+.
+├── OpenSearch-Dashboards
+│   └── plugins
+│       └── workbench
+```
+
+
+### Build
+
+To build the plugin's distributable zip, simply run `yarn build`.
+
+Example output: `./build/query-workbench-dashboards*.zip`
+
+
+### Run
+
+- `yarn start`
+
+  Starts OpenSearch Dashboards and includes this plugin. OpenSearch Dashboards will be available on `localhost:5601`.
+
+- `NODE_PATH=../../node_modules yarn test:jest`
+
+  Runs the plugin tests.
+
+
+### Submitting Changes
+
+See [CONTRIBUTING](CONTRIBUTING.md).
\ No newline at end of file
diff --git a/workbench/README.md b/workbench/README.md
index 739116b173..e8c50e6b58 100644
--- a/workbench/README.md
+++ b/workbench/README.md
@@ -5,70 +5,31 @@ The OpenSearch Dashboards Query Workbench enables you to query your OpenSearch d
 
 ## Documentation
 
-Please see our technical [documentation](https://docs-beta.opensearch.org/) to learn more about its features.
+Please see our technical [documentation](https://docs-beta.opensearch.org/search-plugins/sql/workbench/) to learn more about its features.
 
 
-## Setup
+## Contributing
 
-1. Download OpenSearch for the version that matches the [OpenSearch Dashboards version specified in package.json](./package.json#L8).
-1. Download and install the most recent version of [OpenSearch SQL plugin](https://github.com/opensearch-project/sql#open-distro-for-elasticsearch-sql).
-1. Download the OpenSearch Dashboards source code for the [version specified in package.json](./package.json#L8) you want to set up.
+See [developer guide](DEVELOPER_GUIDE.md) and [how to contribute to this project](CONTRIBUTING.md). 
 
-   See the [OpenSearch Dashboards contributing guide](https://github.com/opensearch-project/OpenSearch-Dashboards/blob/main/CONTRIBUTING.md) to get started.
-   
-1. Change your node version to the version specified in `.node-version` inside the OpenSearch Dashboards root directory.
-1. cd into the OpenSearch Dashboards source code directory.
-1. Check out this package from version control into the `plugins` directory.
-```
-git clone git@github.com:opensearch-project/sql.git plugins --no-checkout
-cd plugins
-echo 'workbench/*' >> .git/info/sparse-checkout
-git config core.sparseCheckout true
-git checkout main
-```
-6. Run `yarn osd bootstrap` inside `OpenSearch-Dashboards/plugins/workbench`.
+## Getting Help
 
-Ultimately, your directory structure should look like this:
+If you find a bug or have a feature request, please don't hesitate to open an issue in this repository.
 
-```md
-.
-├── OpenSearch-Dashboards
-│   └── plugins
-│       └── workbench
-```
+For more information, see the [project website](https://opensearch.org/) and [documentation](https://docs-beta.opensearch.org/). If you need help and are unsure where to open an issue, try the [forums](https://discuss.opendistrocommunity.dev/).
 
+## Code of Conduct
 
-## Build
+This project has adopted the [Amazon Open Source Code of Conduct](CODE_OF_CONDUCT.md). For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq), or contact [opensource-codeofconduct@amazon.com](mailto:opensource-codeofconduct@amazon.com) with any additional questions or comments.
 
-To build the plugin's distributable zip simply run `yarn build`.
+## Security
 
-Example output: `./build/opensearch-query-workbench-*.zip`
-
-
-## Run
-
-- `yarn start`
-
-  Starts OpenSearch Dashboards and includes this plugin. OpenSearch Dashboards will be available on `localhost:5601`.
-
-- `NODE_PATH=../../node_modules yarn test:jest`
-
-  Runs the plugin tests.
-
-
-## Contributing to OpenSearch SQL Workbench
-
-- Refer to [CONTRIBUTING.md](./CONTRIBUTING.md).
-- We welcome you to get involved in development, documentation, testing the OpenSearch SQL Workbench plugin. See our [CONTRIBUTING.md](./CONTRIBUTING.md) and join in.
-
-## Bugs, Enhancements or Questions
-
-Please file an issue to report any bugs you may find, enhancements you may need or questions you may have [here](https://github.com/opensearch-project/sql/issues).
+If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
 
 ## License
 
-This code is licensed under the Apache 2.0 License. 
+This project is licensed under the [Apache v2.0 License](LICENSE.txt).
 
 ## Copyright
 
-Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+Copyright 2020-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
diff --git a/workbench/package.json b/workbench/package.json
index d2fb434259..54d009704e 100644
--- a/workbench/package.json
+++ b/workbench/package.json
@@ -29,6 +29,7 @@
     "react-double-scrollbar": "^0.0.15"
   },
   "devDependencies": {
+    "@testing-library/user-event": "^13.1.9",
     "@types/hapi-latest": "npm:@types/hapi@18.0.3",
     "@types/react-router-dom": "^5.1.5",
     "cypress": "^5.0.0",
diff --git a/workbench/public/components/Main/__snapshots__/main.test.tsx.snap b/workbench/public/components/Main/__snapshots__/main.test.tsx.snap
index e50ff9b06b..5f536eae83 100644
--- a/workbench/public/components/Main/__snapshots__/main.test.tsx.snap
+++ b/workbench/public/components/Main/__snapshots__/main.test.tsx.snap
@@ -98,7 +98,7 @@ exports[`<Main /> spec click clear button 1`] = `
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
@@ -477,7 +477,7 @@ exports[`<Main /> spec click run button, and response causes an error 1`] = `
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
@@ -856,7 +856,7 @@ exports[`<Main /> spec click run button, and response is not ok 1`] = `
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
@@ -1235,7 +1235,7 @@ exports[`<Main /> spec click run button, and response is ok 1`] = `
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
@@ -1614,7 +1614,7 @@ exports[`<Main /> spec click run button, response fills null and missing values
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
@@ -1996,7 +1996,7 @@ exports[`<Main /> spec click translation button, and response is ok 1`] = `
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
@@ -2375,7 +2375,7 @@ exports[`<Main /> spec renders the component 1`] = `
           >
             <a
               class="euiButton euiButton--primary"
-              href="https://docs-beta.opensearch.org/docs/sql/"
+              href="https://docs-beta.opensearch.org/search-plugins/sql/index/"
               rel="noopener noreferrer"
               target="_blank"
             >
diff --git a/workbench/public/components/Main/main.tsx b/workbench/public/components/Main/main.tsx
index a932bdd48a..fa581e61be 100644
--- a/workbench/public/components/Main/main.tsx
+++ b/workbench/public/components/Main/main.tsx
@@ -634,7 +634,7 @@ export class Main extends React.Component<MainProps, MainState> {
           updateSQLQueries={this.updateSQLQueries}
         />
       );
-      link = 'https://docs-beta.opensearch.org/docs/sql/';
+      link = 'https://docs-beta.opensearch.org/search-plugins/sql/index/';
       linkTitle = 'SQL documentation';
     } else {
       page = (
@@ -647,7 +647,7 @@ export class Main extends React.Component<MainProps, MainState> {
           updatePPLQueries={this.updatePPLQueries}
         />
       );
-      link = 'https://docs-beta.opensearch.org/docs/ppl/';
+      link = 'https://docs-beta.opensearch.org/search-plugins/ppl/index/';
       linkTitle = 'PPL documentation';
     }
 
diff --git a/workbench/yarn.lock b/workbench/yarn.lock
index cb07a0fb8a..a342b782cc 100644
--- a/workbench/yarn.lock
+++ b/workbench/yarn.lock
@@ -23,6 +23,13 @@
     chalk "^2.0.0"
     js-tokens "^4.0.0"
 
+"@babel/runtime@^7.12.5":
+  version "7.14.6"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.14.6.tgz#535203bc0892efc7dec60bdc27b2ecf6e409062d"
+  integrity sha512-/PCB2uJ7oM44tz8YhC4Z/6PeOKXp4K588f+5M3clr1M4zbqztlo0XEfJ2LEzj/FgwfgGcIdl8n7YYjTCI0BYwg==
+  dependencies:
+    regenerator-runtime "^0.13.4"
+
 "@cypress/listr-verbose-renderer@^0.4.1":
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/@cypress/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#a77492f4b11dcc7c446a34b3e28721afd33c642a"
@@ -94,6 +101,13 @@
   dependencies:
     any-observable "^0.3.0"
 
+"@testing-library/user-event@^13.1.9":
+  version "13.1.9"
+  resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-13.1.9.tgz#29e49a42659ac3c1023565ff56819e0153a82e99"
+  integrity sha512-NZr0zL2TMOs2qk+dNlqrAdbaRW5dAmYwd1yuQ4r7HpkVEOj0MWuUjDWwKhcLd/atdBy8ZSMHSKp+kXSQe47ezg==
+  dependencies:
+    "@babel/runtime" "^7.12.5"
+
 "@types/angular@1.6.50":
   version "1.6.50"
   resolved "https://registry.yarnpkg.com/@types/angular/-/angular-1.6.50.tgz#8b6599088d80f68ef0cad7d3a2062248ebe72b3d"
@@ -2183,6 +2197,11 @@ readable-stream@^2.2.2:
     string_decoder "~1.1.1"
     util-deprecate "~1.0.1"
 
+regenerator-runtime@^0.13.4:
+  version "0.13.7"
+  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55"
+  integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==
+
 regexpp@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f"