Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
  • Loading branch information
JackTan25 committed Dec 21, 2023
2 parents ffbbd3f + c1e1003 commit d72796d
Show file tree
Hide file tree
Showing 247 changed files with 5,709 additions and 1,240 deletions.
42 changes: 42 additions & 0 deletions .github/workflows/links.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
# Nightly link checker: scans the repository with lychee and opens a GitHub
# issue listing any broken links it finds.
name: Links

on:
  repository_dispatch:
  workflow_dispatch:
  schedule:
    # Run once a day at 18:00 UTC.
    - cron: "00 18 * * *"

jobs:
  linkChecker:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      # Restore the lychee results cache so recently-verified links
      # are not re-checked on every run.
      - name: Restore lychee cache
        id: restore-cache
        uses: actions/cache/restore@v3
        with:
          path: .lycheecache
          key: cache-lychee-${{ github.sha }}
          restore-keys: cache-lychee-

      - name: Link Checker
        id: lychee
        uses: lycheeverse/lychee-action@v1.8.0
        with:
          # Cache results for up to one day; twitter.com URLs are excluded
          # because they cannot be checked reliably without authentication.
          args: "--base . --cache --max-cache-age 1d . --exclude 'https?://twitter\\.com(?:/.*$)?$'"

      # Save the cache even when the link check fails, so the next run
      # still benefits from it.
      - name: Save lychee cache
        uses: actions/cache/save@v3
        if: always()
        with:
          path: .lycheecache
          key: ${{ steps.restore-cache.outputs.cache-primary-key }}

      # lychee-action exports env.lychee_exit_code; non-zero means broken
      # links were found, so file a report issue from its markdown output.
      - name: Create Issue From File
        if: env.lychee_exit_code != 0
        uses: peter-evans/create-issue-from-file@v4
        with:
          title: Link Checker Report
          content-filepath: ./lychee/out.md
          labels: report, automated issue
2 changes: 1 addition & 1 deletion .github/workflows/pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ jobs:
token: ${{ github.token }}
identifier: 'pr-assistant-cla'
body: |
Pull request description must contain [CLA](https://docs.databend.com/doc/contributing/good-pr) like the following:
Pull request description must contain [CLA](https://docs.databend.com/dev/policies/cla/) like the following:
```
I hereby agree to the terms of the CLA available at: https://docs.databend.com/dev/policies/cla/
Expand Down
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -75,3 +75,6 @@ benchmark/clickbench/results

# z3
**/.z3-trace

# lychee
.lycheecache
48 changes: 47 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

72 changes: 36 additions & 36 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
<div align="center">

<h4 align="center">
<a href="https://docs.databend.com/doc/cloud">Databend Serverless Cloud (beta)</a> |
<a href="https://docs.databend.com/doc">Documentation</a> |
<a href="https://docs.databend.com/guides/cloud">Databend Serverless Cloud (beta)</a> |
<a href="https://docs.databend.com/">Documentation</a> |
<a href="https://benchmark.clickhouse.com/">Benchmarking</a> |
<a href="https://github.com/datafuselabs/databend/issues/11868">Roadmap (v1.3)</a>

Expand Down Expand Up @@ -50,7 +50,7 @@

- **Rich Data Support**: Handles diverse data formats and types, including JSON, CSV, Parquet, ARRAY, TUPLE, and MAP.

- **AI-Enhanced Analytics**: Offers advanced analytics capabilities with integrated [AI Functions](https://docs.databend.com/doc/sql-functions/ai-functions/).
- **AI-Enhanced Analytics**: Offers advanced analytics capabilities with integrated [AI Functions](https://docs.databend.com/sql/sql-functions/ai-functions/).

- **Community-Driven**: Benefit from a friendly, growing community that offers an easy-to-use platform for all your cloud analytics.

Expand Down Expand Up @@ -82,58 +82,58 @@ docker run --net=host datafuselabs/databend
<details>
<summary>Deploying Databend</summary>

- [Understanding Deployment Modes](https://docs.databend.com/doc/deploy/understanding-deployment-modes)
- [Deploying a Standalone Databend](https://docs.databend.com/doc/deploy/deploying-databend)
- [Expanding a Standalone Databend](https://docs.databend.com/doc/deploy/expanding-to-a-databend-cluster)
- [Databend Cloud (Beta)](https://docs.databend.com/cloud)
- [Understanding Deployment Modes](https://docs.databend.com/guides/deploy/understanding-deployment-modes)
- [Deploying a Standalone Databend](https://docs.databend.com/guides/deploy/deploying-databend)
- [Expanding a Standalone Databend](https://docs.databend.com/guides/deploy/expanding-to-a-databend-cluster)
- [Databend Cloud (Beta)](https://docs.databend.com/guides/cloud)
</details>

<details>
<summary>Connecting to Databend</summary>

- [Connecting to Databend with BendSQL](https://docs.databend.com/doc/sql-clients/bendsql)
- [Connecting to Databend with JDBC](https://docs.databend.com/doc/sql-clients/jdbc)
- [Connecting to Databend with MySQL-Compatible Clients](https://docs.databend.com/doc/sql-clients/mysql)
- [Connecting to Databend with BendSQL](https://docs.databend.com/guides/sql-clients/bendsql)
- [Connecting to Databend with JDBC](https://docs.databend.com/guides/sql-clients/jdbc)
- [Connecting to Databend with MySQL-Compatible Clients](https://docs.databend.com/guides/sql-clients/mysql)

</details>

<details>
<summary>Loading Data into Databend</summary>

- [How to Load Data from Local File](https://docs.databend.com/doc/load-data/load/local)
- [How to Load Data from Bucket](https://docs.databend.com/doc/load-data/load/s3)
- [How to Load Data from Stage](https://docs.databend.com/doc/load-data/load/stage)
- [How to Load Data from Remote Files](https://docs.databend.com/doc/load-data/load/http)
- [Querying Data in Staged Files](https://docs.databend.com/doc/load-data/transform/querying-stage)
- [Transforming Data During a Load](https://docs.databend.com/doc/load-data/transform/data-load-transform)
- [How to Unload Data from Databend](https://docs.databend.com/doc/load-data/unload)
- [How to Load Data from Local File](https://docs.databend.com/guides/load-data/load/local)
- [How to Load Data from Bucket](https://docs.databend.com/guides/load-data/load/s3)
- [How to Load Data from Stage](https://docs.databend.com/guides/load-data/load/stage)
- [How to Load Data from Remote Files](https://docs.databend.com/guides/load-data/load/http)
- [Querying Data in Staged Files](https://docs.databend.com/guides/load-data/transform/querying-stage)
- [Transforming Data During a Load](https://docs.databend.com/guides/load-data/transform/data-load-transform)
- [How to Unload Data from Databend](https://docs.databend.com/guides/unload-data/)

</details>

<details>
<summary>Loading Data Tools with Databend</summary>

- [Apache Kafka](https://docs.databend.com/doc/load-data/load-db/kafka)
- [Airbyte](https://docs.databend.com/doc/load-data/load-db/airbyte)
- [dbt](https://docs.databend.com/doc/load-data/load-db/dbt)
- [Debezium](https://docs.databend.com/doc/load-data/load-db/debezium)
- [Apache Flink CDC](https://docs.databend.com/doc/load-data/load-db/flink-cdc)
- [DataDog Vector](https://docs.databend.com/doc/load-data/load-db/vector)
- [Addax](https://docs.databend.com/doc/load-data/load-db/addax)
- [DataX](https://docs.databend.com/doc/load-data/load-db/datax)
- [Apache Kafka](https://docs.databend.com/guides/load-data/load-db/kafka)
- [Airbyte](https://docs.databend.com/guides/load-data/load-db/airbyte)
- [dbt](https://docs.databend.com/guides/load-data/load-db/dbt)
- [Debezium](https://docs.databend.com/guides/load-data/load-db/debezium)
- [Apache Flink CDC](https://docs.databend.com/guides/load-data/load-db/flink-cdc)
- [DataDog Vector](https://docs.databend.com/guides/load-data/load-db/vector)
- [Addax](https://docs.databend.com/guides/load-data/load-db/addax)
- [DataX](https://docs.databend.com/guides/load-data/load-db/datax)

</details>

<details>
<summary>Visualize Tools with Databend</summary>

- [Metabase](https://docs.databend.com/doc/visualize/metabase)
- [Tableau](https://docs.databend.com/doc/visualize/tableau)
- [Grafana](https://docs.databend.com/doc/visualize/grafana)
- [Jupyter Notebook](https://docs.databend.com/doc/visualize/jupyter)
- [Deepnote](https://docs.databend.com/doc/visualize/deepnote)
- [MindsDB](https://docs.databend.com/doc/visualize/mindsdb)
- [Redash](https://docs.databend.com/doc/visualize/redash)
- [Metabase](https://docs.databend.com/guides/visualize/metabase)
- [Tableau](https://docs.databend.com/guides/visualize/tableau)
- [Grafana](https://docs.databend.com/guides/visualize/grafana)
- [Jupyter Notebook](https://docs.databend.com/guides/visualize/jupyter)
- [Deepnote](https://docs.databend.com/guides/visualize/deepnote)
- [MindsDB](https://docs.databend.com/guides/visualize/mindsdb)
- [Redash](https://docs.databend.com/guides/visualize/redash)

</details>

Expand Down Expand Up @@ -226,8 +226,8 @@ Databend thrives on community contributions! Whether it's through ideas, code, o

Here are some resources to help you get started:

- [Building Databend From Source](https://docs.databend.com/doc/contributing/building-from-source)
- [The First Good Pull Request](https://docs.databend.com/doc/contributing/good-pr)
- [Building Databend From Source](https://docs.databend.com/guides/overview/community/contributor/building-from-source)
- [The First Good Pull Request](https://docs.databend.com/guides/overview/community/contributor/good-pr)


## 👥 Community
Expand All @@ -236,7 +236,7 @@ For guidance on using Databend, we recommend starting with the official document

- [Slack](https://link.databend.rs/join-slack) (For live discussion with the Community)
- [GitHub](https://github.com/datafuselabs/databend) (Feature/Bug reports, Contributions)
- [Twitter](https://twitter.com/DatabendLabs) (Get the news fast)
- [Twitter](https://twitter.com/DatabendLabs/) (Get the news fast)
- [I'm feeling lucky](https://link.databend.rs/i-m-feeling-lucky) (Pick up a good first issue now!)


Expand All @@ -258,7 +258,7 @@ Databend is released under a combination of two licenses: the [Apache License 2.

When contributing to Databend, you can find the relevant license header in each file.

For more information, see the [LICENSE](LICENSE) file and [Licensing FAQs](https://docs.databend.com/doc/enterprise/license).
For more information, see the [LICENSE](LICENSE) file and [Licensing FAQs](https://docs.databend.com/guides/overview/editions/dee/license).


## 🙏 Acknowledgement
Expand Down
2 changes: 1 addition & 1 deletion benchmark/tpch/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,4 +33,4 @@ databend-sqllogictests --handlers mysql --database tpch --run_dir tpch --bench

## More

[Benchmarking Databend using TPC-H](https://databend.rs/blog/2022/08/08/benchmark-tpc-h)
[Benchmarking Databend using TPC-H](https://www.databend.com/blog/2022/08/08/benchmark-tpc-h)
2 changes: 0 additions & 2 deletions src/common/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,11 @@
- [`base`](./base/) contains runtime, pool, allocator and rangemap.
- [`building`](./building/) sets up the environment for building components and internal use.
- [`cache`](./cache/) contains cache traits designed for memory and disk, and provides a basic LRU implementation.
- [`contexts`](./contexts/) is the context of the data access layer.
- [`exception`](./exception/), error handling and backtracking.
- [`grpc`](./grpc/) wraps some of the utility code snippets for grpc.
- [`hashtable`](./hashtable/), a linear probe hashtable, mainly used in scenarios such as `group by` aggregation functions and `join`.
- [`http`](./http/) is a common http handler that includes health check, cpu/memory profile and graceful shutdown.
- [`io`](./io/) focuses on binary serialisation and deserialisation.
- [`macros`](./macros/) are some of the procedural macros used with `common_base::base::Runtime`
- [`metrics`](./metrics/) takes over the initialization of the `PrometheusRecorder` and owns the `PrometheusHandle`.
- [`storage`](./storage/) provides storage related types and functions.
- [`tracing`](./tracing/) handles logging and tracing.
6 changes: 6 additions & 0 deletions src/common/exception/src/exception_code.rs
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,11 @@ build_exceptions! {
NetworkPolicyAlreadyExists(2208),
IllegalNetworkPolicy(2209),
NetworkPolicyIsUsedByUser(2210),
UnknownPasswordPolicy(2211),
PasswordPolicyAlreadyExists(2212),
IllegalPasswordPolicy(2213),
PasswordPolicyIsUsedByUser(2214),
InvalidPassword(2215),

// Meta api error codes.
DatabaseAlreadyExists(2301),
Expand Down Expand Up @@ -307,6 +312,7 @@ build_exceptions! {
UnknownShareEndpointId(2716),
UnknownShareTable(2717),
CannotShareDatabaseCreatedFromShare(2718),
ShareStorageError(2719),

// Index error codes.
CreateIndexWithDropTime(2720),
Expand Down
12 changes: 12 additions & 0 deletions src/common/metrics/src/metrics/http.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,20 @@
// limitations under the License.

use std::sync::LazyLock;
use std::time::Duration;

use crate::register_counter;
use crate::register_counter_family;
use crate::register_histogram_family_in_seconds;
use crate::Counter;
use crate::Family;
use crate::Histogram;
use crate::VecLabels;

static QUERY_HTTP_REQUESTS_COUNT: LazyLock<Family<VecLabels, Counter>> =
LazyLock::new(|| register_counter_family("query_http_requests_count"));
static QUERY_HTTP_RESPONSE_DURATION: LazyLock<Family<VecLabels, Histogram>> =
LazyLock::new(|| register_histogram_family_in_seconds("query_http_response_duration_seconds"));
static QUERY_HTTP_SLOW_REQUESTS_COUNT: LazyLock<Family<VecLabels, Counter>> =
LazyLock::new(|| register_counter_family("query_http_slow_requests_count"));
static QUERY_HTTP_RESPONSE_ERRORS_COUNT: LazyLock<Family<VecLabels, Counter>> =
Expand All @@ -46,6 +51,13 @@ pub fn metrics_incr_http_response_errors_count(err: String, code: u16) {
.inc();
}

/// Records one HTTP response latency sample, labelled by request method and
/// API endpoint, into the `query_http_response_duration_seconds` histogram.
pub fn metrics_observe_http_response_duration(method: String, api: String, duration: Duration) {
    let label_set = vec![("method", method), ("api", api)];
    let seconds = duration.as_secs_f64();
    QUERY_HTTP_RESPONSE_DURATION.get_or_create(&label_set).observe(seconds);
}

/// Increments the counter of HTTP responses that ended in a panic.
pub fn metrics_incr_http_response_panics_count() {
    QUERY_HTTP_RESPONSE_PANICS_COUNT.inc();
}
Loading

0 comments on commit d72796d

Please sign in to comment.