From 26d8088f116fcabb7325fad524828a8cedc70c4f Mon Sep 17 00:00:00 2001
From: Scott Anderson
Date: Mon, 27 Nov 2023 14:05:15 -0700
Subject: [PATCH 1/2] chore(docs): fix typos through stdlib documentation

---
 libflux/go/libflux/buildinfo.gen.go | 26 ++++++++--------
 stdlib/contrib/chobbs/discord/discord.flux | 2 +-
 stdlib/experimental/array/array.flux | 2 +-
 stdlib/experimental/experimental.flux | 8 ++---
 stdlib/experimental/geo/geo.flux | 2 +-
 .../experimental/http/requests/requests.flux | 2 +-
 .../influxdata/influxdb/secrets/secrets.flux | 2 +-
 stdlib/influxdata/influxdb/tasks/tasks.flux | 2 +-
 stdlib/math/math.flux | 8 ++---
 stdlib/pagerduty/pagerduty.flux | 2 +-
 stdlib/sql/sql.flux | 2 +-
 stdlib/strings/strings.flux | 2 +-
 stdlib/types/types.flux | 2 +-
 stdlib/universe/universe.flux | 30 +++++++++----------
 14 files changed, 46 insertions(+), 46 deletions(-)

diff --git a/libflux/go/libflux/buildinfo.gen.go b/libflux/go/libflux/buildinfo.gen.go
index 3b69dbe79a..f1890d350a 100644
--- a/libflux/go/libflux/buildinfo.gen.go
+++ b/libflux/go/libflux/buildinfo.gen.go
@@ -90,7 +90,7 @@ var sourceHashes = map[string]string{
  "stdlib/contrib/bonitoo-io/tickscript/tickscript.flux": "ca93b020441699fcf74102a024a9bf31603ee67dd9cf01730b18f199beff5ab4",
  "stdlib/contrib/bonitoo-io/victorops/victorops.flux": "44d6674a86ceff7995153deca38db142fc0422b5a3526f5d0263a761d222269f",
  "stdlib/contrib/bonitoo-io/zenoss/zenoss.flux": "8f6de802b3176bf2524018e32ec5dc4eddd66db86abdeec837e9ca65ad7cc863",
- "stdlib/contrib/chobbs/discord/discord.flux": "bbda9aaee1966d67d310dc099d00476adda777394a1af279d447e4524e0d5e58",
+ "stdlib/contrib/chobbs/discord/discord.flux": "339d0d91ab323265fe5cbbf63b7278375992fefe46fbe6c9a302c5b37f36e8ec",
  "stdlib/contrib/jsternberg/influxdb/influxdb.flux": "afc52f2e31d5e063e318b752d077c58189317c373494563ea0895cdcdea49074",
  "stdlib/contrib/qxip/clickhouse/clickhouse.flux": "8ad86d9c3c7a4271178d5e2fa9bb850856363cf470d92c3f5010b6de9e770db1",
  "stdlib/contrib/qxip/hash/hash.flux": "496d0ae212408a44f442bb5093fa92691d3f72927618ee1e841b52646db126c4",
@@ -149,7 +149,7 @@ var sourceHashes = map[string]string{
  "stdlib/experimental/aggregate/aggregate.flux": "f3c1fef3d40a78897aa99709ff671f64c65e6206f348759906298dc6e48360e8",
  "stdlib/experimental/aggregate/aggregate_test.flux": "dfebbe6d046de66d167c0a6ea97b30436fb6019f27fd68fd4106089cbe9b31fd",
  "stdlib/experimental/alignTime_test.flux": "b5cc60a2d67fcf77f0a3c94a90568aa61de8c20364d68d726d21a02b1456edce",
- "stdlib/experimental/array/array.flux": "4f1ff39de39f2b40720261586f07c6a63f0fdfa87d1e0a561b9e78524f788243",
+ "stdlib/experimental/array/array.flux": "9732cf7d76b44f364dfe821ebbdb93a5e4182e8f276d93cf581c032b2b4bf175",
  "stdlib/experimental/array/array_test.flux": "e8dfb9b563af6e4020253a44cf4337df25fd5047cec644a26cd5f5e999e6aefb",
  "stdlib/experimental/bigtable/bigtable.flux": "3dff8e5951fb7b3635406ac175ecd0fd9ce877b58e2f92b6c98f5e8976c0f15f",
  "stdlib/experimental/bitwise/bitwise.flux": "dfe43f81b473dc26212d8615183376a5531630708c9884540d628fa7fe9b01e0",
@@ -162,7 +162,7 @@ var sourceHashes = map[string]string{
  "stdlib/experimental/distinct_test.flux": "c7358d31972d0931aef6735ea94d901827c13fbaaeb9b02ff255391b5f95ea30",
  "stdlib/experimental/dynamic/dynamic.flux": "4227d8f2e321ade187aadb283388e1bdec896c5b05d03ae9119eed3aae9dda0b",
  "stdlib/experimental/dynamic/dynamic_test.flux": "a6d5facd1670434fec0b2349131d23bfd72f52c29a8536ed60d90aee18cc2118",
- "stdlib/experimental/experimental.flux": "65b7c015a47f5f5da48d23a64d73bdf8c6e299b0530ab71af236711369104566",
+ "stdlib/experimental/experimental.flux": "ea03c89a5ffa0f1668e9f4dbb70646ba43386f376611a4e91212f164d3cf317d",
  "stdlib/experimental/experimental_test.flux": "2763ee3d6f373a11ad4e77c4ecf1bbf32974f912e75043d0b7753112117a06da",
  "stdlib/experimental/fill_test.flux": "3e31ca59476018a527a33d01d50c492da29f4de095df21c77abdb43c05947baf",
  "stdlib/experimental/first_test.flux": "3bd1ff03bac6a45a3c525abb5ded3377f08195b6a5094caa42c8fb8b96aa6268",
@@ -170,7 +170,7 @@ var sourceHashes = map[string]string{
  "stdlib/experimental/geo/filterRowsNotStrict_test.flux": "890919ac33bde919f4d1141932c44d4fd9401c3f30cd24f19e2aa0ef09e418cc",
  "stdlib/experimental/geo/filterRowsPivoted_test.flux": "25d56c93b61818606810c0ec7279a03deec9712a44a81eeb86957067d7bb2ee0",
  "stdlib/experimental/geo/filterRowsStrict_test.flux": "397c4ff9001976d268876574160c256563a1d5983101d54f4ec7fb9b91c45c5f",
- "stdlib/experimental/geo/geo.flux": "288792632573cc65b3972dbd1f8add370bc95d4eeaccf399fbb0c80f67c234b2",
+ "stdlib/experimental/geo/geo.flux": "ced8b3dd2a89e7ff837cb9d9c8d589095a9e89622d4afe303a231228ee828a21",
  "stdlib/experimental/geo/gridFilterLevel_test.flux": "f9d7c9b79a91cfcff7b4cd29a3b511fc397cb8666481a1ac40f0d8f28d94e146",
  "stdlib/experimental/geo/gridFilter_test.flux": "6de30bc0d5672a18d4d4e0351efabb9d24b783b71945faa6f67302ac4409b225",
  "stdlib/experimental/geo/groupByArea_test.flux": "cce3a07b36b7b30a51b1f55ff495cc86ef4f673d95928e8b18641ba3a8b2db30",
@@ -191,7 +191,7 @@ var sourceHashes = map[string]string{
  "stdlib/experimental/histogram_quantile_test.flux": "7c4ad5de961f1c770d691747d07fce87ef972deec684f8795b899f1dd6140f05",
  "stdlib/experimental/histogram_test.flux": "5243c4729bed4c21e5751be7dbb3593bad7244e41ed12329bdcaca94fb7ea10e",
  "stdlib/experimental/http/http.flux": "da8f8545cdd41f18dccd041a0bbcf421046095ab020115a2db5fd08862037935",
- "stdlib/experimental/http/requests/requests.flux": "579f5d578722a425f350db50dc2697a8bce7bcddc3737d094ae96126b6f96c56",
+ "stdlib/experimental/http/requests/requests.flux": "abbead056617c6fc3344d63de7de8ca3c6153b4f50e75ec44536f2c0db9179cc",
  "stdlib/experimental/influxdb/influxdb.flux": "503f68779c9d8a4c307d371cce8071826d0462a00e486190b3dfa681f9b620dc",
  "stdlib/experimental/integral_test.flux": "4c9b6c866884839027946e93793689b8b3d328e2e777f8ee2f8973ffd72be036",
  "stdlib/experimental/iox/iox.flux": "43e7b3246ca9ff2f0fd63176faa7bab6f35b0b38a25601c91347039d428abb85",
@@ -257,13 +257,13 @@ var sourceHashes = map[string]string{
  "stdlib/influxdata/influxdb/schema/influxRawQuery_test.flux": "f8adcc12be9c2a5b43a16fad9b69ce92fa01ef64f33ed38ea0561dd49e225172",
  "stdlib/influxdata/influxdb/schema/schema.flux": "4d51b7ee24730fbfdc87f6496f5b5747ec34376dd21dbf8072dc8068b2349e9e",
  "stdlib/influxdata/influxdb/schema/schema_test.flux": "c394122f9c42cd4664631d6ef3d309f9fcb2322960b9bf60b5ed4eacce60c92d",
- "stdlib/influxdata/influxdb/secrets/secrets.flux": "9adb9658f7e6bbf6c2a8fa9acb5f447cd9ec4c08094cb041e7203eb9db315588",
+ "stdlib/influxdata/influxdb/secrets/secrets.flux": "a6120ac859cabf8c897b6bebe904a8c3b44fbe2e0d996640a77b50fa89ba790a",
  "stdlib/influxdata/influxdb/secrets/secrets_test.flux": "989664f2796f6e530819a217dbb2823c76d756ab65d611380d671f9017ca00b5",
  "stdlib/influxdata/influxdb/tasks/last_success_duration_no_option_test.flux": "a96e9e5a5d98f9ea3b8dca5ba8f1cd1d8f638e175765301f5bdcf2f4a51365a4",
  "stdlib/influxdata/influxdb/tasks/last_success_duration_option_test.flux": "0341a21b4fce8a63b039f4d6f89df08aae03fe3e69416e78b61a1f6320339384",
  "stdlib/influxdata/influxdb/tasks/last_success_with_option_test.flux": "ca89eb78908567e574ac36b30b07dfcb740133544b62a2c11fda719468af3e4f",
  "stdlib/influxdata/influxdb/tasks/last_success_without_option_test.flux": "5425eeb07d2822f8615ce86ca23af4ae04d85e53c026245197d448e9ab9dbb77",
- "stdlib/influxdata/influxdb/tasks/tasks.flux": "925ba21a605f927c4f6d1091ef77bcd6c0aa7bae9bec44dccbc87540bbba9f0a",
+ "stdlib/influxdata/influxdb/tasks/tasks.flux": "874d856fc8a472b26eb7a962cafccbe0578c6d7b47b57b476ad558451cde1c33",
  "stdlib/influxdata/influxdb/to_test.flux": "fcf5253c42f7668988e6b951d4ec11c5ba754ee19ba1fcc67ec3e0b3c0d3d579",
  "stdlib/influxdata/influxdb/v1/v1.flux": "ed99baa0231340cd65d0495030084aa9e92ec9c6cc62b31fb35702ed1ff39864",
  "stdlib/internal/boolean/boolean.flux": "3d4d27a591d9063bfbc79a9302463e5715b6481748708a98df8a54e53871ab69",
@@ -285,9 +285,9 @@ var sourceHashes = map[string]string{
  "stdlib/join/large_join_test.flux": "b879c2c91f9415370bc21ff64f9b95171d9b08dceec995b1d92950345782e7d6",
  "stdlib/json/json.flux": "180dec063b8042db9fafb7a9e633a00451f0343ccf565ed8bf9650bebcd12674",
  "stdlib/kafka/kafka.flux": "c93e5a56f16d56d69f905e8fd3b02156ccb41742bb513c9d6fd82b26187ab5e8",
- "stdlib/math/math.flux": "bffaf6ef29cf82034b5360c03218837a280d0525c94a17b0e842763f65080bf4",
+ "stdlib/math/math.flux": "7b1ac1bec89caf0f65fe3d69356e9e9fb06ab48041ee29123201fa344570a3bc",
  "stdlib/math/math_test.flux": "1d84dabb6cb343bc6fce8968f8081a8e5b7f81d457e6c2204f764fbef0f018a4",
- "stdlib/pagerduty/pagerduty.flux": "80835277bb9f5e3f3a19d01f187d7fea71d636084032e01972ec6154be404e44",
+ "stdlib/pagerduty/pagerduty.flux": "bf4551d72ff94128435a5c2fcd5f1ef01fddcf8df4c725ee4ca0ba7553fa51c7",
  "stdlib/pagerduty/pagerduty_test.flux": "05171f407c0e6126c694d4109fae38a194f70568b5f86138a1f5e55ba1c50621",
  "stdlib/planner/aggregate_window_max_eval_test.flux": "94500e1317564dc35624c2b336a97ddbd2cf55b3d5e2dc7cf59cc3291ad79aca",
  "stdlib/planner/aggregate_window_max_push_test.flux": "0f51d1bcf032ac6d6076048b633d1c5a6f460b35970ed69c47f1ed1fdbf0b618",
@@ -342,9 +342,9 @@ var sourceHashes = map[string]string{
  "stdlib/sampledata/sampledata_test.flux": "70ba091a51d036a8cc25931808a39d81cf5d6dd17c2c7a5a081eb83fcfe5dd20",
  "stdlib/slack/slack.flux": "395d96eed6a8f8f6b275e8cc2f46f20326b92eff3cbb6a6583f216f146296d55",
  "stdlib/socket/socket.flux": "89b9c350c5cf8df5056a45623309be681c5472eadb9088ff5ee59a8c50b879b2",
- "stdlib/sql/sql.flux": "b1d5ed22a2db2046b98d8fb43b8b84e32a613681943260b8990564f12998c7a6",
+ "stdlib/sql/sql.flux": "5b8a6176c2775e09abccdbef24cd2db68e60c34d493b451a217100b1e84cac0e",
  "stdlib/sql/sql_test.flux": "69b61fc1208bb986c14f2e50b004c5bf5004f9cdcfefad0313b5d3981184e1a3",
- "stdlib/strings/strings.flux": "15f029bbc9e907e09fdb41507df50208d5ad10c216256da68c4595d818483f2d",
+ "stdlib/strings/strings.flux": "29d51dce3549cdb3862358bbef2a9e2676257e21572ef066221bdc6ddd06e042",
  "stdlib/strings/strings_test.flux": "d972d0b63017d808de0c7342e98ea6e4a48e2564236d4a3eef4bc9fd0bb5ef5e",
  "stdlib/system/system.flux": "ce0b1ed2fa5cbf52345b3eccfd54a7637021d361a1c6c7f204b51fcfe4d9683a",
  "stdlib/testing/basics/basics_test.flux": "f7ebc70ffe84cda2fa72da66b70b756d1af49cf281a75693758e840013bec229",
@@ -427,7 +427,7 @@ var sourceHashes = map[string]string{
  "stdlib/testing/usage/writes_test.flux": "ea7c8932381b626dcd543fd9e03e93069e33ff102e49db92df084eee61a359b5",
  "stdlib/timezone/timezone.flux": "71a3cc48cd1fe3797515fcaca342e2eb3a79545e1cdd302d5dbd3f631204d542",
  "stdlib/types/is_type_test.flux": "29aba0f8fdd089b09f5abc697290c06d2e498874bec6d02f8dbb2b0a0f5a2223",
- "stdlib/types/types.flux": "508ec49b09414ad3fdf49b3ca7a05d89e8b72c4d8794c73c335f27025b07b3c2",
+ "stdlib/types/types.flux": "a491844668c2119dd8a6767c2d475327e6794402b82fef83094f3d6c8e6572c8",
  "stdlib/universe/aggregate_empty_window_count_test.flux": "c0cebc3c5c3c6ba5b64f0c396a73270e0d53a4370c8d90302bdf7856c1c774d8",
  "stdlib/universe/aggregate_empty_window_first_test.flux": "8318f95aaec7a2e07385ed0e713de7056c448aef3daacf8f0408af13609ac096",
  "stdlib/universe/aggregate_empty_window_last_test.flux": "3082e38b9d5fb6093c434db1f784da6b97100e5ff073a23cfc08dd7df68db9b7",
@@ -610,7 +610,7 @@ var sourceHashes = map[string]string{
  "stdlib/universe/union_heterogeneous_test.flux": "7d8b47b3e96b859a5fed5985c051e2a3fdc947d3d6ff9cc104e40821581fb0cb",
  "stdlib/universe/union_test.flux": "f008260d48db70212ce64d3f51f4cf031532a9a67c1ba43242dbc4d43ef31293",
  "stdlib/universe/unique_test.flux": "c108ab7b0e4b0b77f0a320c8c4dacb8cfbccae8b389425754e9583e69cd64ee3",
- "stdlib/universe/universe.flux": "395a29e760746d1bafdc439b47345a8367c01f691a2b6bcb6c6d3f90bc022631",
+ "stdlib/universe/universe.flux": "f075f65e16502c8f9897ef1762664021abf62201e80c64cd89b854b7e70a0692",
  "stdlib/universe/universe_truncateTimeColumn_test.flux": "8acb700c612e9eba87c0525b33fd1f0528e6139cc912ed844932caef25d37b56",
  "stdlib/universe/window_aggregate_test.flux": "c8f66f7ee188bb2e979e5a8b526057b653922197ae441658f7c7f11251c96576",
  "stdlib/universe/window_default_start_align_test.flux": "0aaf612796fbb5ac421579151ad32a8861f4494a314ea615d0ccedd18067b980",
diff --git a/stdlib/contrib/chobbs/discord/discord.flux b/stdlib/contrib/chobbs/discord/discord.flux
index 8537cc5854..5c041e6aed 100644
--- a/stdlib/contrib/chobbs/discord/discord.flux
+++ b/stdlib/contrib/chobbs/discord/discord.flux
@@ -120,7 +120,7 @@ send = (
 // ```
 //
 // ## Metadata
-// tags: notifcation endpoints, transformations
+// tags: notification endpoints, transformations
 //
 endpoint = (webhookToken, webhookID, username, avatar_url="") =>
 (mapFn) =>
diff --git a/stdlib/experimental/array/array.flux b/stdlib/experimental/array/array.flux
index 22bf15a18a..1e543cbee4 100644
--- a/stdlib/experimental/array/array.flux
+++ b/stdlib/experimental/array/array.flux
@@ -58,7 +58,7 @@ from = array.from

 // concat appends two arrays and returns a new array.
 //
-// **Deprecated**: Experimetnal `array.concat()` is deprecated in favor of
+// **Deprecated**: Experimental `array.concat()` is deprecated in favor of
 // [`array.concat()`](https://docs.influxdata.com/flux/v0.x/stdlib/array/concat).
 //
 // ## Parameters
diff --git a/stdlib/experimental/experimental.flux b/stdlib/experimental/experimental.flux
index cd7da82986..e14eb477cf 100644
--- a/stdlib/experimental/experimental.flux
+++ b/stdlib/experimental/experimental.flux
@@ -412,7 +412,7 @@ builtin join : (left: stream[A], right: stream[B], fn: (left: A, right: B) => C)
 // ##### Applicable use cases
 // - Write to an InfluxDB bucket and query the written data in a single Flux script.
 //
-// _**Note:** `experimental.chain()` does not gaurantee that data written to
+// _**Note:** `experimental.chain()` does not guarantee that data written to
 // InfluxDB is immediately queryable. A delay between when data is written and
 // when it is queryable may cause a query using `experimental.chain()` to fail.
 //
@@ -938,7 +938,7 @@ builtin spread : (<-tables: stream[{T with _value: A}]) => stream[{T with _value
 // column for each input table.
 //
 // ## Standard deviation modes
-// The following modes are avaialable when calculating the standard deviation of data.
+// The following modes are available when calculating the standard deviation of data.
 //
 // ##### sample
 // Calculate the sample standard deviation where the data is considered to be
@@ -1198,7 +1198,7 @@ builtin min : (<-tables: stream[{T with _value: A}]) => stream[{T with _value: A
 // - Outputs a single table for each input table.
 // - Outputs a single record for each unique value in an input table.
 // - Leaves group keys, columns, and values unmodified.
-// - Drops emtpy tables.
+// - Drops empty tables.
 //
 // ## Parameters
 // - tables: Input data. Default is piped-forward data (`<-`).
@@ -1254,7 +1254,7 @@ builtin unique : (<-tables: stream[{T with _value: A}]) => stream[{T with _value
 // - tables: Input data. Default is piped-forward data (`<-`).
 //
 // ## Examples
-// ### Create a histgram from input data
+// ### Create a histogram from input data
 // ```
 // import "experimental"
 // import "sampledata"
diff --git a/stdlib/experimental/geo/geo.flux b/stdlib/experimental/geo/geo.flux
index f64a9284ed..04e75030eb 100644
--- a/stdlib/experimental/geo/geo.flux
+++ b/stdlib/experimental/geo/geo.flux
@@ -179,7 +179,7 @@ package geo
 import "experimental"
 import "influxdata/influxdb/v1"

-// units defines the unit of measurment used in geotemporal operations.
+// units defines the unit of measurement used in geotemporal operations.
 //
 // ## Metadata
 // introduced: 0.78.0
diff --git a/stdlib/experimental/http/requests/requests.flux b/stdlib/experimental/http/requests/requests.flux
index 166c61a824..87676211fc 100644
--- a/stdlib/experimental/http/requests/requests.flux
+++ b/stdlib/experimental/http/requests/requests.flux
@@ -2,7 +2,7 @@
 //
 // **Deprecated**: This package is deprecated in favor of [`requests`](https://docs.influxdata.com/flux/v0.x/stdlib/http/requests/).
 // Do not mix usage of this experimental package with the `requests` package as the `defaultConfig` is not shared between the two packages.
-// This experimental package is completely superceded by the `requests` package so there should be no need to mix them.
+// This experimental package is completely superseded by the `requests` package so there should be no need to mix them.
 //
 // ## Metadata
 // introduced: 0.152.0
diff --git a/stdlib/influxdata/influxdb/secrets/secrets.flux b/stdlib/influxdata/influxdb/secrets/secrets.flux
index 2154d326fc..e8085ed460 100644
--- a/stdlib/influxdata/influxdb/secrets/secrets.flux
+++ b/stdlib/influxdata/influxdb/secrets/secrets.flux
@@ -14,7 +14,7 @@ package secrets
 //
 // ## Examples
 //
-// ### Retrive a key from the InfluxDB secret store
+// ### Retrieve a key from the InfluxDB secret store
 // ```no_run
 // import "influxdata/influxdb/secrets"
 //
diff --git a/stdlib/influxdata/influxdb/tasks/tasks.flux b/stdlib/influxdata/influxdb/tasks/tasks.flux
index 653ced3af9..f57c57885e 100644
--- a/stdlib/influxdata/influxdb/tasks/tasks.flux
+++ b/stdlib/influxdata/influxdb/tasks/tasks.flux
@@ -25,7 +25,7 @@ builtin _lastSuccess : (orTime: T, lastSuccessTime: time) => time where T: Timea
 //
 // ## Examples
 //
-// ### Return the time an InfluxDB task last succesfully ran
+// ### Return the time an InfluxDB task last successfully ran
 // ```no_run
 // import "influxdata/influxdb/tasks"
 //
diff --git a/stdlib/math/math.flux b/stdlib/math/math.flux
index 207e12928d..4eca39e2c0 100644
--- a/stdlib/math/math.flux
+++ b/stdlib/math/math.flux
@@ -537,7 +537,7 @@ builtin cosh : (x: float) => float
 //
 // ## Examples
 //
-// ### Return the maximum difference betwee two values
+// ### Return the maximum difference between two values
 // ```no_run
 // import "math"
 //
@@ -1191,7 +1191,7 @@ builtin isNaN : (f: float) => bool
 //
 builtin j0 : (x: float) => float

-// j1 is a funciton that returns the order-one Bessel function for the first kind.
+// j1 is a function that returns the order-one Bessel function for the first kind.
 //
 // ## Parameters
 // - x: Value to operate on.
@@ -1223,7 +1223,7 @@ builtin j0 : (x: float) => float
 //
 builtin j1 : (x: float) => float

-// jn returns the order-n Bessel funciton of the first kind.
+// jn returns the order-n Bessel function of the first kind.
 //
 // ## Parameters
 // - n: Order number.
@@ -1566,7 +1566,7 @@ builtin logb : (x: float) => float
 //
 builtin mMax : (x: float, y: float) => float

-// mMin is a function taht returns the lessser of `x` or `y`.
+// mMin is a function that returns the lesser of `x` or `y`.
 //
 // ## Parameters
 // - x: x-value to use in the operation.
diff --git a/stdlib/pagerduty/pagerduty.flux b/stdlib/pagerduty/pagerduty.flux
index 153f380b33..73a6a51722 100644
--- a/stdlib/pagerduty/pagerduty.flux
+++ b/stdlib/pagerduty/pagerduty.flux
@@ -54,7 +54,7 @@ option defaultURL = "https://events.pagerduty.com/v2/enqueue"
 //
 // ## Examples
 //
-// ### Convert a status level to a PagerDuty serverity
+// ### Convert a status level to a PagerDuty severity
 // ```no_run
 // import "pagerduty"
 //
diff --git a/stdlib/sql/sql.flux b/stdlib/sql/sql.flux
index f29cb8cab2..b34dc2925a 100644
--- a/stdlib/sql/sql.flux
+++ b/stdlib/sql/sql.flux
@@ -60,7 +60,7 @@
 // authentication credentials using one of the following methods:
 //
 // - The `GOOGLE_APPLICATION_CREDENTIALS` environment variable that identifies the
-// location of yur credential JSON file.
+// location of your credential JSON file.
 // - Provide your BigQuery credentials using the `credentials` URL parameters in your BigQuery DSN.
 //
 // #### BigQuery credential URL parameter
diff --git a/stdlib/strings/strings.flux b/stdlib/strings/strings.flux
index 84611b6ecd..a6a880cb55 100644
--- a/stdlib/strings/strings.flux
+++ b/stdlib/strings/strings.flux
@@ -391,7 +391,7 @@ builtin compare : (v: string, t: string) => int
 // ## Parameters
 //
 // - v: String value to search.
-// - substr: Substring to count occurences of.
+// - substr: Substring to count occurrences of.
 //
 // The function counts only non-overlapping instances of `substr`.
 //
diff --git a/stdlib/types/types.flux b/stdlib/types/types.flux
index db759d1e26..b8b7957a49 100644
--- a/stdlib/types/types.flux
+++ b/stdlib/types/types.flux
@@ -130,7 +130,7 @@ builtin isType : (v: A, type: string) => bool where A: Basic
 // isNumeric tests if a value is a numeric type (int, uint, or float).
 //
 // This is a helper function to test or filter for values that can be used in
-// arithmatic operations or aggregations.
+// arithmetic operations or aggregations.
 //
 // ## Parameters
 // - v: Value to test.
diff --git a/stdlib/universe/universe.flux b/stdlib/universe/universe.flux
index e88aee0480..3b6a3c3091 100644
--- a/stdlib/universe/universe.flux
+++ b/stdlib/universe/universe.flux
@@ -700,7 +700,7 @@ builtin first : (<-tables: stream[A], ?column: string) => stream[A] where A: Rec

 // group regroups input data by modifying group key of input tables.
 //
-// **Note**: Group does not gaurantee sort order.
+// **Note**: Group does not guarantee sort order.
 // To ensure data is sorted correctly, use `sort()` after `group()`.
 //
 // ## Parameters
@@ -711,7 +711,7 @@ builtin first : (<-tables: stream[A], ?column: string) => stream[A] where A: Rec
 //
 // - mode: Grouping mode. Default is `by`.
 //
-// **Avaliable modes**:
+// **Available modes**:
 // - **by**: Group by columns defined in the `columns` parameter.
 // - **except**: Group by all columns _except_ those in defined in the
 // `columns` parameter.
@@ -1196,7 +1196,7 @@ builtin join : (<-tables: A, ?method: string, ?on: [string]) => stream[B] where
 //
 // ## Examples
 //
-// ### Caclulate Kaufman's Adaptive Moving Average for input data
+// ### Calculate Kaufman's Adaptive Moving Average for input data
 // ```
 // import "sampledata"
 //
@@ -1215,7 +1215,7 @@ builtin kaufmansAMA : (<-tables: stream[A], n: int, ?column: string) => stream[B

 // keep returns a stream of tables containing only the specified columns.
 //
-// Columns in the group key that are not specifed in the `columns` parameter or
+// Columns in the group key that are not specified in the `columns` parameter or
 // identified by the `fn` parameter are removed from the group key and dropped
 // from output tables. `keep()` is the inverse of `drop()`.
 //
@@ -1696,7 +1696,7 @@ builtin limit : (<-tables: stream[A], n: int, ?offset: int) => stream[A]
 // the group key.
 //
 // #### Preserve columns
-// `map()` drops any columns that are not mapped explictly by column label or
+// `map()` drops any columns that are not mapped explicitly by column label or
 // implicitly using the `with` operator in the `fn` function.
 // The `with` operator updates a record property if it already exists, creates
 // a new record property if it doesn’t exist, and includes all existing
@@ -1917,7 +1917,7 @@ builtin movingAverage : (
 // - q: Quantile to compute. Must be between `0.0` and `1.0`.
 // - method: Computation method. Default is `estimate_tdigest`.
 //
-// **Avaialable methods**:
+// **Available methods**:
 //
 // - **estimate_tdigest**: Aggregate method that uses a
 // [t-digest data structure](https://github.com/tdunning/t-digest) to
@@ -2465,7 +2465,7 @@ builtin skew : (<-tables: stream[A], ?column: string) => stream[B] where A: Reco
 //
 builtin spread : (<-tables: stream[A], ?column: string) => stream[B] where A: Record, B: Record

-// sort orders rows in each intput table based on values in specified columns.
+// sort orders rows in each input table based on values in specified columns.
 //
 // #### Output data
 // One output table is produced for each input table.
@@ -2585,7 +2585,7 @@ builtin stateTracking : (
 // - mode: Standard deviation mode or type of standard deviation to calculate.
 // Default is `sample`.
 //
-// **Availble modes:**
+// **Available modes:**
 //
 // - **sample**: Calculate the sample standard deviation where the data is
 // considered part of a larger population.
@@ -3122,7 +3122,7 @@ builtin findRecord : (<-tables: stream[A], fn: (key: B) => bool, idx: int) => A
 // ### Convert all values in a column to booleans
 // If converting the `_value` column to boolean types, use `toBool()`.
 // If converting columns other than `_value`, use `map()` to iterate over each
-// row and `bool()` to covert a column value to a boolean type.
+// row and `bool()` to convert a column value to a boolean type.
 //
 // ```
 // # import "sampledata"
@@ -3234,7 +3234,7 @@ builtin duration : (v: A) => duration
 // ### Convert all values in a column to floats
 // If converting the `_value` column to float types, use `toFloat()`.
 // If converting columns other than `_value`, use `map()` to iterate over each
-// row and `float()` to covert a column value to a float type.
+// row and `float()` to convert a column value to a float type.
 //
 // ```
 // # import "sampledata"
@@ -3294,7 +3294,7 @@ builtin _vectorizedFloat : (v: vector[A]) => vector[float]
 // ### Convert all values in a column to integers
 // If converting the `_value` column to integer types, use `toInt()`.
 // If converting columns other than `_value`, use `map()` to iterate over each
-// row and `int()` to covert a column value to a integer type.
+// row and `int()` to convert a column value to a integer type.
 //
 // ```
 // # import "sampledata"
@@ -3331,7 +3331,7 @@ builtin int : (v: A) => int
 // ### Convert all values in a column to strings
 // If converting the `_value` column to string types, use `toString()`.
 // If converting columns other than `_value`, use `map()` to iterate over each
-// row and `string()` to covert a column value to a string type.
+// row and `string()` to convert a column value to a string type.
 //
 // ```
 // # import "sampledata"
@@ -3373,7 +3373,7 @@ builtin string : (v: A) => string
 // ### Convert all values in a column to time
 // If converting the `_value` column to time types, use `toTime()`.
 // If converting columns other than `_value`, use `map()` to iterate over each
-// row and `time()` to covert a column value to a time type.
+// row and `time()` to convert a column value to a time type.
 //
 // ```
 // # import "sampledata"
@@ -3422,7 +3422,7 @@ builtin time : (v: A) => time
 // ### Convert all values in a column to unsigned integers
 // If converting the `_value` column to uint types, use `toUInt()`.
 // If converting columns other than `_value`, use `map()` to iterate over each
-// row and `uint()` to covert a column value to a uint type.
+// row and `uint()` to convert a column value to a uint type.
 //
 // ```
 // # import "sampledata"
@@ -3951,7 +3951,7 @@ increase = (tables=<-, columns=["_value"]) =>
 // - column: Column to use to compute the median. Default is `_value`.
 // - method: Computation method. Default is `estimate_tdigest`.
 //
-// **Avaialable methods**:
+// **Available methods**:
 //
 // - **estimate_tdigest**: Aggregate method that uses a
 // [t-digest data structure](https://github.com/tdunning/t-digest) to

From 6e780968d1159023e6753c8bf636d5fb49e75390 Mon Sep 17 00:00:00 2001
From: Scott Anderson
Date: Mon, 27 Nov 2023 17:44:22 -0700
Subject: [PATCH 2/2] chore(docs): fix broken post examples in http/requests packages

---
 libflux/go/libflux/buildinfo.gen.go | 4 ++--
 stdlib/experimental/http/requests/requests.flux | 4 ++--
 stdlib/http/requests/requests.flux | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/libflux/go/libflux/buildinfo.gen.go b/libflux/go/libflux/buildinfo.gen.go
index f1890d350a..5fdea4cdf7 100644
--- a/libflux/go/libflux/buildinfo.gen.go
+++ b/libflux/go/libflux/buildinfo.gen.go
@@ -191,7 +191,7 @@ var sourceHashes = map[string]string{
  "stdlib/experimental/histogram_quantile_test.flux": "7c4ad5de961f1c770d691747d07fce87ef972deec684f8795b899f1dd6140f05",
  "stdlib/experimental/histogram_test.flux": "5243c4729bed4c21e5751be7dbb3593bad7244e41ed12329bdcaca94fb7ea10e",
  "stdlib/experimental/http/http.flux": "da8f8545cdd41f18dccd041a0bbcf421046095ab020115a2db5fd08862037935",
- "stdlib/experimental/http/requests/requests.flux": "abbead056617c6fc3344d63de7de8ca3c6153b4f50e75ec44536f2c0db9179cc",
+ "stdlib/experimental/http/requests/requests.flux": "abfd2db0374889335ad876eff9bce06732597bfc2a42ca916b12ea1ef4e7bf70",
  "stdlib/experimental/influxdb/influxdb.flux": "503f68779c9d8a4c307d371cce8071826d0462a00e486190b3dfa681f9b620dc",
  "stdlib/experimental/integral_test.flux": "4c9b6c866884839027946e93793689b8b3d328e2e777f8ee2f8973ffd72be036",
  "stdlib/experimental/iox/iox.flux": "43e7b3246ca9ff2f0fd63176faa7bab6f35b0b38a25601c91347039d428abb85",
@@ -234,7 +234,7 @@ var sourceHashes = map[string]string{
  "stdlib/generate/generate.flux": "2f705b8e44f0b8c64478ad59ccfab09d05c04666b1ab7279629afe1c35c75002",
  "stdlib/http/http.flux": "b1e02153693002f80134f6bd76108afa11f7a0c4ee041bcfc17c373c1a6c1338",
  "stdlib/http/http_path_encode_endpoint_test.flux": "cb1e075f9d0f5d2a2d46b6cec6a8e34eb932f3d31b5494f48a3135c9503d4038",
- "stdlib/http/requests/requests.flux": "9a343c4783777c7a0ab392e754b59627feeb210c577a7a30f787a4513009a0e3",
+ "stdlib/http/requests/requests.flux": "f88df79b7918f5ab92860197118070bc05f9bbbd9a7261462cecd367dfeeb704",
  "stdlib/influxdata/influxdb/influxdb.flux": "101d6ab960d5e1fed6622ccfe8adea1911c317666aa7daa5a2a29a8f96abdd8b",
  "stdlib/influxdata/influxdb/monitor/check_test.flux": "9f36d19fd834a8230beaba8782a721697b2b0e1b408ee3f948387b0b8d702bfb",
  "stdlib/influxdata/influxdb/monitor/deadman_add_test.flux": "4bc5d036f028cd9aa078ed653c61be8b43992779ead1ca5bb26e554d428ffe8a",
diff --git a/stdlib/experimental/http/requests/requests.flux b/stdlib/experimental/http/requests/requests.flux
index 87676211fc..d046d3ae43 100644
--- a/stdlib/experimental/http/requests/requests.flux
+++ b/stdlib/experimental/http/requests/requests.flux
@@ -181,8 +181,8 @@ do =
 //
 // response =
 // requests.post(
-// url: "https://goolnk.com/api/v1/shorten",
-// body: json.encode(v: {url: "http://www.influxdata.com"}),
+// url: "https://reqres.in/api/users",
+// body: json.encode(v: {name: "doc brown", job: "time traveler"}),
 // headers: ["Content-Type": "application/json"],
 // )
 //
diff --git a/stdlib/http/requests/requests.flux b/stdlib/http/requests/requests.flux
index dbdf5a2ebc..e0a1c00dcf 100644
--- a/stdlib/http/requests/requests.flux
+++ b/stdlib/http/requests/requests.flux
@@ -173,8 +173,8 @@ do = (
 //
 // response =
 // requests.post(
-// url: "https://goolnk.com/api/v1/shorten",
-// body: json.encode(v: {url: "http://www.influxdata.com"}),
+// url: "https://reqres.in/api/users",
+// body: json.encode(v: {name: "doc brown", job: "time traveler"}),
 // headers: ["Content-Type": "application/json"],
 // )
 //