Remove "whitelist" and "blacklist" terminology from repository where …
Browse files Browse the repository at this point in the history
…possible

For context, see:

 - https://tools.ietf.org/html/draft-knodel-terminology-01
 - https://developers.google.com/style/inclusive-documentation#features-and-users

Blocked by configuration of other projects and vendored code:

 - tox-dev/tox#1491
 - jenkinsci/ghprb-plugin#784
 - Generated storage_v1_messages.py (undocumented how we generated this!?)
 - Many golang vendored modules
kennknowles authored and yirutang committed Jul 23, 2020
1 parent 2f896ad commit 9c40c21
Showing 8 changed files with 14 additions and 14 deletions.
@@ -218,11 +218,11 @@ public String create(PipelineOptions options) {
/**
* Specifies whether worker pools should be started with public IP addresses.
*
- * <p>WARNING: This feature is experimental. You must be whitelisted to use it.
+ * <p>WARNING: This feature is experimental. You must be allowlisted to use it.
*/
@Description(
"Specifies whether worker pools should be started with public IP addresses. WARNING:"
+ "This feature is experimental. You must be whitelisted to use it.")
+ "This feature is experimental. You must be allowlisted to use it.")
@Experimental
@JsonIgnore
@Nullable
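The option above is plausibly Dataflow's usePublicIps flag; a minimal usage sketch under that assumption (the option and setter names are not shown in the hunk and are assumed here):

```java
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class PrivateIpWorkers {
  public static void main(String[] args) {
    // Parse options from the command line, e.g. --usePublicIps=false.
    DataflowPipelineOptions options =
        PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
    // Assumed setter for the option documented above: start worker pools
    // without public IP addresses (the experimental, allowlisted feature).
    options.setUsePublicIps(false);
  }
}
```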
@@ -88,7 +88,7 @@ page at http://checkstyle.sourceforge.net/config.html -->
</module>

<!--
- IllegalImport cannot blacklist classes, and c.g.api.client.util is used for some shaded
+ IllegalImport cannot forbid classes, and c.g.api.client.util is used for some shaded
code and some useful code. So we need to fall back to Regexp.
-->
<module name="RegexpSinglelineJava">
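As a rough illustration of the fallback described in that comment, a line-level regex can forbid a package while still permitting specific classes, which IllegalImport cannot. A sketch of such a pattern (the exempted class, Clock, is a hypothetical example):

```java
import java.util.regex.Pattern;

public class ForbiddenImportDemo {
  // Hypothetical pattern: flag com.google.api.client.util imports
  // except an allowed class (here, Clock).
  private static final Pattern FORBIDDEN =
      Pattern.compile("^import com\\.google\\.api\\.client\\.util\\.(?!Clock\\b).*;$");

  public static void main(String[] args) {
    System.out.println(FORBIDDEN.matcher(
        "import com.google.api.client.util.Strings;").matches()); // true: flagged
    System.out.println(FORBIDDEN.matcher(
        "import com.google.api.client.util.Clock;").matches());   // false: allowed
  }
}
```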
@@ -490,7 +490,7 @@ private void doCheck(String context, TypeDescriptor<?> type, Schema schema) {

private void checkString(String context, TypeDescriptor<?> type) {
// For types that are encoded as strings, we need to make sure they're in an approved
- // whitelist. For other types that are annotated @Stringable, Avro will just use the
+ // list. For other types that are annotated @Stringable, Avro will just use the
// #toString() methods, which has no guarantees of determinism.
if (!DETERMINISTIC_STRINGABLE_CLASSES.contains(type.getRawType())) {
reportError(context, "%s may not have deterministic #toString()", type);
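A minimal sketch of the approved-list idea behind checkString (the membership shown is illustrative; the real DETERMINISTIC_STRINGABLE_CLASSES set may differ):

```java
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URI;
import java.util.Set;

class StringableDeterminismCheck {
  // Illustrative membership: types whose toString() is known to be deterministic.
  private static final Set<Class<?>> DETERMINISTIC_STRINGABLE_CLASSES =
      Set.of(BigDecimal.class, BigInteger.class, URI.class);

  static boolean hasDeterministicToString(Class<?> rawType) {
    // A bare @Stringable annotation is not enough; the type must be
    // on the approved list.
    return DETERMINISTIC_STRINGABLE_CLASSES.contains(rawType);
  }
}
```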
@@ -178,7 +178,7 @@ private Set<SqlOperatorId> getDeclaredOperators() {
Comparator.comparing((SqlOperatorId operator) -> operator.name()),
Comparator.comparing((SqlOperatorId operator) -> operator.kind())));

- /** Smoke test that the whitelists and utility functions actually work. */
+ /** Smoke test that the allowlist and utility functions actually work. */
@Test
@SqlOperatorTest(name = "CARDINALITY", kind = "OTHER_FUNCTION")
public void testAnnotationEquality() throws Exception {
@@ -19,7 +19,7 @@

import static com.google.zetasql.ZetaSQLResolvedNodeKind.ResolvedNodeKind.RESOLVED_CREATE_FUNCTION_STMT;
import static com.google.zetasql.ZetaSQLResolvedNodeKind.ResolvedNodeKind.RESOLVED_QUERY_STMT;
- import static org.apache.beam.sdk.extensions.sql.zetasql.SqlStdOperatorMappingTable.ZETASQL_BUILTIN_FUNCTION_WHITELIST;
+ import static org.apache.beam.sdk.extensions.sql.zetasql.SqlStdOperatorMappingTable.ZETASQL_BUILTIN_FUNCTION_ALLOWLIST;
import static org.apache.beam.sdk.extensions.sql.zetasql.ZetaSqlCalciteTranslationUtils.toZetaType;

import com.google.common.collect.ImmutableList;
@@ -170,7 +170,7 @@ private void addBuiltinFunctionsToCatalog(SimpleCatalog catalog, AnalyzerOptions
ZetaSQLBuiltinFunctionOptions zetasqlBuiltinFunctionOptions =
new ZetaSQLBuiltinFunctionOptions(options.getLanguageOptions());

- ZETASQL_BUILTIN_FUNCTION_WHITELIST.forEach(
+ ZETASQL_BUILTIN_FUNCTION_ALLOWLIST.forEach(
zetasqlBuiltinFunctionOptions::includeFunctionSignatureId);

catalog.addZetaSQLFunctions(zetasqlBuiltinFunctionOptions);
@@ -29,7 +29,7 @@
/** SqlStdOperatorMappingTable. */
@Internal
public class SqlStdOperatorMappingTable {
- static final List<FunctionSignatureId> ZETASQL_BUILTIN_FUNCTION_WHITELIST =
+ static final List<FunctionSignatureId> ZETASQL_BUILTIN_FUNCTION_ALLOWLIST =
ImmutableList.of(
FunctionSignatureId.FN_AND,
FunctionSignatureId.FN_ANY_VALUE,
@@ -63,7 +63,7 @@ num_tokens: 1
# See http://wiki.apache.org/cassandra/HintedHandoff
# May either be "true" or "false" to enable globally
hinted_handoff_enabled: true
- # When hinted_handoff_enabled is true, a black list of data centers that will not
+ # When hinted_handoff_enabled is true, a blocklist of data centers that will not
# perform hinted handoff
#hinted_handoff_disabled_datacenters:
# - DC1
@@ -1258,9 +1258,9 @@ public WriteRecords<K, V> withPublishTimestampFunction(
* transform ties checkpointing semantics in compatible Beam runners and transactions in Kafka
* (version 0.11+) to ensure a record is written only once. As the implementation relies on
* runners checkpoint semantics, not all the runners are compatible. The sink throws an
- * exception during initialization if the runner is not whitelisted. Flink runner is one of the
- * runners whose checkpoint semantics are not compatible with current implementation (hope to
- * provide a solution in near future). Dataflow runner and Spark runners are whitelisted as
+ * exception during initialization if the runner is not explicitly allowed. Flink runner is one
+ * of the runners whose checkpoint semantics are not compatible with the current implementation
+ * (we hope to provide a solution in the near future). Dataflow runner and Spark runners are
* compatible.
*
* <p>Note on performance: Exactly-once sink involves two shuffles of the records. In addition
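A hedged usage sketch of the sink this javadoc describes, assuming KafkaIO's withEOS(numShards, sinkGroupId) entry point and an existing PCollection<KV<Long, String>> (broker address, topic, and shard count are placeholders):

```java
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

class ExactlyOnceSinkExample {
  static void write(PCollection<KV<Long, String>> records) {
    records.apply(KafkaIO.<Long, String>write()
        .withBootstrapServers("broker-1:9092") // placeholder broker
        .withTopic("results")                  // placeholder topic
        .withKeySerializer(LongSerializer.class)
        .withValueSerializer(StringSerializer.class)
        // Enables the transactional exactly-once sink described above; it
        // throws at initialization on runners with incompatible checkpointing.
        .withEOS(1, "eos-sink-group"));
  }
}
```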
@@ -1337,9 +1337,9 @@ public void validate(PipelineOptions options) {
}
throw new UnsupportedOperationException(
runner
+ " is not whitelisted among runners compatible with Kafka exactly-once sink. "
+ " is not a runner known to be compatible with Kafka exactly-once sink. "
+ "This implementation of exactly-once sink relies on specific checkpoint guarantees. "
+ "Only the runners with known to have compatible checkpoint semantics are whitelisted.");
+ "Only the runners with known to have compatible checkpoint semantics are allowed.");
}
}
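The guard's condition is truncated out of the hunk above; a rough reconstruction of its shape, under assumed runner names (the real check inspects the pipeline's configured runner):

```java
import java.util.Set;

class EosRunnerGuard {
  // Assumed names; the actual compatible set lives in KafkaIO's sink code.
  private static final Set<String> COMPATIBLE_RUNNERS =
      Set.of("DataflowRunner", "SparkRunner");

  static void validateRunner(String runner) {
    if (COMPATIBLE_RUNNERS.contains(runner)) {
      return; // known-compatible checkpoint semantics
    }
    throw new UnsupportedOperationException(
        runner + " is not a runner known to be compatible with Kafka exactly-once sink.");
  }
}
```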

