diff --git a/Integrations/python/deephaven/doc/io/deephaven/api/TableOperations.json b/Integrations/python/deephaven/doc/io/deephaven/api/TableOperations.json index 627c39f9638..fc9d178a4c2 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/api/TableOperations.json +++ b/Integrations/python/deephaven/doc/io/deephaven/api/TableOperations.json @@ -1,14 +1,21 @@ { "className": "io.deephaven.api.TableOperations", "methods": { + "absSumBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "aggAllBy": "*Overload 1* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "aggBy": "*Overload 1* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param aggregations: java.util.Collection\n :return: TableOperations.TOPS\n \n*Overload 5* \n :param aggregations: java.util.Collection\n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n 
\n*Overload 6* \n :param aggregations: java.util.Collection\n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "aj": "**Incompatible overloads text - text from the first overload:**\n\nPerform an as-of join with the rightTable.\n\n \n Delegates to aj(Object, Collection, Collection, AsOfJoinRule).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param asOfJoinRule: 
(io.deephaven.api.AsOfJoinRule) - The binary search operator for the last match pair.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd", + "avgBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "countBy": "*Overload 1* \n :param countColumnName: java.lang.String\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param countColumnName: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param countColumnName: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param countColumnName: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "exactJoin": "**Incompatible overloads text - text from the first overload:**\n\nPerform an exact-join with the rightTable.\n\n \n Delegates to exactJoin(Object, Collection, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (TableOperations.TOPS) the exact-joined table\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n 
:return: (TableOperations.TOPS) the exact-joined table\n \n*Overload 3* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperations.TOPS) the exact-joined table", + "firstBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "groupBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "head": ":param size: long\n:return: TableOperations.TOPS", "join": "**Incompatible overloads text - text from the first overload:**\n\nPerform a cross join with the rightTable.\n\n \n Delegates to join(Object, Collection, Collection, int).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and includes all non-key-columns from\n the right table\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list 
with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param reserveBits: (int) - The number of bits to reserve for rightTable groups.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd", + "lastBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", + "maxBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", + "medianBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param 
groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", + "minBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "naturalJoin": "**Incompatible overloads text - text from the first overload:**\n\nPerform a natural-join with the rightTable.\n\n \n Delegates to naturalJoin(Object, Collection, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (TableOperations.TOPS) the natural-joined table\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperations.TOPS) the natural-joined table\n \n*Overload 3* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperations.TOPS) the natural-joined table", "raj": 
"**Incompatible overloads text - text from the first overload:**\n\nPerform a reverse-as-of join with the rightTable.\n\n \n Delegates to raj(Object, Collection, Collection, ReverseAsOfJoinRule).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperations.TOPS) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (TableOperations.TABLE) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param reverseAsOfJoinRule: (io.deephaven.api.ReverseAsOfJoinRule) - The binary search operator for the last match pair.\n :return: (TableOperations.TOPS) 
a new table joined according to the specification in columnsToMatch and columnsToAdd", "reverse": ":return: TableOperations.TOPS", @@ -17,13 +24,18 @@ "snapshot": "**Incompatible overloads text - text from the first overload:**\n\nSnapshot baseTable, triggered by this table, and return a new table as a result.\n\n \n Delegates to snapshot(Object, boolean, Collection).\n\n*Overload 1* \n :param baseTable: (TableOperations.TABLE) - The table to be snapshotted\n :param stampColumns: (java.lang.String...) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (TableOperations.TOPS) The result table\n \n*Overload 2* \n :param baseTable: (TableOperations.TABLE) - The table to be snapshotted\n :param doInitialSnapshot: (boolean) - Take the first snapshot now (otherwise wait for a change event)\n :param stampColumns: (java.lang.String...) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (TableOperations.TOPS) The result table\n \n*Overload 3* \n :param baseTable: (TableOperations.TABLE) - The table to be snapshotted\n :param doInitialSnapshot: (boolean) - Take the first snapshot now (otherwise wait for a change event)\n :param stampColumns: (java.util.Collection) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. 
As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (TableOperations.TOPS) The result table", "sort": "*Overload 1* \n :param columnsToSortBy: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param columnsToSortBy: java.util.Collection\n :return: TableOperations.TOPS", "sortDescending": ":param columnsToSortBy: java.lang.String...\n:return: TableOperations.TOPS", + "stdBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", + "sumBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "tail": ":param size: long\n:return: TableOperations.TOPS", "update": "*Overload 1* \n :param columns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: TableOperations.TOPS", "updateView": "*Overload 1* \n :param columns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: TableOperations.TOPS", + "varBy": "*Overload 1* \n :return: TableOperations.TOPS\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "view": "*Overload 1* \n :param columns: 
java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: TableOperations.TOPS", + "wavgBy": "*Overload 1* \n :param weightColumn: java.lang.String\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param weightColumn: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS", "where": "*Overload 1* \n :param filters: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param filters: java.util.Collection\n :return: TableOperations.TOPS", "whereIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values in the rightTable.\n\n \n Delegates to whereIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - the filtering table.\n :param columnsToMatch: (java.lang.String...) - the columns to match between the two tables\n :return: (TableOperations.TOPS) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (TableOperations.TOPS) a new table filtered on right table", - "whereNotIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values not in the rightTable.\n\n \n Delegates to whereNotIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - the filtering table.\n :param columnsToMatch: (java.lang.String...) 
- the columns to match between the two tables\n :return: (TableOperations.TOPS) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (TableOperations.TOPS) a new table filtered on right table" + "whereNotIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values not in the rightTable.\n\n \n Delegates to whereNotIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperations.TABLE) - the filtering table.\n :param columnsToMatch: (java.lang.String...) - the columns to match between the two tables\n :return: (TableOperations.TOPS) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (TableOperations.TABLE) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (TableOperations.TOPS) a new table filtered on right table", + "wsumBy": "*Overload 1* \n :param weightColumn: java.lang.String\n :return: TableOperations.TOPS\n \n*Overload 2* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: TableOperations.TOPS\n \n*Overload 3* \n :param weightColumn: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperations.TOPS\n \n*Overload 4* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: TableOperations.TOPS" }, "path": "io.deephaven.api.TableOperations", "text": "Table operations is a user-accessible api for modifying tables or building up table operations.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/api/TableOperationsAdapter.json b/Integrations/python/deephaven/doc/io/deephaven/api/TableOperationsAdapter.json index e7db73bec9d..6eaf5184297 100644 --- 
a/Integrations/python/deephaven/doc/io/deephaven/api/TableOperationsAdapter.json +++ b/Integrations/python/deephaven/doc/io/deephaven/api/TableOperationsAdapter.json @@ -1,15 +1,22 @@ { "className": "io.deephaven.api.TableOperationsAdapter", "methods": { + "absSumBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "aggAllBy": "*Overload 1* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "aggBy": "*Overload 1* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param aggregations: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 5* \n :param aggregations: java.util.Collection\n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 6* \n :param aggregations: java.util.Collection\n 
:param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "aj": "**Incompatible overloads text - text from the first overload:**\n\nPerform an as-of join with the rightTable.\n\n \n Delegates to TableOperations.aj(Object, Collection, Collection, AsOfJoinRule).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n 
match.\n :param asOfJoinRule: (io.deephaven.api.AsOfJoinRule) - The binary search operator for the last match pair.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd", + "avgBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "countBy": "*Overload 1* \n :param countColumnName: java.lang.String\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param countColumnName: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param countColumnName: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param countColumnName: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "delegate": ":return: TableOperationsAdapter.TOPS_2", "exactJoin": "**Incompatible overloads text - text from the first overload:**\n\nPerform an exact-join with the rightTable.\n\n \n Delegates to TableOperations.exactJoin(Object, Collection, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (TableOperationsAdapter.TOPS_1) the exact-joined table\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions 
(\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperationsAdapter.TOPS_1) the exact-joined table\n \n*Overload 3* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperationsAdapter.TOPS_1) the exact-joined table", + "firstBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "groupBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "head": ":param size: long\n:return: TableOperationsAdapter.TOPS_1", "join": "**Incompatible overloads text - text from the first overload:**\n\nPerform a cross join with the rightTable.\n\n \n Delegates to TableOperations.join(Object, Collection, Collection, int).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and 
includes all non-key-columns from\n the right table\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param reserveBits: (int) - The number of bits to reserve for rightTable groups.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd", + "lastBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: 
TableOperationsAdapter.TOPS_1", + "maxBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", + "medianBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", + "minBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "naturalJoin": "**Incompatible overloads text - text from the first overload:**\n\nPerform a natural-join with the rightTable.\n\n \n Delegates to TableOperations.naturalJoin(Object, Collection, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (TableOperationsAdapter.TOPS_1) the natural-joined table\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n 
:param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (TableOperationsAdapter.TOPS_1) the natural-joined table\n \n*Overload 3* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperationsAdapter.TOPS_1) the natural-joined table", "raj": "**Incompatible overloads text - text from the first overload:**\n\nPerform a reverse-as-of join with the rightTable.\n\n \n Delegates to TableOperations.raj(Object, Collection, Collection, ReverseAsOfJoinRule).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the left side that need to be added to the right\n side as a result of the match.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: 
(java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param reverseAsOfJoinRule: (io.deephaven.api.ReverseAsOfJoinRule) - The binary search operator for the last match pair.\n :return: (TableOperationsAdapter.TOPS_1) a new table joined according to the specification in columnsToMatch and columnsToAdd", "reverse": ":return: TableOperationsAdapter.TOPS_1", @@ -18,13 +25,18 @@ "snapshot": "**Incompatible overloads text - text from the first overload:**\n\nSnapshot baseTable, triggered by this table, and return a new table as a result.\n\n \n Delegates to TableOperations.snapshot(Object, boolean, Collection).\n\n*Overload 1* \n :param baseTable: (TableOperationsAdapter.TABLE_1) - The table to be snapshotted\n :param stampColumns: (java.lang.String...) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (TableOperationsAdapter.TOPS_1) The result table\n \n*Overload 2* \n :param baseTable: (TableOperationsAdapter.TABLE_1) - The table to be snapshotted\n :param doInitialSnapshot: (boolean) - Take the first snapshot now (otherwise wait for a change event)\n :param stampColumns: (java.lang.String...) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. 
As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (TableOperationsAdapter.TOPS_1) The result table\n \n*Overload 3* \n :param baseTable: (TableOperationsAdapter.TABLE_1) - The table to be snapshotted\n :param doInitialSnapshot: (boolean) - Take the first snapshot now (otherwise wait for a change event)\n :param stampColumns: (java.util.Collection) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (TableOperationsAdapter.TOPS_1) The result table", "sort": "*Overload 1* \n :param columnsToSortBy: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param columnsToSortBy: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "sortDescending": ":param columnsToSortBy: java.lang.String...\n:return: TableOperationsAdapter.TOPS_1", + "stdBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", + "sumBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "tail": ":param size: long\n:return: TableOperationsAdapter.TOPS_1", "update": "*Overload 1* \n :param columns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param columns: 
java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "updateView": "*Overload 1* \n :param columns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", + "varBy": "*Overload 1* \n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "view": "*Overload 1* \n :param columns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", + "wavgBy": "*Overload 1* \n :param weightColumn: java.lang.String\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param weightColumn: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "where": "*Overload 1* \n :param filters: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param filters: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1", "whereIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values in the rightTable.\n\n \n Delegates to TableOperations.whereIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - the filtering table.\n :param columnsToMatch: (java.lang.String...) 
- the columns to match between the two tables\n :return: (TableOperationsAdapter.TOPS_1) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (TableOperationsAdapter.TOPS_1) a new table filtered on right table", - "whereNotIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values not in the rightTable.\n\n \n Delegates to TableOperations.whereNotIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - the filtering table.\n :param columnsToMatch: (java.lang.String...) - the columns to match between the two tables\n :return: (TableOperationsAdapter.TOPS_1) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (TableOperationsAdapter.TOPS_1) a new table filtered on right table" + "whereNotIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values not in the rightTable.\n\n \n Delegates to TableOperations.whereNotIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - the filtering table.\n :param columnsToMatch: (java.lang.String...) 
- the columns to match between the two tables\n :return: (TableOperationsAdapter.TOPS_1) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (TableOperationsAdapter.TABLE_1) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (TableOperationsAdapter.TOPS_1) a new table filtered on right table", + "wsumBy": "*Overload 1* \n :param weightColumn: java.lang.String\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 2* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 3* \n :param weightColumn: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: TableOperationsAdapter.TOPS_1\n \n*Overload 4* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: TableOperationsAdapter.TOPS_1" }, "path": "io.deephaven.api.TableOperationsAdapter", "typeName": "class" diff --git a/Integrations/python/deephaven/doc/io/deephaven/appmode/ApplicationContext.json b/Integrations/python/deephaven/doc/io/deephaven/appmode/ApplicationContext.json deleted file mode 100644 index e486c74e896..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/appmode/ApplicationContext.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "className": "io.deephaven.appmode.ApplicationContext", - "methods": { - "get": ":return: io.deephaven.appmode.ApplicationState", - "initialize": ":param initializer: java.util.function.Consumer", - "runUnderContext": ":param context: io.deephaven.appmode.ApplicationState\n:param runner: java.lang.Runnable" - }, - "path": "io.deephaven.appmode.ApplicationContext", - "text": "This application context can be used to get access to the application state from within script applications.\n \nget() is only valid during the initial invocation of a script application during start up of\n Application Mode. 
Scripts may dynamically add fields after start up by capturing and using the ApplicationState after\n the script after having returned execution control to the begin the server process.\n \n Each application owns and manages a unique ApplicationState.", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/rowset/impl/rsp/RspArray.json b/Integrations/python/deephaven/doc/io/deephaven/engine/rowset/impl/rsp/RspArray.json index 2b0115864c9..93a4cb71d88 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/rowset/impl/rsp/RspArray.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/rowset/impl/rsp/RspArray.json @@ -80,6 +80,6 @@ "valuesToString": ":return: java.lang.String" }, "path": "io.deephaven.engine.rowset.impl.rsp.RspArray", - "text": "A set representation for long values using Regular Space Partitioning (RSP) of the long space in \"blocks\" of (2^16)\n elements.\n \n\n Modeled heavily after roaringbitmap.RoaringArray (keeping API method names and semantics as much as possible), with\n modifications for:\n \n\n* Full \"unsigned long\" 64 bit range (as opposed to 32 bit in RoaringArray).\n* Spans of all bits set (\"AllSet\") that can be arbitrarily big (ie, not constrained to 2^16 = RB Container\n size).\n\n\n The handling of unsigned values follows RB; ie, key values are compared/sorted as unsigned longs.\n \n\n Definitions:\n \n\n* A \"block\" is a particular interval [n*2^16, (n+1)*2^16 - 1] of the long domain.\n* A \"span\" is a partition of the domain consisting of one or more consecutive blocks; a span is a subset of\n the domain represented by an interval [n*2^16, (n+m)*2^16 - 1], m >= 1.\n * Full blocks are blocks whose domain are fully contained in the set, ie, the set contains every possible value in\n the block's interval (as a bitmap, it would be \"all ones\").\n* Spans of full blocks are represented by a single \"full blocks span\" object (just a Long) which 
knows how many\n 2^16 ranges it has (it's \"full blocks span len\" (\"flen\") is the number of full blocks in the span).\n* Individual blocks that are not completely full are stored in an RB Container; their \"full blocks span len\" is\n zero.\n\n\n Our internal representation uses two parallel arrays:\n \n\n* a long[] spanInfos array that contains the information for the offset to the values in the span,\n which we call the span's \"key\". For instance, a full block span that represents all the long values in [65536,\n 196607] has as its key the value 65536.\n* an Object[] spans array that contains the actual spans. At the most basic level, a span can be\n either a full block span or a container of values (but there is nuance in exactly how to represent them, see\n below).\n\n\n We use several optimizations to reduce memory utilization for sparse sets. Details follow.\n \n\n The long[] spanInfos and Object[] spans data members of this class are used, combined, to\n represent the offset (key) and span values in the set, against that offset. The two arrays are used, together, as\n parallel arrays and the information for a given conceptual span is contained in both of them for the same\n corresponding rowSet i.\n \n\n There are two basic cases for a span: it is either a full blocks span, containing a >=1 number of full blocks, or it\n is a container, containing individual values in the particular 2^16 block corresponding to the span's key.\n \n\n There are four ways total that these two cases can be represented between the long in the `spanInfos` array and the\n Object in the `spans` array. Given a span at position `i`:\n \n\n* If the corresponding Object spans[i] is of type Long, then the long spanInfos[i] value\n is the key for the span (with its lower 16 bits as zero), and the Long value represents how many full blocks are\n present. 
Example, the set [ 0, 2^50 - 1 ] is represented as spanInfo==0 and span==Long(2^34).\n* As an optimization to conserve memory, if the Object spans[i] is the Object reference with value\n FULL_BLOCK_SPAN_MARKER (a singleton and final marker Object defined statically in this file). then the\n upper 48 bits of the long spanInfo[i] value represent the key for the span, and the lower 16 bits of the\n long spanInfo[i] value represent the full block span length. Example, the set [ 65536, 196607 ] is\n represented by spanInfo==65538 and span==FULL_BLOCK_SPAN_MARKER (note\n 196607 == 65536*3 - 1, so the set is 2 full blocks, and 65538 == 65536 | 2.\n* If the corresponding Object spans[i] is null, then the long spanInfos[i] represents the\n single value present in the span (note in this case, its upper 16 bits still corresponds to its key). Example, the\n set { 65537 } is represented by spanInfo==65537 and span==null.\n* If the corresponding Object spans[i] is of type short[] or of type\n Container, then it represents a container of multiple values in a single block (but not all of the\n possible values in the block, since in that case it would be a full block span as above). In this case the higher 48\n bits of its corresponding spanInfo represent the key for the span. Depending on the actual type of span there are two\n subcases:\n\n \n* If spans[i] is of type Container, then the values in the roaringbitmaps container\n object are part of the set, considered against its key offset. The key is represented in the higher 48 bits of its\n corresponding spaninfo. The lower 16 bits of spanInfo are zero in this case. Example, the set [ 100,000-100,010,\n 100,020-100,030 ] is represented by spaInfo==65536,\n span==RunContainer({34464-34474, 34484-34494})\n* If spans[i] is of type short[], then an ArrayContainer with the\n short[] contents needs to be reconstructed. The lower 16 bits of the spanInfo value are used to\n represent the other data members of ArrayContainer. 
This case exists as an optimization to reduce memory utilization\n for sparse blocks. For details of this reconstruction please see the code for the definition of the SpanView class\n below.\n\n\n\n\n Notes:\n \n\n* Our version of RB Container supports a \"shared\" boolean flag that is used to implement copy-on-write (COW)\n semantics and allow operation results to share containers in COW fashion.\n* We extended the Container class hierarchy to include specializations for empty, single value, single range, and\n two values containers. These are immutable; empty is used only as a way to return empty results, and are never actual\n stored in the spans array. For details, please see the Container class definition and derived class hierarchy.", + "text": "A set representation for long values using Regular Space Partitioning (RSP) of the long space in \"blocks\" of (2^16)\n elements.\n \n\n Modeled heavily after roaringbitmap.RoaringArray (keeping API method names and semantics as much as possible), with\n modifications for:\n \n\n* Full \"unsigned long\" 64 bit range (as opposed to 32 bit in RoaringArray).\n* Spans of all bits set (\"AllSet\") that can be arbitrarily big (ie, not constrained to 2^16 = RB Container\n size).\n\n\n The handling of unsigned values follows RB; ie, key values are compared/sorted as unsigned longs.\n \n\n Definitions:\n \n\n* A \"block\" is a particular interval [n*2^16, (n+1)*2^16 - 1] of the long domain.\n* A \"span\" is a partition of the domain consisting of one or more consecutive blocks; a span is a subset of\n the domain represented by an interval [n*2^16, (n+m)*2^16 - 1], m >= 1.\n * Full blocks are blocks whose domain are fully contained in the set, ie, the set contains every possible value in\n the block's interval (as a bitmap, it would be \"all ones\").\n* Spans of full blocks are represented by a single \"full blocks span\" object (just a Long) which knows how many\n 2^16 ranges it has (it's \"full blocks span len\" (\"flen\") is 
the number of full blocks in the span).\n* Individual blocks that are not completely full are stored in an RB Container; their \"full blocks span len\" is\n zero.\n\n\n Our internal representation uses two parallel arrays:\n \n\n* a long[] spanInfos array that contains the information for the offset to the values in the span,\n which we call the span's \"key\". For instance, a full block span that represents all the long values in [65536,\n 196607] has as its key the value 65536.\n* an Object[] spans array that contains the actual spans. At the most basic level, a span can be\n either a full block span or a container of values (but there is nuance in exactly how to represent them, see\n below).\n\n\n We use several optimizations to reduce memory utilization for sparse sets. Details follow.\n \n\n The long[] spanInfos and Object[] spans data members of this class are used, combined, to\n represent the offset (key) and span values in the set, against that offset. The two arrays are used, together, as\n parallel arrays and the information for a given conceptual span is contained in both of them for the same\n corresponding rowSet i.\n \n\n There are two basic cases for a span: it is either a full blocks span, containing a >=1 number of full blocks, or it\n is a container, containing individual values in the particular 2^16 block corresponding to the span's key.\n \n\n There are four ways total that these two cases can be represented between the long in the `spanInfos` array and the\n Object in the `spans` array. Given a span at position `i`:\n \n\n* If the corresponding Object spans[i] is of type Long, then the long spanInfos[i] value\n is the key for the span (with its lower 16 bits as zero), and the Long value represents how many full blocks are\n present. 
Example, the set [ 0, 2^50 - 1 ] is represented as spanInfo==0 and span==Long(2^34).\n* As an optimization to conserve memory, if the Object spans[i] is the Object reference with value\n FULL_BLOCK_SPAN_MARKER (a singleton and final marker Object defined statically in this file). then the\n upper 48 bits of the long spanInfo[i] value represent the key for the span, and the lower 16 bits of the\n long spanInfo[i] value represent the full block span length. Example, the set [ 65536, 196607 ] is\n represented by spanInfo==65538 and span==FULL_BLOCK_SPAN_MARKER (note\n 196607 == 65536*3 - 1, so the set is 2 full blocks, and 65538 == 65536 | 2.\n* If the corresponding Object spans[i] is null, then the long spanInfos[i] represents the\n single value present in the span (note in this case, its upper 16 bits still corresponds to its key). Example, the\n set { 65537 } is represented by spanInfo==65537 and span==null.\n* If the corresponding Object spans[i] is of type short[] or of type\n Container, then it represents a container of multiple values in a single block (but not all of the\n possible values in the block, since in that case it would be a full block span as above). In this case the higher 48\n bits of its corresponding spanInfo represent the key for the span. Depending on the actual type of span there are two\n subcases:\n\n \n* If spans[i] is of type Container, then the values in the roaringbitmaps container\n object are part of the set, considered against its key offset. The key is represented in the higher 48 bits of its\n corresponding spaninfo. The lower 16 bits of spanInfo are zero in this case. Example, the set [ 100,000-100,010,\n 100,020-100,030 ] is represented by spaInfo==65536,\n span==RunContainer({34464-34474, 34484-34494})\n* If spans[i] is of type short[], then an ArrayContainer with the\n short[] contents needs to be reconstructed. The lower 16 bits of the spanInfo value are used to\n represent the other data members of ArrayContainer. 
This case exists as an optimization to reduce memory utilization\n for sparse blocks. For details of this reconstruction please see the code for the definition of the SpanView class\n below.\n\n\n\n\n Notes:\n \n\n* Our version of RB Container supports a \"shared\" boolean flag that is used to implement copy-on-write (COW)\n semantics and allow operation results to share containers in COW fashion.\n* We extended the Container class hierarchy to include specializations for empty, single value, single range, and\n two values containers. These are immutable; empty and singleton are used only as a way to return empty and singleton\n results, and are never actually stored in the spans array. For details, please see the Container class definition and\n derived class hierarchy.", "typeName": "class" } \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/TableWithDefaults/AggHolder.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/TableWithDefaults/AggHolder.json deleted file mode 100644 index 666d88c7aa9..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/TableWithDefaults/AggHolder.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "className": "io.deephaven.engine.table.impl.TableWithDefaults$AggHolder", - "methods": {}, - "path": "io.deephaven.engine.table.impl.TableWithDefaults.AggHolder", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/AggregationContextFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/AggregationContextFactory.json index 66542e9d9fa..f6455c10772 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/AggregationContextFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/AggregationContextFactory.json @@ -1,7 +1,6 @@ { "className": 
"io.deephaven.engine.table.impl.by.AggregationContextFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param table: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumns: (java.lang.String...) - The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext" }, "path": "io.deephaven.engine.table.impl.by.AggregationContextFactory", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FirstOrLastByAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FirstOrLastByAggregationFactory.json index 42146d1a132..5aef2a96f18 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FirstOrLastByAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FirstOrLastByAggregationFactory.json @@ -1,7 +1,6 @@ { "className": "io.deephaven.engine.table.impl.by.FirstOrLastByAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? 
Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param table: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumns: (java.lang.String...) - The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", "toString": ":return: java.lang.String" }, diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FormulaAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FormulaAggregationFactory.json index 88d55483eae..53c51a67d79 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FormulaAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FormulaAggregationFactory.json @@ -1,7 +1,6 @@ { "className": "io.deephaven.engine.table.impl.by.FormulaAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? 
Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "applyToAllBy": "*Overload 1* \n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param formula: java.lang.String\n :param columnParamName: java.lang.String\n :param groupByColumnNames: java.lang.String...\n :return: io.deephaven.engine.table.impl.QueryTable\n \n*Overload 2* \n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param formula: java.lang.String\n :param columnParamName: java.lang.String\n :param groupByColumns: io.deephaven.engine.table.impl.select.SelectColumn[]\n :return: io.deephaven.engine.table.impl.QueryTable\n \n*Overload 3* \n :param aggregationControl: io.deephaven.engine.table.impl.by.AggregationControl\n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param formula: java.lang.String\n :param columnParamName: java.lang.String\n :param groupByColumnNames: java.lang.String...\n :return: io.deephaven.engine.table.impl.QueryTable\n \n*Overload 4* \n :param aggregationControl: io.deephaven.engine.table.impl.by.AggregationControl\n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param formula: java.lang.String\n :param columnParamName: java.lang.String\n :param groupByColumns: io.deephaven.engine.table.impl.select.SelectColumn[]\n :return: io.deephaven.engine.table.impl.QueryTable", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param inputTable: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumnNames: (java.lang.String...) 
- The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", "toString": ":return: java.lang.String" diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FreezeByAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FreezeByAggregationFactory.json index ad6a81affad..487704321aa 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FreezeByAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/FreezeByAggregationFactory.json @@ -1,7 +1,6 @@ { "className": "io.deephaven.engine.table.impl.by.FreezeByAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param table: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumns: (java.lang.String...) 
- The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", "toString": ":return: java.lang.String" }, diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/GroupByAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/GroupByAggregationFactory.json index 1c0ce31ba87..8f6b88fad5c 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/GroupByAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/GroupByAggregationFactory.json @@ -1,7 +1,6 @@ { "className": "io.deephaven.engine.table.impl.by.GroupByAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "by": "*Overload 1* \n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param groupByColumnNames: java.lang.String...\n :return: io.deephaven.engine.table.impl.QueryTable\n \n*Overload 2* \n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param groupByColumns: io.deephaven.engine.table.impl.select.SelectColumn[]\n :return: io.deephaven.engine.table.impl.QueryTable\n \n*Overload 3* \n :param aggregationControl: io.deephaven.engine.table.impl.by.AggregationControl\n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param groupByColumnNames: java.lang.String...\n :return: io.deephaven.engine.table.impl.QueryTable\n \n*Overload 4* \n :param aggregationControl: io.deephaven.engine.table.impl.by.AggregationControl\n :param inputTable: io.deephaven.engine.table.impl.QueryTable\n :param groupByColumns: 
io.deephaven.engine.table.impl.select.SelectColumn[]\n :return: io.deephaven.engine.table.impl.QueryTable", "getInstance": ":return: io.deephaven.engine.table.impl.by.AggregationContextFactory", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param inputTable: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumnNames: (java.lang.String...) - The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.json index 5d3dea1a571..eeeb8d18088 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.json @@ -15,6 +15,7 @@ "propagateUpdates": "Perform any internal state keeping needed for destinations that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. 
Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) true if the state was modified, false otherwise", "requiresRowKeys": "Whether the operator requires row keys. 
This implies that the operator must process shifts (i.e.\n shiftChunk(io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext, io.deephaven.chunk.Chunk, io.deephaven.chunk.Chunk, io.deephaven.chunk.LongChunk, io.deephaven.chunk.LongChunk, io.deephaven.chunk.IntChunk, io.deephaven.chunk.IntChunk, io.deephaven.chunk.IntChunk, io.deephaven.chunk.WritableBooleanChunk)), and must observe modifications even when its input columns (if any) are not modified (i.e.\n modifyRowKeys(io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext, io.deephaven.chunk.LongChunk, io.deephaven.chunk.IntChunk, io.deephaven.chunk.IntChunk, io.deephaven.chunk.IntChunk, io.deephaven.chunk.WritableBooleanChunk)).\n\n:return: (boolean) true if the operator requires row keys, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "resetForStep": "Reset any per-step internal state. 
Note that the arguments to this method should not be mutated in any way.\n\n:param upstream: (io.deephaven.engine.table.TableUpdate) - The upstream ShiftAwareListener.Update", "shiftChunk": "**Incompatible overloads text - text from the first overload:**\n\nCalled with shifted row keys when requiresRowKeys() returns true, including shifted same-slot modifies.\n\n*Overload 1* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param previousValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param newValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param preShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to aggregate into, parallel with startPositions and length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param previousValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param newValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param preShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in 
post-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) true if the result should be considered modified", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/NonKeyColumnAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/NonKeyColumnAggregationFactory.json index b3a4e59bf85..7dc86a5f927 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/NonKeyColumnAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/NonKeyColumnAggregationFactory.json @@ -1,7 +1,6 @@ { "className": "io.deephaven.engine.table.impl.by.NonKeyColumnAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param table: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumns: (java.lang.String...) 
- The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", "toString": ":return: java.lang.String" }, diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/SortedFirstOrLastByAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/SortedFirstOrLastByAggregationFactory.json index 73d9f390814..c2b4076d2c0 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/SortedFirstOrLastByAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/SortedFirstOrLastByAggregationFactory.json @@ -1,7 +1,6 @@ { "className": "io.deephaven.engine.table.impl.by.SortedFirstOrLastByAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param table: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumns: (java.lang.String...) 
- The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", "toString": ":return: java.lang.String" }, diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.json index caf3585dd53..e52b8ce1796 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.json @@ -1,9 +1,10 @@ { "className": "io.deephaven.engine.table.impl.by.StreamFirstChunkedOperator", "methods": { - "addChunk": "Aggregate a chunk of data into the result columns.\n\n*Overload 1* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to aggregate into, parallel with startPositions and length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the addition\n :param values: (io.deephaven.chunk.Chunk) - the values to aggregate\n :param 
inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) true if the state was modified, false otherwise", + "addChunk": "Aggregate a chunk of data into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to aggregate into, parallel with startPositions and length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param context: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the addition\n :param values: (io.deephaven.chunk.Chunk) - the values to aggregate\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) true if the state was modified, false otherwise", "addRowSet": ":param context: io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext\n:param rowSet: io.deephaven.engine.rowset.RowSet\n:param destination: long\n:return: boolean", "ensureCapacity": "Ensure that this operator can handle destinations up to tableSize 
- 1.\n\n:param tableSize: (long) - the new size of the table", + "makeBucketedContext": "Make a IterativeChunkedAggregationOperator.BucketedContext suitable for this operator if necessary.\n\n:param size: (int) - The maximum size of input chunks that will be used with the result context\n:return: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) A new IterativeChunkedAggregationOperator.BucketedContext, or null if none is necessary", "propagateInitialState": "Perform any internal state keeping needed for destinations that were added during initialization.\n\n:param resultTable: (io.deephaven.engine.table.impl.QueryTable) - The result QueryTable after initialization", "propagateUpdates": "Perform any internal state keeping needed for destinations that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/WeightedAverageSumAggregationFactory.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/WeightedAverageSumAggregationFactory.json index fafabd227d2..a3d7fe5aeb5 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/WeightedAverageSumAggregationFactory.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/WeightedAverageSumAggregationFactory.json @@ -1,7 +1,6 @@ { "className": 
"io.deephaven.engine.table.impl.by.WeightedAverageSumAggregationFactory", "methods": { - "allowKeyOnlySubstitution": "Should we allow substitution with a KeyOnlyAggregationFactory (e.g. selectDistinct) when there are only\n key columns? Instances whose operators could have side effects or are already KeyOnlyAggregationFactory\n should return false.\n\n:return: (boolean) Whether to allow a KeyOnlyAggregationFactory to be substituted for this when there are only key\n columns", "makeAggregationContext": "Make an AggregationContext for this aggregation.\n\n:param table: (io.deephaven.engine.table.Table) - The source Table to aggregate\n:param groupByColumns: (java.lang.String...) - The key column names\n:return: (io.deephaven.engine.table.impl.by.AggregationContext) A new or safely reusable AggregationContext", "toString": ":return: java.lang.String" }, diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.json index 5adce7da523..7e2dd148f9a 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.ByteChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.json index 5eb12fa3e82..cc0b42f0cff 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.ByteRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.json index 875f77dfc95..3df0fbf6934 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.CharChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.json index 413d2fa5df5..f54c3fe3d77 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.CharRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.json index 9339203a1f2..1227a6490a0 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.DoubleChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.json index c57fa2dd34d..aa08a8a8517 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.DoubleRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.json index ae901209d21..4fedf4105c1 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.FloatChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.json index e9fe8f93bcc..a4afd5755cb 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.FloatRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.json index d7f2355cfd7..ebc962f2678 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.IntChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.json index a71e055bc03..037a85bb7b3 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.IntRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.json index 053a68fd994..4e795cbb096 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.LongChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.json index cf007100a29..80a234d451a 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.LongRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.json index d2e8f8e2ab8..dedc194e6a6 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.ObjectChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.json index 0262e611085..0e35f60b9c7 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.ObjectRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.json index a97172a5052..4271abea792 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.ShortChunkedCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.json index cfed8c0d9ef..9084b61b5ae 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.json @@ -9,6 +9,7 @@ "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "propagateUpdates": "Perform any internal state keeping needed for destinations 
that were added (went from 0 keys to > 0), removed\n (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that\n the arguments to this method should not be mutated in any way.\n\n:param downstream: (io.deephaven.engine.table.TableUpdate) - The downstream ShiftAwareListener.Update (which does not have its\n ModifiedColumnSet finalized yet)\n:param newDestinations: (io.deephaven.engine.rowset.RowSet) - New destinations added on this update", "removeChunk": "Remove a chunk of data previously aggregated into the result columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) 
true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." }, "path": "io.deephaven.engine.table.impl.by.ssmcountdistinct.count.ShortRollupCountDistinctOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.json index 0bedd341ea4..bb81d93ba90 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.json @@ -9,6 +9,7 @@ "makeSingletonContext": "Make a IterativeChunkedAggregationOperator.SingletonContext suitable for this operator if necessary.\n\n:param size: (int) - The maximum size of input chunks that will be used with the result context\n:return: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) A new IterativeChunkedAggregationOperator.SingletonContext, or null if none is necessary", "modifyChunk": "**Incompatible overloads text - text from the first overload:**\n\nModify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. 
Never\n includes modifies that have been shifted if IterativeChunkedAggregationOperator.requiresRowKeys() returns true - those are handled in\n IterativeChunkedAggregationOperator.shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk).\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the modification\n :param preValues: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param postValues: (io.deephaven.chunk.Chunk) - a chunk of values to aggregate\n :param postShiftRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in post-shift space\n :param destination: long\n :return: (boolean) true if the state was modified, false otherwise", "removeChunk": "Remove a chunk of data previously aggregated into the result 
columns.\n\n*Overload 1* \n :param bucketedContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.BucketedContext) - the operator-specific context\n :param values: (io.deephaven.chunk.Chunk) - a chunk of values that have been previously aggregated.\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destinations: (io.deephaven.chunk.IntChunk) - the destinations in resultColumn to remove the values from, parallel with startPositions and\n length\n :param startPositions: (io.deephaven.chunk.IntChunk) - the starting positions in the chunk for each destination\n :param length: (io.deephaven.chunk.IntChunk) - the number of values in the chunk for each destination\n :param stateModified: (io.deephaven.chunk.WritableBooleanChunk) - a boolean output array, parallel to destinations, which is set to true if the corresponding\n destination has been modified\n \n*Overload 2* \n :param singletonContext: (io.deephaven.engine.table.impl.by.IterativeChunkedAggregationOperator.SingletonContext) - the operator-specific context\n :param chunkSize: (int) - the size of the removal\n :param values: (io.deephaven.chunk.Chunk) - the values to remove from the aggregation\n :param inputRowKeys: (io.deephaven.chunk.LongChunk) - the input row keys, in pre-shift space\n :param destination: (long) - the destination in the result columns\n :return: (boolean) true if the state was modified, false otherwise", + "requiresRunFinds": "Some operators require that all values for a destination within a given chunk are adjacent. These operators must\n return true.\n\n:return: (boolean) true if the operator requires runs of the same destination to be adjacent", "startTrackingPrevValues": "Called after initialization; when the operator's result columns must have previous tracking enabled." 
}, "path": "io.deephaven.engine.table.impl.by.ssmminmax.SsmChunkedMinMaxOperator", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.json index d5f485b5fd0..add8d15e188 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.json @@ -4,6 +4,6 @@ "copy": "Create a copy of this WhereFilter.\n\n:return: (io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter) an independent copy of this WhereFilter." }, "path": "io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter", - "text": "Filter that releases the required number of rows from a table to saturate the UGP cycle.\n \n The table has an initial size, which can be thought of as the size during query initialization. There is an initial\n number of rows that are released, which is then used to tune the number of rows to release on the subsequent cycle.\n \n The targetFactor parameter is multiplied by the UGP's targetCycle. This allows you to determine how busy you want the\n UGP to be. For example a factor of 1, will attempt to hit the target cycle exactly. A target of 0.5 should result an\n UGP ratio of about 50%. 
A factor of 10 would mean that the system will extend beyond the target cycle time, coalesce\n updates accordingly and have a ratio that is nearly 100%.\n \n The time the rows are released is recorded, and a terminal notification is enqueued to record the end of the cycle.\n On each cycle, the number of rows per second is computed; and then the number of rows released is the UGP's target\n cycle multiplied by the rows per second multiplied by the target factor.\n\n\n \n The AutotuningIncrementalReleaseFilter can be used to benchmark how many rows of data a query can process. In its\n simplest form we can measure how many rows a lastBy statement can process. For example:\n \n\n import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter\n\n quotes = engine.t(\"FeedOS\", \"EquityQuoteL1\").where(\"Date=lastBusinessDateNy()\")\n filter=new AutoTuningIncrementalReleaseFilter(10000, 10000, 1)\n quotesFiltered = quotes.where(filter)\n currentQuote = quotesFiltered.lastBy(\"LocalCodeStr\").update(\"Mid=(Bid + Ask)/2\")\n \n \n Produces a currentQuote table, and you can view the Log tab to determine how many rows per second were processed. 
The\n summary is sent to the WARN level:\n \n \n 12:55:49.985 WARN Completed release 6.97 seconds, rows=19630961, rows/second=2,817,053.86\n \n \n If verbose mode is enabled, progress is displayed for each cycle at the INFO level.\n \n\n \n You may specify a StreamLoggerImpl() to send the data to STDOUT, as follows:\n \n\n import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter\n\n quotes = engine.t(\"FeedOS\", \"EquityQuoteL1\").where(\"Date=lastBusinessDateNy()\")\n logger = new io.deephaven.io.logger.StreamLoggerImpl()\n filterQuotes=new AutoTuningIncrementalReleaseFilter(logger, 10000, 10000, 1.0d, true)\n quotesFiltered = quotes.where(filterQuotes)\n currentQuote = quotesFiltered.lastBy(\"LocalCodeStr\").update(\"Mid=(Bid + Ask)/2\")\n \n\n The verbose information and the final report are easily visible on your console.\n \n\n The AutotuningIncrementalReleaseFilter is best suited for queries that have a single source table with arbitrary\n amounts of processing on that table. Multiple incremental release filters may be combined, and each filter will\n report the number of rows that were released per second, however the data is not synchronized between tables and it\n is not possible to differentiate which table is contributing more to the query's load without examining the\n performance tables. 
You may need to adjust the initial size parameters so that one table does not complete processing\n before another.\n \n \n import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter\n\n quotes = engine.t(\"FeedOS\", \"EquityQuoteL1\").where(\"Date=lastBusinessDateNy()\")\n trades = engine.t(\"FeedOS\", \"EquityTradeL1\").where(\"Date=lastBusinessDateNy()\")\n filterQuotes=new AutoTuningIncrementalReleaseFilter(10000, 10000, 1, true)\n quotesFiltered = quotes.where(filterQuotes)\n filterTrades=new AutoTuningIncrementalReleaseFilter(10000, 10000, 1, true)\n tradesFiltered = trades.where(filterTrades)\n\n decorated = tradesFiltered.aj(quotesFiltered, \"LocalCodeStr,MarketTimestamp\", \"QuoteTime=MarketTimestamp,Bid,BidSize,Ask,AskSize\")", + "text": "Filter that releases the required number of rows from a table to saturate the UGP cycle.\n \n The table has an initial size, which can be thought of as the size during query initialization. There is an initial\n number of rows that are released, which is then used to tune the number of rows to release on the subsequent cycle.\n \n You must invoke the BaseIncrementalReleaseFilter.start() method to begin producing rows.\n \n\n The targetFactor parameter is multiplied by the UGP's targetCycle. This allows you to determine how busy you want the\n UGP to be. For example a factor of 1, will attempt to hit the target cycle exactly. A target of 0.5 should result an\n UGP ratio of about 50%. 
A factor of 10 would mean that the system will extend beyond the target cycle time, coalesce\n updates accordingly and have a ratio that is nearly 100%.\n \n The time the rows are released is recorded, and a terminal notification is enqueued to record the end of the cycle.\n On each cycle, the number of rows per second is computed; and then the number of rows released is the UGP's target\n cycle multiplied by the rows per second multiplied by the target factor.\n\n\n \n The AutotuningIncrementalReleaseFilter can be used to benchmark how many rows of data a query can process. In its\n simplest form we can measure how many rows a lastBy statement can process. For example:\n \n\n import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter\n\n quotes = engine.t(\"FeedOS\", \"EquityQuoteL1\").where(\"Date=lastBusinessDateNy()\")\n filter=new AutoTuningIncrementalReleaseFilter(10000, 10000, 1)\n quotesFiltered = quotes.where(filter)\n currentQuote = quotesFiltered.lastBy(\"LocalCodeStr\").update(\"Mid=(Bid + Ask)/2\")\n \n \n Produces a currentQuote table, and you can view the Log tab to determine how many rows per second were processed. 
The\n summary is sent to the WARN level:\n \n \n 12:55:49.985 WARN Completed release 6.97 seconds, rows=19630961, rows/second=2,817,053.86\n \n \n If verbose mode is enabled, progress is displayed for each cycle at the INFO level.\n \n\n \n You may specify a StreamLoggerImpl() to send the data to STDOUT, as follows:\n \n\n import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter\n\n quotes = engine.t(\"FeedOS\", \"EquityQuoteL1\").where(\"Date=lastBusinessDateNy()\")\n logger = new io.deephaven.io.logger.StreamLoggerImpl()\n filterQuotes=new AutoTuningIncrementalReleaseFilter(logger, 10000, 10000, 1.0d, true)\n quotesFiltered = quotes.where(filterQuotes)\n currentQuote = quotesFiltered.lastBy(\"LocalCodeStr\").update(\"Mid=(Bid + Ask)/2\")\n filterQuotes.start()\n \n\n The verbose information and the final report are easily visible on your console.\n \n\n The AutotuningIncrementalReleaseFilter is best suited for queries that have a single source table with arbitrary\n amounts of processing on that table. Multiple incremental release filters may be combined, and each filter will\n report the number of rows that were released per second, however the data is not synchronized between tables and it\n is not possible to differentiate which table is contributing more to the query's load without examining the\n performance tables. 
You may need to adjust the initial size parameters so that one table does not complete processing\n before another.\n \n \n import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter\n\n quotes = engine.t(\"FeedOS\", \"EquityQuoteL1\").where(\"Date=lastBusinessDateNy()\")\n trades = engine.t(\"FeedOS\", \"EquityTradeL1\").where(\"Date=lastBusinessDateNy()\")\n filterQuotes=new AutoTuningIncrementalReleaseFilter(10000, 10000, 1, true)\n quotesFiltered = quotes.where(filterQuotes)\n filterTrades=new AutoTuningIncrementalReleaseFilter(10000, 10000, 1, true)\n tradesFiltered = trades.where(filterTrades)\n\n decorated = tradesFiltered.aj(quotesFiltered, \"LocalCodeStr,MarketTimestamp\", \"QuoteTime=MarketTimestamp,Bid,BidSize,Ask,AskSize\")\n\n filterTrades.start()\n filterQuotes.start()", "typeName": "class" } \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.json index 0a61f11516a..3ed860959c0 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.json @@ -2,6 +2,7 @@ "className": "io.deephaven.engine.table.impl.select.BaseIncrementalReleaseFilter", "methods": { "copy": "Create a copy of this WhereFilter.\n\n:return: (io.deephaven.engine.table.impl.select.BaseIncrementalReleaseFilter) an independent copy of this WhereFilter.", + "durationNanos": "How many nanos between the first release event and the final release event?\n\n:return: (long) nano duration of this filter, or NULL_LONG if the filter is not completed", "filter": "Filter selection to only matching rows.\n\n:param selection: (io.deephaven.engine.rowset.RowSet) - the indices that should be filtered. 
The selection must be a subset of fullSet, and may include\n rows that the engine determines need not be evaluated to produce the result. Implementations may\n not mutate or close selection.\n:param fullSet: (io.deephaven.engine.rowset.RowSet) - the complete RowSet of the table to filter. The fullSet is used for calculating variables like \"i\"\n or \"ii\". Implementations may not mutate or close fullSet.\n:param table: (io.deephaven.engine.table.Table) - the table to filter\n:param usePrev: (boolean) - true if previous values should be used. Implementing previous value filtering is optional, and a\n WhereFilter.PreviousFilteringNotSupported exception may be thrown. If a PreviousFiltering exception is thrown,\n then the caller must acquire the UpdateGraphProcessor lock.\n:return: (io.deephaven.engine.rowset.WritableRowSet) The subset of selection accepted by this filter; ownership passes to the caller", "getColumnArrays": "Get the array columns required by this select filter.\n\n:return: (java.util.List) the columns used as array input by this select filter.", "getColumns": "Get the columns required by this select filter.\n\n:return: (java.util.List) the columns used as input by this select filter.", @@ -10,7 +11,9 @@ "init": "Initialize this select filter given the table definition\n\n:param tableDefinition: (io.deephaven.engine.table.TableDefinition) - the definition of the table that will be filtered", "isRefreshing": "Is this filter refreshing?\n\n:return: (boolean) if this filter is refreshing", "isSimpleFilter": ":return: (boolean) true if this is a filter that does not require any code execution, but rather is handled entirely within\n the database engine.", - "setRecomputeListener": "Set the ShiftObliviousListener that should be notified if results based on this filter must be recomputed.\n\n:param listener: (io.deephaven.engine.table.impl.select.WhereFilter.RecomputeListener) - the listener to notify." 
+ "setRecomputeListener": "Set the ShiftObliviousListener that should be notified if results based on this filter must be recomputed.\n\n:param listener: (io.deephaven.engine.table.impl.select.WhereFilter.RecomputeListener) - the listener to notify.", + "start": "Begin releasing rows during update propagation.", + "waitForCompletion": "Wait for all rows to be released.\n\n*Overload 1* \n \n \n*Overload 2* \n :param timeoutMillis: long" }, "path": "io.deephaven.engine.table.impl.select.BaseIncrementalReleaseFilter", "text": "Base class for filters that will release more rows of a table on each UGP cycle.\n\n The use case is for benchmarks that want to replay a table in order to better understand incremental processing\n capacity.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/sources/DatetimeAsLongColumnSource.json b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/sources/DatetimeAsLongColumnSource.json index 4982e258628..9f62a2a98eb 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/sources/DatetimeAsLongColumnSource.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/table/impl/sources/DatetimeAsLongColumnSource.json @@ -1,5 +1,5 @@ { - "className": "io.deephaven.engine.table.impl.sources.DatetimeAsLongColumnSource", + "className": "io.deephaven.engine.table.impl.sources.DateTimeAsLongColumnSource", "methods": { "allowsReinterpret": "Test if a reinterpret call will succeed.\n\nNote: Java generics information - \n\n:param alternateDataType: (java.lang.Class) - The alternative type to consider\n:return: (boolean) If a reinterpret on this column source with the supplied alternateDataType will succeed.", "doReinterpret": "Supply allowed reinterpret results. 
The default implementation handles the most common case to avoid code\n duplication.\n\nNote: Java generics information - \n\n:param alternateDataType: (java.lang.Class) - The alternate data type\n:return: (io.deephaven.engine.table.ColumnSource) The resulting ColumnSource", @@ -10,7 +10,7 @@ "isImmutable": "Determine if this column source is immutable, meaning that the values at a given row key never change.\n\n:return: (boolean) true if the values at a given row key of the column source never change, false otherwise", "makeFillContext": "Allocate a new ChunkSource.FillContext for filling chunks from this FillContextMaker, typically a\n ChunkSource.\n\n:param chunkCapacity: (int) - The maximum size of any WritableChunk that will be filled with this context\n:param sharedContext: (io.deephaven.engine.table.SharedContext) - Shared store of intermediate results.\n:return: (io.deephaven.engine.table.ChunkSource.FillContext) A context for use with fill operations" }, - "path": "io.deephaven.engine.table.impl.sources.DatetimeAsLongColumnSource", - "text": "Reinterpret result ColumnSource implementations that translates Boolean to byte values.", + "path": "io.deephaven.engine.table.impl.sources.DateTimeAsLongColumnSource", + "text": "Reinterpret result ColumnSource implementations that translates DateTime to long values.", "typeName": "class" } \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/updategraph/UpdateGraphProcessor.json b/Integrations/python/deephaven/doc/io/deephaven/engine/updategraph/UpdateGraphProcessor.json index 608df1bbb14..6d7539555b4 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/updategraph/UpdateGraphProcessor.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/updategraph/UpdateGraphProcessor.json @@ -22,7 +22,7 @@ "maybeAddNotification": "Add a notification for this NotificationQueue to deliver (by invoking its run() method), iff the delivery step is\n the current step 
and the update cycle for that step is still in process. This is only supported for non-terminal\n notifications.\n\n:param notification: (io.deephaven.engine.updategraph.NotificationQueue.Notification) - The notification to add\n:param deliveryStep: (long) - The step to deliver this notification on\n:return: boolean", "refreshUpdateSourceForUnitTests": "Refresh an update source on a simulated UGP run thread, rather than this thread.\n\n:param updateSource: (java.lang.Runnable) - The update source to run", "removeSource": "Remove a source from this registrar.\n\n:param updateSource: (java.lang.Runnable) - The table to remove", - "removeTables": "Remove a collection of tables from the list of refreshing tables.\n\n:param tablesToRemove: (java.util.Collection) - The tables to remove from the list of refreshing tables", + "removeSources": "Remove a collection of sources from the list of refreshing sources.\n\n:param sourcesToRemove: (java.util.Collection) - The sources to remove from the list of refreshing sources", "requestRefresh": "Request that the next update cycle begin as soon as practicable. This \"hurry-up\" cycle happens through normal\n means using the refresh thread and its workers.", "requestSignal": ":param updateGraphProcessorCondition: java.util.concurrent.locks.Condition", "resetCycleDuration": "Resets the run cycle time to the default target configured via the\n \"UpdateGraphProcessor.targetCycleDurationMillis\" property.", @@ -34,6 +34,7 @@ "setWatchDogMillis": "Enable the loop watchdog with the specified timeout. 
A value of 0 disables the watchdog.\n\n:param watchDogMillis: (int) - The time in milliseconds to set the watchdog, or 0 to disable.", "setWatchDogTimeoutProcedure": "Set the procedure to be called when the watchdog times out.\n\n:param procedure: (java.util.function.LongConsumer) - The procedure to call", "sharedLock": "Get the shared lock for this UpdateGraphProcessor.\n \n Using this lock will prevent run processing from proceeding concurrently, but will allow other read-only\n processing to proceed.\n \n The shared lock implementation is expected to support reentrance.\n \n This lock does not support Lock.newCondition(). Use the exclusive\n lock if you need to wait on events that are driven by run processing.\n\n:return: (io.deephaven.util.locks.AwareFunctionalLock) The shared lock for this UpdateGraphProcessor", + "sourceCount": "Return the number of valid sources.\n\n:return: (int) the number of valid sources", "start": "Start the table run thread.", "startCycleForUnitTests": "Begin the next update cycle while in unit-test mode. Note that this happens on a simulated UGP run thread, rather than this thread.", "toString": ":return: java.lang.String", diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/util/WorkerPythonEnvironment.json b/Integrations/python/deephaven/doc/io/deephaven/engine/util/WorkerPythonEnvironment.json deleted file mode 100644 index d144cf7d4e2..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/util/WorkerPythonEnvironment.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "className": "io.deephaven.engine.util.WorkerPythonEnvironment", - "methods": { - "eval": "Evaluates the given string as a statement.\n\n:param evalString: (java.lang.String) - the statement to evaluate", - "fetch": "Retrieves a value from our Python holder's globals.\n\n When the object is a convertible PyObject; we return the PyObject. 
Otherwise, we'll return a\n PythonRemoteQuery.PickledResult, which is suitable for unpickling by the remote side.\n\n The caller should never serialize an unconverted PyObject; it contains a raw pointer and will result in a Hotspot\n or memory corruption on the remote side.\n\n:param name: (java.lang.String) - the variable to retrieve\n:return: (java.lang.Object) the variable as a Java object; or pickled", - "valueOf": "Returns the enum constant of this type with the specified name.\nThe string must match exactly an identifier used to declare an\nenum constant in this type. (Extraneous whitespace characters are \nnot permitted.)\n\n:param name: (java.lang.String) - the name of the enum constant to be returned.\n:return: (io.deephaven.engine.util.WorkerPythonEnvironment) the enum constant with the specified name", - "values": "Returns an array containing the constants of this enum type, in\nthe order they are declared.\n\n:return: (io.deephaven.engine.util.WorkerPythonEnvironment[]) an array containing the constants of this enum type, in the order they are declared" - }, - "path": "io.deephaven.engine.util.WorkerPythonEnvironment", - "text": "This class is the support infrastructure for running Python remote queries.\n\n It is a singleton that contains an instance of a PythonHolder. All of the specially handled engine operations from a\n remote Python session should execute queries which interact wtih this class. 
The script sessions that run for\n PersistentQueries or consoles are handled separately by the PythonDeephavenSession.", - "typeName": "enum" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/engine/util/jpy/JpyInit.json b/Integrations/python/deephaven/doc/io/deephaven/engine/util/jpy/JpyInit.json index e070eb543d1..712e98c0acd 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/engine/util/jpy/JpyInit.json +++ b/Integrations/python/deephaven/doc/io/deephaven/engine/util/jpy/JpyInit.json @@ -1,7 +1,7 @@ { "className": "io.deephaven.engine.util.jpy.JpyInit", "methods": { - "init": "*Overload 1* \n :param log: io.deephaven.io.logger.Logger\n \n*Overload 2* \n :param log: io.deephaven.io.logger.Logger\n :param jpyConfig: io.deephaven.jpy.JpyConfigExt" + "init": "First, checks Configuration.getInstance() for any explicit jpy properties. If none are set, it will\n source the configuration from JpyConfigFromSubprocess.fromSubprocess(Duration).\n\n*Overload 1* \n :param log: (io.deephaven.io.logger.Logger) - the log\n \n*Overload 2* \n :param log: io.deephaven.io.logger.Logger\n :param jpyConfig: io.deephaven.jpy.JpyConfigExt" }, "path": "io.deephaven.engine.util.jpy.JpyInit", "text": "Initializes Jpy.\n\n A guarded initialization via JpyConfigExt.startPython()", diff --git a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriModule.json b/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriModule.json index 5ff1837bd49..32a4b5a5796 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriModule.json +++ b/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriModule.json @@ -1,11 +1,12 @@ { "className": "io.deephaven.grpc_api.uri.UriModule", "methods": { - "bindApplicationResolver": ":param resolver: io.deephaven.grpc_api.uri.ApplicationResolver\n:return: io.deephaven.grpc_api.uri.UriResolver", - "bindCsvResolver": ":param resolver: 
io.deephaven.grpc_api.uri.CsvTableResolver\n:return: io.deephaven.grpc_api.uri.UriResolver", - "bindParquetResolver": ":param resolver: io.deephaven.grpc_api.uri.ParquetTableResolver\n:return: io.deephaven.grpc_api.uri.UriResolver", - "bindQueryScopeResolver": ":param resolver: io.deephaven.grpc_api.uri.QueryScopeResolver\n:return: io.deephaven.grpc_api.uri.UriResolver", - "bindsBarrageTableResolver": ":param resolver: io.deephaven.grpc_api.uri.BarrageTableResolver\n:return: io.deephaven.grpc_api.uri.UriResolver" + "bindApplicationResolver": ":param resolver: io.deephaven.grpc_api.uri.ApplicationResolver\n:return: io.deephaven.uri.resolver.UriResolver", + "bindCsvResolver": ":param resolver: io.deephaven.grpc_api.uri.CsvTableResolver\n:return: io.deephaven.uri.resolver.UriResolver", + "bindParquetResolver": ":param resolver: io.deephaven.grpc_api.uri.ParquetTableResolver\n:return: io.deephaven.uri.resolver.UriResolver", + "bindQueryScopeResolver": ":param resolver: io.deephaven.grpc_api.uri.QueryScopeResolver\n:return: io.deephaven.uri.resolver.UriResolver", + "bindResolvers": ":param resolvers: java.util.Set\n:return: io.deephaven.uri.resolver.UriResolvers", + "bindsBarrageTableResolver": ":param resolver: io.deephaven.grpc_api.uri.BarrageTableResolver\n:return: io.deephaven.uri.resolver.UriResolver" }, "path": "io.deephaven.grpc_api.uri.UriModule", "text": "Installs the URI resolvers. 
See each specific resolver for more information.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolver.json b/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolver.json deleted file mode 100644 index 2770015be5e..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolver.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "className": "io.deephaven.grpc_api.uri.UriResolver", - "methods": { - "isResolvable": "Returns true if the resolver can resolve the uri.\n\n:param uri: (java.net.URI) - the uri\n:return: (boolean) true if this resolver can resolve uri", - "resolve": "Resolve uri into an object.\n\n:param uri: (java.net.URI) - the URI\n:return: (java.lang.Object) the object", - "schemes": "The supported schemes.\n\n:return: (java.util.Set) the schemes" - }, - "path": "io.deephaven.grpc_api.uri.UriResolver", - "text": "A URI resolver resolves URIs into objects.", - "typeName": "interface" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolvers.json b/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolvers.json deleted file mode 100644 index 59edb8ad43b..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolvers.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "className": "io.deephaven.grpc_api.uri.UriResolvers", - "methods": { - "find": "Note: Java generics information - \n\n:param clazz: java.lang.Class\n:return: java.util.Optional", - "resolve": ":param uri: java.net.URI\n:return: java.lang.Object", - "resolver": ":param uri: java.net.URI\n:return: io.deephaven.grpc_api.uri.UriResolver", - "resolvers": ":return: java.util.Set" - }, - "path": "io.deephaven.grpc_api.uri.UriResolvers", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolversInstance.json 
b/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolversInstance.json deleted file mode 100644 index 01aa32b6c42..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/grpc_api/uri/UriResolversInstance.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "className": "io.deephaven.grpc_api.uri.UriResolversInstance", - "methods": { - "get": ":return: io.deephaven.grpc_api.uri.UriResolvers", - "init": ":param instance: io.deephaven.grpc_api.uri.UriResolvers" - }, - "path": "io.deephaven.grpc_api.uri.UriResolversInstance", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange.json index d30454ddf0a..24c7b32072a 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange.json @@ -5,13 +5,13 @@ "getDefaultInstance": ":return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange", "getDefaultInstanceForType": ":return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange", "getDescriptor": ":return: com.google.protobuf.Descriptors.Descriptor", - "getMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (long) The maxDateNanos.", - "getMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (long) The minDateNanos.", + "getMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (long) The maxDateNanos.", + "getMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (long) The minDateNanos.", "getParserForType": ":return: com.google.protobuf.Parser", "getSerializedSize": ":return: int", "getUnknownFields": ":return: 
com.google.protobuf.UnknownFieldSet", - "hasMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (boolean) Whether the maxDateNanos field is set.", - "hasMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (boolean) Whether the minDateNanos field is set.", + "hasMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (boolean) Whether the maxDateNanos field is set.", + "hasMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (boolean) Whether the minDateNanos field is set.", "hashCode": ":return: int", "isInitialized": ":return: boolean", "newBuilder": "*Overload 1* \n :return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder\n \n*Overload 2* \n :param prototype: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange\n :return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange/Builder.json index 12e402864a1..46c3565471f 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange/Builder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRange/Builder.json @@ -6,23 +6,23 @@ "buildPartial": ":return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange", "clear": ":return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", "clearField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", - "clearMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: 
(io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", - "clearMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", + "clearMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", + "clearMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", "clearOneof": ":param oneof: com.google.protobuf.Descriptors.OneofDescriptor\n:return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", "clone": ":return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", "getDefaultInstanceForType": ":return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange", "getDescriptor": ":return: com.google.protobuf.Descriptors.Descriptor", "getDescriptorForType": ":return: com.google.protobuf.Descriptors.Descriptor", - "getMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (long) The maxDateNanos.", - "getMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (long) The minDateNanos.", - "hasMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (boolean) Whether the maxDateNanos field is set.", - "hasMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (boolean) Whether the minDateNanos field is set.", + "getMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (long) The maxDateNanos.", + "getMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (long) The minDateNanos.", + "hasMaxDateNanos": "optional int64 max_date_nanos = 
2 [jstype = JS_STRING];\n\n:return: (boolean) Whether the maxDateNanos field is set.", + "hasMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (boolean) Whether the minDateNanos field is set.", "isInitialized": ":return: boolean", "mergeFrom": "*Overload 1* \n :param other: com.google.protobuf.Message\n :return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder\n \n*Overload 2* \n :param other: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange\n :return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder\n \n*Overload 3* \n :param input: com.google.protobuf.CodedInputStream\n :param extensionRegistry: com.google.protobuf.ExtensionRegistryLite\n :return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", "mergeUnknownFields": ":param unknownFields: com.google.protobuf.UnknownFieldSet\n:return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", "setField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:param value: java.lang.Object\n:return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", - "setMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:param value: (long) - The maxDateNanos to set.\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", - "setMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:param value: (long) - The minDateNanos to set.\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", + "setMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:param value: (long) - The maxDateNanos to set.\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", + "setMinDateNanos": "optional int64 min_date_nanos = 
1 [jstype = JS_STRING];\n\n:param value: (long) - The minDateNanos to set.\n:return: (io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder) This builder for chaining.", "setRepeatedField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:param index: int\n:param value: java.lang.Object\n:return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder", "setUnknownFields": ":param unknownFields: com.google.protobuf.UnknownFieldSet\n:return: io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRange.Builder" }, diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRangeOrBuilder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRangeOrBuilder.json index 2ad48ba9186..94eb2934ab5 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRangeOrBuilder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/grpc/RunChartDownsampleRequest/ZoomRangeOrBuilder.json @@ -1,10 +1,10 @@ { "className": "io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest$ZoomRangeOrBuilder", "methods": { - "getMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (long) The maxDateNanos.", - "getMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (long) The minDateNanos.", - "hasMaxDateNanos": "int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (boolean) Whether the maxDateNanos field is set.", - "hasMinDateNanos": "int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (boolean) Whether the minDateNanos field is set." 
+ "getMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (long) The maxDateNanos.", + "getMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (long) The minDateNanos.", + "hasMaxDateNanos": "optional int64 max_date_nanos = 2 [jstype = JS_STRING];\n\n:return: (boolean) Whether the maxDateNanos field is set.", + "hasMinDateNanos": "optional int64 min_date_nanos = 1 [jstype = JS_STRING];\n\n:return: (boolean) Whether the minDateNanos field is set." }, "path": "io.deephaven.proto.backplane.grpc.RunChartDownsampleRequest.ZoomRangeOrBuilder", "typeName": "interface" diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor.json index a22e1dce117..74b8e6d522d 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor.json @@ -23,15 +23,15 @@ "getTablesList": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: java.util.List", "getTablesOrBuilder": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:param index: int\n:return: io.deephaven.proto.backplane.grpc.ExportedTableCreationResponseOrBuilder", "getTablesOrBuilderList": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: java.util.List", - "getTitle": "string title = 1;\n\n:return: (java.lang.String) The title.", - "getTitleBytes": "string title = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", + "getTitle": "optional string title = 1;\n\n:return: (java.lang.String) The title.", + "getTitleBytes": "optional string title = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", "getTitleColor": 
"string title_color = 3;\n\n:return: (java.lang.String) The titleColor.", "getTitleColorBytes": "string title_color = 3;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleColor.", "getTitleFont": "string title_font = 2;\n\n:return: (java.lang.String) The titleFont.", "getTitleFontBytes": "string title_font = 2;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleFont.", "getUnknownFields": ":return: com.google.protobuf.UnknownFieldSet", "getUpdateInterval": "int64 update_interval = 7;\n\n:return: (long) The updateInterval.", - "hasTitle": "string title = 1;\n\n:return: (boolean) Whether the title field is set.", + "hasTitle": "optional string title = 1;\n\n:return: (boolean) Whether the title field is set.", "hashCode": ":return: int", "isInitialized": ":return: boolean", "newBuilder": "*Overload 1* \n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder\n \n*Overload 2* \n :param prototype: io.deephaven.proto.backplane.script.grpc.FigureDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor.json index f56625faa4a..a614721ec42 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor.json @@ -9,11 +9,11 @@ "getDefaultInstance": ":return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor", "getDefaultInstanceForType": ":return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor", "getDescriptor": ":return: com.google.protobuf.Descriptors.Descriptor", - "getFormatPattern": "string format_pattern = 9;\n\n:return: (java.lang.String) The 
formatPattern.", - "getFormatPatternBytes": "string format_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for formatPattern.", + "getFormatPattern": "optional string format_pattern = 9;\n\n:return: (java.lang.String) The formatPattern.", + "getFormatPatternBytes": "optional string format_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for formatPattern.", "getFormatType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType) The formatType.", "getFormatTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (int) The enum numeric value on the wire for formatType.", - "getGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: (double) The gapBetweenMajorTicks.", + "getGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (double) The gapBetweenMajorTicks.", "getId": "string id = 1;\n\n:return: (java.lang.String) The id.", "getIdBytes": "string id = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for id.", "getInvert": "bool invert = 19;\n\n:return: (boolean) The invert.", @@ -42,8 +42,8 @@ "getTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType type = 3;\n\n:return: (int) The enum numeric value on the wire for type.", "getUnknownFields": ":return: com.google.protobuf.UnknownFieldSet", "hasBusinessCalendarDescriptor": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor business_calendar_descriptor = 21;\n\n:return: (boolean) Whether the businessCalendarDescriptor field is set.", - "hasFormatPattern": "string format_pattern = 9;\n\n:return: (boolean) Whether the formatPattern field is set.", - "hasGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: 
(boolean) Whether the gapBetweenMajorTicks field is set.", + "hasFormatPattern": "optional string format_pattern = 9;\n\n:return: (boolean) Whether the formatPattern field is set.", + "hasGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (boolean) Whether the gapBetweenMajorTicks field is set.", "hashCode": ":return: int", "isInitialized": ":return: boolean", "newBuilder": "*Overload 1* \n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder\n \n*Overload 2* \n :param prototype: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor/Builder.json index 2f62c061d78..23c8824505c 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor/Builder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptor/Builder.json @@ -10,9 +10,9 @@ "clearBusinessCalendarDescriptor": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor business_calendar_descriptor = 21;\n\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder", "clearColor": "string color = 10;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "clearField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder", - "clearFormatPattern": "string format_pattern = 9;\n\n:return: 
(io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", + "clearFormatPattern": "optional string format_pattern = 9;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "clearFormatType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", - "clearGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", + "clearGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "clearId": "string id = 1;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "clearInvert": "bool invert = 19;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "clearIsTimeAxis": "bool is_time_axis = 20;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", @@ -39,11 +39,11 @@ "getDefaultInstanceForType": ":return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor", "getDescriptor": ":return: com.google.protobuf.Descriptors.Descriptor", "getDescriptorForType": ":return: com.google.protobuf.Descriptors.Descriptor", - "getFormatPattern": "string format_pattern = 9;\n\n:return: (java.lang.String) The formatPattern.", - "getFormatPatternBytes": "string format_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for formatPattern.", + "getFormatPattern": "optional string format_pattern = 
9;\n\n:return: (java.lang.String) The formatPattern.", + "getFormatPatternBytes": "optional string format_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for formatPattern.", "getFormatType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType) The formatType.", "getFormatTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (int) The enum numeric value on the wire for formatType.", - "getGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: (double) The gapBetweenMajorTicks.", + "getGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (double) The gapBetweenMajorTicks.", "getId": "string id = 1;\n\n:return: (java.lang.String) The id.", "getIdBytes": "string id = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for id.", "getInvert": "bool invert = 19;\n\n:return: (boolean) The invert.", @@ -69,8 +69,8 @@ "getType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType type = 3;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType) The type.", "getTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType type = 3;\n\n:return: (int) The enum numeric value on the wire for type.", "hasBusinessCalendarDescriptor": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor business_calendar_descriptor = 21;\n\n:return: (boolean) Whether the businessCalendarDescriptor field is set.", - "hasFormatPattern": "string format_pattern = 9;\n\n:return: (boolean) Whether the formatPattern field is set.", - "hasGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: (boolean) Whether the gapBetweenMajorTicks field is set.", 
+ "hasFormatPattern": "optional string format_pattern = 9;\n\n:return: (boolean) Whether the formatPattern field is set.", + "hasGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (boolean) Whether the gapBetweenMajorTicks field is set.", "isInitialized": ":return: boolean", "mergeBusinessCalendarDescriptor": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor business_calendar_descriptor = 21;\n\n:param value: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder", "mergeFrom": "*Overload 1* \n :param other: com.google.protobuf.Message\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder\n \n*Overload 2* \n :param other: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder\n \n*Overload 3* \n :param input: com.google.protobuf.CodedInputStream\n :param extensionRegistry: com.google.protobuf.ExtensionRegistryLite\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder", @@ -79,11 +79,11 @@ "setColor": "string color = 10;\n\n:param value: (java.lang.String) - The color to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setColorBytes": "string color = 10;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for color to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:param value: java.lang.Object\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder", - "setFormatPattern": "string format_pattern = 9;\n\n:param value: 
(java.lang.String) - The formatPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", - "setFormatPatternBytes": "string format_pattern = 9;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for formatPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", + "setFormatPattern": "optional string format_pattern = 9;\n\n:param value: (java.lang.String) - The formatPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", + "setFormatPatternBytes": "optional string format_pattern = 9;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for formatPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setFormatType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:param value: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType) - The formatType to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setFormatTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:param value: (int) - The enum numeric value on the wire for formatType to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", - "setGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:param value: (double) - The gapBetweenMajorTicks to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", + "setGapBetweenMajorTicks": "optional double gap_between_major_ticks = 
16;\n\n:param value: (double) - The gapBetweenMajorTicks to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setId": "string id = 1;\n\n:param value: (java.lang.String) - The id to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setIdBytes": "string id = 1;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for id to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", "setInvert": "bool invert = 19;\n\n:param value: (boolean) - The invert to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.Builder) This builder for chaining.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptorOrBuilder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptorOrBuilder.json index 435619374fa..06a60f1c029 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptorOrBuilder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/AxisDescriptorOrBuilder.json @@ -5,11 +5,11 @@ "getBusinessCalendarDescriptorOrBuilder": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor business_calendar_descriptor = 21;\n\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptorOrBuilder", "getColor": "string color = 10;\n\n:return: (java.lang.String) The color.", "getColorBytes": "string color = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for color.", - "getFormatPattern": "string format_pattern = 9;\n\n:return: (java.lang.String) The formatPattern.", - "getFormatPatternBytes": "string format_pattern = 
9;\n\n:return: (com.google.protobuf.ByteString) The bytes for formatPattern.", + "getFormatPattern": "optional string format_pattern = 9;\n\n:return: (java.lang.String) The formatPattern.", + "getFormatPatternBytes": "optional string format_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for formatPattern.", "getFormatType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType) The formatType.", "getFormatTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType format_type = 2;\n\n:return: (int) The enum numeric value on the wire for formatType.", - "getGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: (double) The gapBetweenMajorTicks.", + "getGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (double) The gapBetweenMajorTicks.", "getId": "string id = 1;\n\n:return: (java.lang.String) The id.", "getIdBytes": "string id = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for id.", "getInvert": "bool invert = 19;\n\n:return: (boolean) The invert.", @@ -35,8 +35,8 @@ "getType": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType type = 3;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType) The type.", "getTypeValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType type = 3;\n\n:return: (int) The enum numeric value on the wire for type.", "hasBusinessCalendarDescriptor": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor business_calendar_descriptor = 21;\n\n:return: (boolean) Whether the businessCalendarDescriptor field is set.", - "hasFormatPattern": "string format_pattern = 9;\n\n:return: (boolean) Whether the formatPattern field is set.", - 
"hasGapBetweenMajorTicks": "double gap_between_major_ticks = 16;\n\n:return: (boolean) Whether the gapBetweenMajorTicks field is set." + "hasFormatPattern": "optional string format_pattern = 9;\n\n:return: (boolean) Whether the formatPattern field is set.", + "hasGapBetweenMajorTicks": "optional double gap_between_major_ticks = 16;\n\n:return: (boolean) Whether the gapBetweenMajorTicks field is set." }, "path": "io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptorOrBuilder", "typeName": "interface" diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/Builder.json index dbc75db3c17..c511d05c0af 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/Builder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/Builder.json @@ -21,7 +21,7 @@ "clearOneof": ":param oneof: com.google.protobuf.Descriptors.OneofDescriptor\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", "clearRows": "int32 rows = 9;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "clearTables": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", - "clearTitle": "string title = 1;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", + "clearTitle": "optional string title = 1;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "clearTitleColor": "string title_color = 3;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "clearTitleFont": "string title_font = 
2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "clearUpdateInterval": "int64 update_interval = 7;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", @@ -49,14 +49,14 @@ "getTablesList": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: java.util.List", "getTablesOrBuilder": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:param index: int\n:return: io.deephaven.proto.backplane.grpc.ExportedTableCreationResponseOrBuilder", "getTablesOrBuilderList": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: java.util.List", - "getTitle": "string title = 1;\n\n:return: (java.lang.String) The title.", - "getTitleBytes": "string title = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", + "getTitle": "optional string title = 1;\n\n:return: (java.lang.String) The title.", + "getTitleBytes": "optional string title = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", "getTitleColor": "string title_color = 3;\n\n:return: (java.lang.String) The titleColor.", "getTitleColorBytes": "string title_color = 3;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleColor.", "getTitleFont": "string title_font = 2;\n\n:return: (java.lang.String) The titleFont.", "getTitleFontBytes": "string title_font = 2;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleFont.", "getUpdateInterval": "int64 update_interval = 7;\n\n:return: (long) The updateInterval.", - "hasTitle": "string title = 1;\n\n:return: (boolean) Whether the title field is set.", + "hasTitle": "optional string title = 1;\n\n:return: (boolean) Whether the title field is set.", "isInitialized": ":return: boolean", "mergeFrom": "*Overload 1* \n :param other: com.google.protobuf.Message\n :return: 
io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder\n \n*Overload 2* \n :param other: io.deephaven.proto.backplane.script.grpc.FigureDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder\n \n*Overload 3* \n :param input: com.google.protobuf.CodedInputStream\n :param extensionRegistry: com.google.protobuf.ExtensionRegistryLite\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", "mergeUnknownFields": ":param unknownFields: com.google.protobuf.UnknownFieldSet\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", @@ -69,8 +69,8 @@ "setRepeatedField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:param index: int\n:param value: java.lang.Object\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", "setRows": "int32 rows = 9;\n\n:param value: (int) - The rows to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "setTables": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n*Overload 1* \n :param index: int\n :param value: io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder\n \n*Overload 2* \n :param index: int\n :param builderForValue: io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse.Builder\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder", - "setTitle": "string title = 1;\n\n:param value: (java.lang.String) - The title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", - "setTitleBytes": "string title = 1;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", + "setTitle": "optional string title = 
1;\n\n:param value: (java.lang.String) - The title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", + "setTitleBytes": "optional string title = 1;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "setTitleColor": "string title_color = 3;\n\n:param value: (java.lang.String) - The titleColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "setTitleColorBytes": "string title_color = 3;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for titleColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", "setTitleFont": "string title_font = 2;\n\n:param value: (java.lang.String) - The titleFont to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.Builder) This builder for chaining.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor.json index c49291a1012..b1dd76ac4b6 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor.json @@ -32,14 +32,14 @@ "getSeriesOrBuilder": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:param index: int\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptorOrBuilder", "getSeriesOrBuilderList": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:return: java.util.List", "getShowLegend": 
"bool show_legend = 10;\n\n:return: (boolean) The showLegend.", - "getTitle": "string title = 7;\n\n:return: (java.lang.String) The title.", - "getTitleBytes": "string title = 7;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", + "getTitle": "optional string title = 7;\n\n:return: (java.lang.String) The title.", + "getTitleBytes": "optional string title = 7;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", "getTitleColor": "string title_color = 9;\n\n:return: (java.lang.String) The titleColor.", "getTitleColorBytes": "string title_color = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleColor.", "getTitleFont": "string title_font = 8;\n\n:return: (java.lang.String) The titleFont.", "getTitleFontBytes": "string title_font = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleFont.", "getUnknownFields": ":return: com.google.protobuf.UnknownFieldSet", - "hasTitle": "string title = 7;\n\n:return: (boolean) Whether the title field is set.", + "hasTitle": "optional string title = 7;\n\n:return: (boolean) Whether the title field is set.", "hashCode": ":return: int", "isInitialized": ":return: boolean", "newBuilder": "*Overload 1* \n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder\n \n*Overload 2* \n :param prototype: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor/Builder.json index b9ba0e121d5..9220890c4ee 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor/Builder.json +++ 
b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptor/Builder.json @@ -26,7 +26,7 @@ "clearRowspan": "int32 rowspan = 2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "clearSeries": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder", "clearShowLegend": "bool show_legend = 10;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", - "clearTitle": "string title = 7;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", + "clearTitle": "optional string title = 7;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "clearTitleColor": "string title_color = 9;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "clearTitleFont": "string title_font = 8;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "clone": ":return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder", @@ -64,13 +64,13 @@ "getSeriesOrBuilder": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:param index: int\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptorOrBuilder", "getSeriesOrBuilderList": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:return: java.util.List", "getShowLegend": "bool show_legend = 10;\n\n:return: (boolean) The showLegend.", - "getTitle": "string title = 7;\n\n:return: 
(java.lang.String) The title.", - "getTitleBytes": "string title = 7;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", + "getTitle": "optional string title = 7;\n\n:return: (java.lang.String) The title.", + "getTitleBytes": "optional string title = 7;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", "getTitleColor": "string title_color = 9;\n\n:return: (java.lang.String) The titleColor.", "getTitleColorBytes": "string title_color = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleColor.", "getTitleFont": "string title_font = 8;\n\n:return: (java.lang.String) The titleFont.", "getTitleFontBytes": "string title_font = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleFont.", - "hasTitle": "string title = 7;\n\n:return: (boolean) Whether the title field is set.", + "hasTitle": "optional string title = 7;\n\n:return: (boolean) Whether the title field is set.", "isInitialized": ":return: boolean", "mergeFrom": "*Overload 1* \n :param other: com.google.protobuf.Message\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder\n \n*Overload 2* \n :param other: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder\n \n*Overload 3* \n :param input: com.google.protobuf.CodedInputStream\n :param extensionRegistry: com.google.protobuf.ExtensionRegistryLite\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder", "mergeUnknownFields": ":param unknownFields: com.google.protobuf.UnknownFieldSet\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder", @@ -92,8 +92,8 @@ "setRowspan": "int32 rowspan = 2;\n\n:param value: (int) - The rowspan to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "setSeries": "repeated 
.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n*Overload 1* \n :param index: int\n :param value: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder\n \n*Overload 2* \n :param index: int\n :param builderForValue: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder", "setShowLegend": "bool show_legend = 10;\n\n:param value: (boolean) - The showLegend to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", - "setTitle": "string title = 7;\n\n:param value: (java.lang.String) - The title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", - "setTitleBytes": "string title = 7;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", + "setTitle": "optional string title = 7;\n\n:param value: (java.lang.String) - The title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", + "setTitleBytes": "optional string title = 7;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for title to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "setTitleColor": "string title_color = 9;\n\n:param value: (java.lang.String) - The titleColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "setTitleColorBytes": "string title_color = 9;\n\n:param value: 
(com.google.protobuf.ByteString) - The bytes for titleColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", "setTitleFont": "string title_font = 8;\n\n:param value: (java.lang.String) - The titleFont to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.Builder) This builder for chaining.", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptorOrBuilder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptorOrBuilder.json index 46a1d89731b..267b3b6b127 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptorOrBuilder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/ChartDescriptorOrBuilder.json @@ -26,13 +26,13 @@ "getSeriesOrBuilder": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:param index: int\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptorOrBuilder", "getSeriesOrBuilderList": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor series = 3;\n\n:return: java.util.List", "getShowLegend": "bool show_legend = 10;\n\n:return: (boolean) The showLegend.", - "getTitle": "string title = 7;\n\n:return: (java.lang.String) The title.", - "getTitleBytes": "string title = 7;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", + "getTitle": "optional string title = 7;\n\n:return: (java.lang.String) The title.", + "getTitleBytes": "optional string title = 7;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", "getTitleColor": "string title_color = 9;\n\n:return: (java.lang.String) The titleColor.", "getTitleColorBytes": "string title_color = 9;\n\n:return: 
(com.google.protobuf.ByteString) The bytes for titleColor.", "getTitleFont": "string title_font = 8;\n\n:return: (java.lang.String) The titleFont.", "getTitleFontBytes": "string title_font = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleFont.", - "hasTitle": "string title = 7;\n\n:return: (boolean) Whether the title field is set." + "hasTitle": "optional string title = 7;\n\n:return: (boolean) Whether the title field is set." }, "path": "io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptorOrBuilder", "typeName": "interface" diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor.json index d8d93454bd0..a52a85edf67 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor.json @@ -13,14 +13,14 @@ "getGradientVisible": "bool gradient_visible = 5;\n\n:return: (boolean) The gradientVisible.", "getLineColor": "string line_color = 6;\n\n:return: (java.lang.String) The lineColor.", "getLineColorBytes": "string line_color = 6;\n\n:return: (com.google.protobuf.ByteString) The bytes for lineColor.", - "getLinesVisible": "bool lines_visible = 3;\n\n:return: (boolean) The linesVisible.", + "getLinesVisible": "optional bool lines_visible = 3;\n\n:return: (boolean) The linesVisible.", "getName": "string name = 2;\n\n:return: (java.lang.String) The name.", "getNameBytes": "string name = 2;\n\n:return: (com.google.protobuf.ByteString) The bytes for name.", "getParserForType": ":return: com.google.protobuf.Parser", "getPlotStyle": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: 
(io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle) The plotStyle.", "getPlotStyleValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: (int) The enum numeric value on the wire for plotStyle.", - "getPointLabelFormat": "string point_label_format = 8;\n\n:return: (java.lang.String) The pointLabelFormat.", - "getPointLabelFormatBytes": "string point_label_format = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for pointLabelFormat.", + "getPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (java.lang.String) The pointLabelFormat.", + "getPointLabelFormatBytes": "optional string point_label_format = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for pointLabelFormat.", "getSerializedSize": ":return: int", "getShape": "string shape = 14;\n\n:return: (java.lang.String) The shape.", "getShapeBytes": "string shape = 14;\n\n:return: (com.google.protobuf.ByteString) The bytes for shape.", @@ -28,19 +28,19 @@ "getShapeColorBytes": "string shape_color = 13;\n\n:return: (com.google.protobuf.ByteString) The bytes for shapeColor.", "getShapeLabel": "string shape_label = 11;\n\n:return: (java.lang.String) The shapeLabel.", "getShapeLabelBytes": "string shape_label = 11;\n\n:return: (com.google.protobuf.ByteString) The bytes for shapeLabel.", - "getShapeSize": "double shape_size = 12;\n\n:return: (double) The shapeSize.", - "getShapesVisible": "bool shapes_visible = 4;\n\n:return: (boolean) The shapesVisible.", + "getShapeSize": "optional double shape_size = 12;\n\n:return: (double) The shapeSize.", + "getShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (boolean) The shapesVisible.", "getUnknownFields": ":return: com.google.protobuf.UnknownFieldSet", - "getXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:return: (java.lang.String) The xToolTipPattern.", - "getXToolTipPatternBytes": "string x_tool_tip_pattern = 9;\n\n:return: 
(com.google.protobuf.ByteString) The bytes for xToolTipPattern.", - "getYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (java.lang.String) The yToolTipPattern.", - "getYToolTipPatternBytes": "string y_tool_tip_pattern = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for yToolTipPattern.", - "hasLinesVisible": "bool lines_visible = 3;\n\n:return: (boolean) Whether the linesVisible field is set.", - "hasPointLabelFormat": "string point_label_format = 8;\n\n:return: (boolean) Whether the pointLabelFormat field is set.", - "hasShapeSize": "double shape_size = 12;\n\n:return: (boolean) Whether the shapeSize field is set.", - "hasShapesVisible": "bool shapes_visible = 4;\n\n:return: (boolean) Whether the shapesVisible field is set.", - "hasXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:return: (boolean) Whether the xToolTipPattern field is set.", - "hasYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (boolean) Whether the yToolTipPattern field is set.", + "getXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (java.lang.String) The xToolTipPattern.", + "getXToolTipPatternBytes": "optional string x_tool_tip_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for xToolTipPattern.", + "getYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (java.lang.String) The yToolTipPattern.", + "getYToolTipPatternBytes": "optional string y_tool_tip_pattern = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for yToolTipPattern.", + "hasLinesVisible": "optional bool lines_visible = 3;\n\n:return: (boolean) Whether the linesVisible field is set.", + "hasPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (boolean) Whether the pointLabelFormat field is set.", + "hasShapeSize": "optional double shape_size = 12;\n\n:return: (boolean) Whether the shapeSize field is set.", + "hasShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (boolean) Whether the 
shapesVisible field is set.", + "hasXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (boolean) Whether the xToolTipPattern field is set.", + "hasYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (boolean) Whether the yToolTipPattern field is set.", "hashCode": ":return: int", "isInitialized": ":return: boolean", "newBuilder": "*Overload 1* \n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder\n \n*Overload 2* \n :param prototype: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor/Builder.json index 7c762a5c9ca..0eae3f40873 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor/Builder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptor/Builder.json @@ -12,18 +12,18 @@ "clearField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", "clearGradientVisible": "bool gradient_visible = 5;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clearLineColor": "string line_color = 6;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "clearLinesVisible": "bool lines_visible = 3;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "clearLinesVisible": "optional bool 
lines_visible = 3;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clearName": "string name = 2;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clearOneof": ":param oneof: com.google.protobuf.Descriptors.OneofDescriptor\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", "clearPlotStyle": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "clearPointLabelFormat": "string point_label_format = 8;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "clearPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clearShape": "string shape = 14;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clearShapeColor": "string shape_color = 13;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clearShapeLabel": "string shape_label = 11;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "clearShapeSize": "double shape_size = 12;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "clearShapesVisible": "bool shapes_visible = 4;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "clearXToolTipPattern": "string x_tool_tip_pattern = 
9;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "clearYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "clearShapeSize": "optional double shape_size = 12;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "clearShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "clearXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "clearYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "clone": ":return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", "getDataSources": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor data_sources = 15;\n\n:param index: int\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor", "getDataSourcesBuilder": "repeated .io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor data_sources = 15;\n\n:param index: int\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.Builder", @@ -38,31 +38,31 @@ "getGradientVisible": "bool gradient_visible = 5;\n\n:return: (boolean) The gradientVisible.", "getLineColor": "string line_color = 6;\n\n:return: (java.lang.String) The lineColor.", "getLineColorBytes": "string line_color = 6;\n\n:return: (com.google.protobuf.ByteString) The bytes for lineColor.", - "getLinesVisible": "bool 
lines_visible = 3;\n\n:return: (boolean) The linesVisible.", + "getLinesVisible": "optional bool lines_visible = 3;\n\n:return: (boolean) The linesVisible.", "getName": "string name = 2;\n\n:return: (java.lang.String) The name.", "getNameBytes": "string name = 2;\n\n:return: (com.google.protobuf.ByteString) The bytes for name.", "getPlotStyle": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle) The plotStyle.", "getPlotStyleValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: (int) The enum numeric value on the wire for plotStyle.", - "getPointLabelFormat": "string point_label_format = 8;\n\n:return: (java.lang.String) The pointLabelFormat.", - "getPointLabelFormatBytes": "string point_label_format = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for pointLabelFormat.", + "getPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (java.lang.String) The pointLabelFormat.", + "getPointLabelFormatBytes": "optional string point_label_format = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for pointLabelFormat.", "getShape": "string shape = 14;\n\n:return: (java.lang.String) The shape.", "getShapeBytes": "string shape = 14;\n\n:return: (com.google.protobuf.ByteString) The bytes for shape.", "getShapeColor": "string shape_color = 13;\n\n:return: (java.lang.String) The shapeColor.", "getShapeColorBytes": "string shape_color = 13;\n\n:return: (com.google.protobuf.ByteString) The bytes for shapeColor.", "getShapeLabel": "string shape_label = 11;\n\n:return: (java.lang.String) The shapeLabel.", "getShapeLabelBytes": "string shape_label = 11;\n\n:return: (com.google.protobuf.ByteString) The bytes for shapeLabel.", - "getShapeSize": "double shape_size = 12;\n\n:return: (double) The shapeSize.", - "getShapesVisible": "bool shapes_visible = 4;\n\n:return: 
(boolean) The shapesVisible.", - "getXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:return: (java.lang.String) The xToolTipPattern.", - "getXToolTipPatternBytes": "string x_tool_tip_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for xToolTipPattern.", - "getYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (java.lang.String) The yToolTipPattern.", - "getYToolTipPatternBytes": "string y_tool_tip_pattern = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for yToolTipPattern.", - "hasLinesVisible": "bool lines_visible = 3;\n\n:return: (boolean) Whether the linesVisible field is set.", - "hasPointLabelFormat": "string point_label_format = 8;\n\n:return: (boolean) Whether the pointLabelFormat field is set.", - "hasShapeSize": "double shape_size = 12;\n\n:return: (boolean) Whether the shapeSize field is set.", - "hasShapesVisible": "bool shapes_visible = 4;\n\n:return: (boolean) Whether the shapesVisible field is set.", - "hasXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:return: (boolean) Whether the xToolTipPattern field is set.", - "hasYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (boolean) Whether the yToolTipPattern field is set.", + "getShapeSize": "optional double shape_size = 12;\n\n:return: (double) The shapeSize.", + "getShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (boolean) The shapesVisible.", + "getXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (java.lang.String) The xToolTipPattern.", + "getXToolTipPatternBytes": "optional string x_tool_tip_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for xToolTipPattern.", + "getYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (java.lang.String) The yToolTipPattern.", + "getYToolTipPatternBytes": "optional string y_tool_tip_pattern = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for yToolTipPattern.", + "hasLinesVisible": "optional bool lines_visible 
= 3;\n\n:return: (boolean) Whether the linesVisible field is set.", + "hasPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (boolean) Whether the pointLabelFormat field is set.", + "hasShapeSize": "optional double shape_size = 12;\n\n:return: (boolean) Whether the shapeSize field is set.", + "hasShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (boolean) Whether the shapesVisible field is set.", + "hasXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (boolean) Whether the xToolTipPattern field is set.", + "hasYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (boolean) Whether the yToolTipPattern field is set.", "isInitialized": ":return: boolean", "mergeFrom": "*Overload 1* \n :param other: com.google.protobuf.Message\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder\n \n*Overload 2* \n :param other: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder\n \n*Overload 3* \n :param input: com.google.protobuf.CodedInputStream\n :param extensionRegistry: com.google.protobuf.ExtensionRegistryLite\n :return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", "mergeUnknownFields": ":param unknownFields: com.google.protobuf.UnknownFieldSet\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", @@ -72,13 +72,13 @@ "setGradientVisible": "bool gradient_visible = 5;\n\n:param value: (boolean) - The gradientVisible to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setLineColor": "string line_color = 6;\n\n:param value: (java.lang.String) - The lineColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", 
"setLineColorBytes": "string line_color = 6;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for lineColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setLinesVisible": "bool lines_visible = 3;\n\n:param value: (boolean) - The linesVisible to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setLinesVisible": "optional bool lines_visible = 3;\n\n:param value: (boolean) - The linesVisible to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setName": "string name = 2;\n\n:param value: (java.lang.String) - The name to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setNameBytes": "string name = 2;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for name to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setPlotStyle": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:param value: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle) - The plotStyle to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setPlotStyleValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:param value: (int) - The enum numeric value on the wire for plotStyle to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setPointLabelFormat": "string point_label_format = 8;\n\n:param value: (java.lang.String) - The pointLabelFormat to set.\n:return: 
(io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setPointLabelFormatBytes": "string point_label_format = 8;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for pointLabelFormat to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setPointLabelFormat": "optional string point_label_format = 8;\n\n:param value: (java.lang.String) - The pointLabelFormat to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setPointLabelFormatBytes": "optional string point_label_format = 8;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for pointLabelFormat to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setRepeatedField": ":param field: com.google.protobuf.Descriptors.FieldDescriptor\n:param index: int\n:param value: java.lang.Object\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", "setShape": "string shape = 14;\n\n:param value: (java.lang.String) - The shape to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setShapeBytes": "string shape = 14;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for shape to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", @@ -86,13 +86,13 @@ "setShapeColorBytes": "string shape_color = 13;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for shapeColor to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setShapeLabel": "string shape_label = 11;\n\n:param value: (java.lang.String) - The shapeLabel to 
set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setShapeLabelBytes": "string shape_label = 11;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for shapeLabel to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setShapeSize": "double shape_size = 12;\n\n:param value: (double) - The shapeSize to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setShapesVisible": "bool shapes_visible = 4;\n\n:param value: (boolean) - The shapesVisible to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setShapeSize": "optional double shape_size = 12;\n\n:param value: (double) - The shapeSize to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setShapesVisible": "optional bool shapes_visible = 4;\n\n:param value: (boolean) - The shapesVisible to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", "setUnknownFields": ":param unknownFields: com.google.protobuf.UnknownFieldSet\n:return: io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", - "setXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:param value: (java.lang.String) - The xToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setXToolTipPatternBytes": "string x_tool_tip_pattern = 9;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for xToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - 
"setYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:param value: (java.lang.String) - The yToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", - "setYToolTipPatternBytes": "string y_tool_tip_pattern = 10;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for yToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining." + "setXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:param value: (java.lang.String) - The xToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setXToolTipPatternBytes": "optional string x_tool_tip_pattern = 9;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for xToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:param value: (java.lang.String) - The yToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining.", + "setYToolTipPatternBytes": "optional string y_tool_tip_pattern = 10;\n\n:param value: (com.google.protobuf.ByteString) - The bytes for yToolTipPattern to set.\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder) This builder for chaining." 
}, "path": "io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.Builder", "text": "Protobuf type io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor", diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptorOrBuilder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptorOrBuilder.json index 0772c203c78..3d346dddd72 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptorOrBuilder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptor/SeriesDescriptorOrBuilder.json @@ -9,31 +9,31 @@ "getGradientVisible": "bool gradient_visible = 5;\n\n:return: (boolean) The gradientVisible.", "getLineColor": "string line_color = 6;\n\n:return: (java.lang.String) The lineColor.", "getLineColorBytes": "string line_color = 6;\n\n:return: (com.google.protobuf.ByteString) The bytes for lineColor.", - "getLinesVisible": "bool lines_visible = 3;\n\n:return: (boolean) The linesVisible.", + "getLinesVisible": "optional bool lines_visible = 3;\n\n:return: (boolean) The linesVisible.", "getName": "string name = 2;\n\n:return: (java.lang.String) The name.", "getNameBytes": "string name = 2;\n\n:return: (com.google.protobuf.ByteString) The bytes for name.", "getPlotStyle": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: (io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle) The plotStyle.", "getPlotStyleValue": ".io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle plot_style = 1;\n\n:return: (int) The enum numeric value on the wire for plotStyle.", - "getPointLabelFormat": "string point_label_format = 8;\n\n:return: (java.lang.String) The pointLabelFormat.", - "getPointLabelFormatBytes": "string point_label_format = 
8;\n\n:return: (com.google.protobuf.ByteString) The bytes for pointLabelFormat.", + "getPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (java.lang.String) The pointLabelFormat.", + "getPointLabelFormatBytes": "optional string point_label_format = 8;\n\n:return: (com.google.protobuf.ByteString) The bytes for pointLabelFormat.", "getShape": "string shape = 14;\n\n:return: (java.lang.String) The shape.", "getShapeBytes": "string shape = 14;\n\n:return: (com.google.protobuf.ByteString) The bytes for shape.", "getShapeColor": "string shape_color = 13;\n\n:return: (java.lang.String) The shapeColor.", "getShapeColorBytes": "string shape_color = 13;\n\n:return: (com.google.protobuf.ByteString) The bytes for shapeColor.", "getShapeLabel": "string shape_label = 11;\n\n:return: (java.lang.String) The shapeLabel.", "getShapeLabelBytes": "string shape_label = 11;\n\n:return: (com.google.protobuf.ByteString) The bytes for shapeLabel.", - "getShapeSize": "double shape_size = 12;\n\n:return: (double) The shapeSize.", - "getShapesVisible": "bool shapes_visible = 4;\n\n:return: (boolean) The shapesVisible.", - "getXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:return: (java.lang.String) The xToolTipPattern.", - "getXToolTipPatternBytes": "string x_tool_tip_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for xToolTipPattern.", - "getYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (java.lang.String) The yToolTipPattern.", - "getYToolTipPatternBytes": "string y_tool_tip_pattern = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for yToolTipPattern.", - "hasLinesVisible": "bool lines_visible = 3;\n\n:return: (boolean) Whether the linesVisible field is set.", - "hasPointLabelFormat": "string point_label_format = 8;\n\n:return: (boolean) Whether the pointLabelFormat field is set.", - "hasShapeSize": "double shape_size = 12;\n\n:return: (boolean) Whether the shapeSize field is set.", - "hasShapesVisible": "bool 
shapes_visible = 4;\n\n:return: (boolean) Whether the shapesVisible field is set.", - "hasXToolTipPattern": "string x_tool_tip_pattern = 9;\n\n:return: (boolean) Whether the xToolTipPattern field is set.", - "hasYToolTipPattern": "string y_tool_tip_pattern = 10;\n\n:return: (boolean) Whether the yToolTipPattern field is set." + "getShapeSize": "optional double shape_size = 12;\n\n:return: (double) The shapeSize.", + "getShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (boolean) The shapesVisible.", + "getXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (java.lang.String) The xToolTipPattern.", + "getXToolTipPatternBytes": "optional string x_tool_tip_pattern = 9;\n\n:return: (com.google.protobuf.ByteString) The bytes for xToolTipPattern.", + "getYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (java.lang.String) The yToolTipPattern.", + "getYToolTipPatternBytes": "optional string y_tool_tip_pattern = 10;\n\n:return: (com.google.protobuf.ByteString) The bytes for yToolTipPattern.", + "hasLinesVisible": "optional bool lines_visible = 3;\n\n:return: (boolean) Whether the linesVisible field is set.", + "hasPointLabelFormat": "optional string point_label_format = 8;\n\n:return: (boolean) Whether the pointLabelFormat field is set.", + "hasShapeSize": "optional double shape_size = 12;\n\n:return: (boolean) Whether the shapeSize field is set.", + "hasShapesVisible": "optional bool shapes_visible = 4;\n\n:return: (boolean) Whether the shapesVisible field is set.", + "hasXToolTipPattern": "optional string x_tool_tip_pattern = 9;\n\n:return: (boolean) Whether the xToolTipPattern field is set.", + "hasYToolTipPattern": "optional string y_tool_tip_pattern = 10;\n\n:return: (boolean) Whether the yToolTipPattern field is set." 
}, "path": "io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptorOrBuilder", "typeName": "interface" diff --git a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptorOrBuilder.json b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptorOrBuilder.json index 82497c3e7b2..c56ed31bf92 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptorOrBuilder.json +++ b/Integrations/python/deephaven/doc/io/deephaven/proto/backplane/script/grpc/FigureDescriptorOrBuilder.json @@ -17,14 +17,14 @@ "getTablesList": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: java.util.List", "getTablesOrBuilder": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:param index: int\n:return: io.deephaven.proto.backplane.grpc.ExportedTableCreationResponseOrBuilder", "getTablesOrBuilderList": "repeated .io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse tables = 11;\n\n:return: java.util.List", - "getTitle": "string title = 1;\n\n:return: (java.lang.String) The title.", - "getTitleBytes": "string title = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", + "getTitle": "optional string title = 1;\n\n:return: (java.lang.String) The title.", + "getTitleBytes": "optional string title = 1;\n\n:return: (com.google.protobuf.ByteString) The bytes for title.", "getTitleColor": "string title_color = 3;\n\n:return: (java.lang.String) The titleColor.", "getTitleColorBytes": "string title_color = 3;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleColor.", "getTitleFont": "string title_font = 2;\n\n:return: (java.lang.String) The titleFont.", "getTitleFontBytes": "string title_font = 2;\n\n:return: (com.google.protobuf.ByteString) The bytes for titleFont.", "getUpdateInterval": "int64 update_interval = 7;\n\n:return: (long) The 
updateInterval.", - "hasTitle": "string title = 1;\n\n:return: (boolean) Whether the title field is set." + "hasTitle": "optional string title = 1;\n\n:return: (boolean) Whether the title field is set." }, "path": "io.deephaven.proto.backplane.script.grpc.FigureDescriptorOrBuilder", "typeName": "interface" diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/FullAggregationTable.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/FullAggregationTable.json deleted file mode 100644 index 32c304f55a6..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/FullAggregationTable.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "className": "io.deephaven.qst.table.FullAggregationTable", - "methods": { - "builder": ":return: io.deephaven.qst.table.FullAggregationTable.Builder", - "key": ":return: io.deephaven.api.agg.key.Key", - "walk": "Note: Java generics information - \n\n:param visitor: V\n:return: V" - }, - "path": "io.deephaven.qst.table.FullAggregationTable", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/FullAggregationTable/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/FullAggregationTable/Builder.json deleted file mode 100644 index 535d4a7b1f3..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/FullAggregationTable/Builder.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "className": "io.deephaven.qst.table.FullAggregationTable$Builder", - "methods": { - "addAllColumns": ":param elements: java.lang.Iterable\n:return: io.deephaven.qst.table.FullAggregationTable.Builder", - "addColumns": "*Overload 1* \n :param element: io.deephaven.api.Selectable\n :return: io.deephaven.qst.table.FullAggregationTable.Builder\n \n*Overload 2* \n :param elements: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.FullAggregationTable.Builder", - "build": ":return: io.deephaven.qst.table.FullAggregationTable", - 
"key": ":param key: io.deephaven.api.agg.key.Key\n:return: io.deephaven.qst.table.FullAggregationTable.Builder", - "parent": ":param parent: io.deephaven.qst.table.TableSpec\n:return: io.deephaven.qst.table.FullAggregationTable.Builder" - }, - "path": "io.deephaven.qst.table.FullAggregationTable.Builder", - "typeName": "interface" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/GroupByTable.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/GroupByTable.json deleted file mode 100644 index a7fc877e9be..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/GroupByTable.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "className": "io.deephaven.qst.table.GroupByTable", - "methods": { - "builder": ":return: io.deephaven.qst.table.GroupByTable.Builder", - "columns": ":return: java.util.List", - "parent": ":return: io.deephaven.qst.table.TableSpec", - "walk": "Note: Java generics information - \n\n:param visitor: V\n:return: V" - }, - "path": "io.deephaven.qst.table.GroupByTable", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/GroupByTable/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/GroupByTable/Builder.json deleted file mode 100644 index a79d2f94790..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/GroupByTable/Builder.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "className": "io.deephaven.qst.table.GroupByTable$Builder", - "methods": { - "addAllColumns": ":param elements: java.lang.Iterable\n:return: io.deephaven.qst.table.GroupByTable.Builder", - "addColumns": "*Overload 1* \n :param element: io.deephaven.api.Selectable\n :return: io.deephaven.qst.table.GroupByTable.Builder\n \n*Overload 2* \n :param elements: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.GroupByTable.Builder", - "build": ":return: io.deephaven.qst.table.GroupByTable", - "parent": 
":param parent: io.deephaven.qst.table.TableSpec\n:return: io.deephaven.qst.table.GroupByTable.Builder" - }, - "path": "io.deephaven.qst.table.GroupByTable.Builder", - "typeName": "interface" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/SingleAggregationTable.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/SingleAggregationTable.json deleted file mode 100644 index 922997dff05..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/SingleAggregationTable.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "className": "io.deephaven.qst.table.SingleAggregationTable", - "methods": { - "builder": ":return: io.deephaven.qst.table.SingleAggregationTable.Builder", - "key": ":return: io.deephaven.api.agg.key.Key", - "walk": "Note: Java generics information - \n\n:param visitor: V\n:return: V" - }, - "path": "io.deephaven.qst.table.SingleAggregationTable", - "typeName": "class" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/SingleAggregationTable/Builder.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/SingleAggregationTable/Builder.json deleted file mode 100644 index cfa1548f4df..00000000000 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/SingleAggregationTable/Builder.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "className": "io.deephaven.qst.table.SingleAggregationTable$Builder", - "methods": { - "addAllColumns": ":param elements: java.lang.Iterable\n:return: io.deephaven.qst.table.SingleAggregationTable.Builder", - "addColumns": "*Overload 1* \n :param element: io.deephaven.api.Selectable\n :return: io.deephaven.qst.table.SingleAggregationTable.Builder\n \n*Overload 2* \n :param elements: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.SingleAggregationTable.Builder", - "build": ":return: io.deephaven.qst.table.SingleAggregationTable", - "key": ":param key: io.deephaven.api.agg.key.Key\n:return: 
io.deephaven.qst.table.SingleAggregationTable.Builder", - "parent": ":param parent: io.deephaven.qst.table.TableSpec\n:return: io.deephaven.qst.table.SingleAggregationTable.Builder" - }, - "path": "io.deephaven.qst.table.SingleAggregationTable.Builder", - "typeName": "interface" -} \ No newline at end of file diff --git a/Integrations/python/deephaven/doc/io/deephaven/qst/table/TableBase.json b/Integrations/python/deephaven/doc/io/deephaven/qst/table/TableBase.json index 6742cbfa167..ff83031b683 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/qst/table/TableBase.json +++ b/Integrations/python/deephaven/doc/io/deephaven/qst/table/TableBase.json @@ -1,15 +1,22 @@ { "className": "io.deephaven.qst.table.TableBase", "methods": { + "absSumBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "aggAllBy": "*Overload 1* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param spec: io.deephaven.api.agg.spec.AggSpec\n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "aggBy": "*Overload 1* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :return: io.deephaven.qst.table.AggregationTable\n \n*Overload 2* \n :param 
aggregation: io.deephaven.api.agg.Aggregation\n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregationTable\n \n*Overload 3* \n :param aggregation: io.deephaven.api.agg.Aggregation\n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregationTable\n \n*Overload 4* \n :param aggregations: java.util.Collection\n :return: io.deephaven.qst.table.AggregationTable\n \n*Overload 5* \n :param aggregations: java.util.Collection\n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregationTable\n \n*Overload 6* \n :param aggregations: java.util.Collection\n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregationTable", "aj": "**Incompatible overloads text - text from the first overload:**\n\nPerform an as-of join with the rightTable.\n\n \n Delegates to TableOperations.aj(Object, Collection, Collection, AsOfJoinRule).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :return: (io.deephaven.qst.table.AsOfJoinTable) a new table joined according to the specification in columnsToMatch\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the left side that need to be added to the right\n side as a result of the match.\n :return: (io.deephaven.qst.table.AsOfJoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n 
:param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (io.deephaven.qst.table.AsOfJoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param asOfJoinRule: (io.deephaven.api.AsOfJoinRule) - The binary search operator for the last match pair.\n :return: (io.deephaven.qst.table.AsOfJoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd", + "avgBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "countBy": "*Overload 1* \n :param countColumnName: java.lang.String\n :return: io.deephaven.qst.table.CountByTable\n \n*Overload 2* \n :param countColumnName: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.CountByTable\n \n*Overload 3* \n :param countColumnName: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.CountByTable\n \n*Overload 4* \n :param countColumnName: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.CountByTable", "exactJoin": "**Incompatible 
overloads text - text from the first overload:**\n\nPerform an exact-join with the rightTable.\n\n \n Delegates to TableOperations.exactJoin(Object, Collection, Collection).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (io.deephaven.qst.table.ExactJoinTable) the exact-joined table\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (io.deephaven.qst.table.ExactJoinTable) the exact-joined table\n \n*Overload 3* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (io.deephaven.qst.table.ExactJoinTable) the exact-joined table", + "firstBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "groupBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: 
java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "head": ":param size: long\n:return: io.deephaven.qst.table.HeadTable", "join": "**Incompatible overloads text - text from the first overload:**\n\nPerform a cross join with the rightTable.\n\n \n Delegates to TableOperations.join(Object, Collection, Collection, int).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (io.deephaven.qst.table.JoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param reserveBits: (int) - The number of bits to reserve for rightTable groups.\n :return: (io.deephaven.qst.table.JoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (io.deephaven.qst.table.JoinTable) a new table joined according to the specification in columnsToMatch and includes all non-key-columns from\n the right table\n \n*Overload 4* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on 
the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (io.deephaven.qst.table.JoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd", + "lastBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "logic": ":return: io.deephaven.qst.TableCreationLogic", + "maxBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", + "medianBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", + "minBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n 
:return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "naturalJoin": "**Incompatible overloads text - text from the first overload:**\n\nPerform a natural-join with the rightTable.\n\n \n Delegates to TableOperations.naturalJoin(Object, Collection, Collection).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (io.deephaven.qst.table.NaturalJoinTable) the natural-joined table\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :return: (io.deephaven.qst.table.NaturalJoinTable) the natural-joined table\n \n*Overload 3* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\")\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the right side that need to be added to the left\n side as a result of the match.\n :return: (io.deephaven.qst.table.NaturalJoinTable) the natural-joined table", "raj": "**Incompatible overloads text - text from the first overload:**\n\nPerform a reverse-as-of join with the rightTable.\n\n \n Delegates to TableOperations.raj(Object, Collection, Collection, ReverseAsOfJoinRule).\n\n*Overload 1* \n 
:param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :return: (io.deephaven.qst.table.ReverseAsOfJoinTable) a new table joined according to the specification in columnsToMatch\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.lang.String) - A comma separated list of match conditions (\"leftColumn=rightColumn\" or\n \"columnFoundInBoth\").\n :param columnsToAdd: (java.lang.String) - A comma separated list with the columns from the left side that need to be added to the right\n side as a result of the match.\n :return: (io.deephaven.qst.table.ReverseAsOfJoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 3* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :return: (io.deephaven.qst.table.ReverseAsOfJoinTable) a new table joined according to the specification in columnsToMatch and columnsToAdd\n \n*Overload 4* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - The right side table on the join.\n :param columnsToMatch: (java.util.Collection) - The match pair conditions.\n :param columnsToAdd: (java.util.Collection) - The columns from the right side that need to be added to the left side as a result of the\n match.\n :param reverseAsOfJoinRule: (io.deephaven.api.ReverseAsOfJoinRule) - The binary search operator for the last match pair.\n :return: (io.deephaven.qst.table.ReverseAsOfJoinTable) a new table joined according to the specification in columnsToMatch and 
columnsToAdd", "reverse": ":return: io.deephaven.qst.table.ReverseTable", @@ -18,15 +25,20 @@ "snapshot": "**Incompatible overloads text - text from the first overload:**\n\nSnapshot baseTable, triggered by this table, and return a new table as a result.\n\n \n Delegates to TableOperations.snapshot(Object, boolean, Collection).\n\n*Overload 1* \n :param baseTable: (io.deephaven.qst.table.TableSpec) - The table to be snapshotted\n :param stampColumns: (java.lang.String...) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (io.deephaven.qst.table.SnapshotTable) The result table\n \n*Overload 2* \n :param baseTable: (io.deephaven.qst.table.TableSpec) - The table to be snapshotted\n :param doInitialSnapshot: (boolean) - Take the first snapshot now (otherwise wait for a change event)\n :param stampColumns: (java.lang.String...) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (io.deephaven.qst.table.SnapshotTable) The result table\n \n*Overload 3* \n :param baseTable: (io.deephaven.qst.table.TableSpec) - The table to be snapshotted\n :param doInitialSnapshot: (boolean) - Take the first snapshot now (otherwise wait for a change event)\n :param stampColumns: (java.util.Collection) - The columns forming the \"snapshot key\", i.e. some subset of this Table's columns to be\n included in the result at snapshot time. 
As a special case, an empty stampColumns is taken to mean\n \"include all columns\".\n :return: (io.deephaven.qst.table.SnapshotTable) The result table", "sort": "*Overload 1* \n :param columnsToSortBy: java.lang.String...\n :return: io.deephaven.qst.table.SortTable\n \n*Overload 2* \n :param columnsToSortBy: java.util.Collection\n :return: io.deephaven.qst.table.SortTable", "sortDescending": ":param columnsToSortBy: java.lang.String...\n:return: io.deephaven.qst.table.SortTable", + "stdBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", + "sumBy": "*Overload 1* \n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "tail": ":param size: long\n:return: io.deephaven.qst.table.TailTable", "toString": ":return: java.lang.String", "update": "*Overload 1* \n :param columns: java.lang.String...\n :return: io.deephaven.qst.table.UpdateTable\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: io.deephaven.qst.table.UpdateTable", "updateView": "*Overload 1* \n :param columns: java.lang.String...\n :return: io.deephaven.qst.table.UpdateViewTable\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: io.deephaven.qst.table.UpdateViewTable", + "varBy": "*Overload 1* \n :return: 
io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "view": "*Overload 1* \n :param columns: java.lang.String...\n :return: io.deephaven.qst.table.ViewTable\n \n*Overload 2* \n :param columns: java.util.Collection\n :return: io.deephaven.qst.table.ViewTable", "walk": "Note: Java generics information - \n\n:param visitor: V\n:return: V", + "wavgBy": "*Overload 1* \n :param weightColumn: java.lang.String\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param weightColumn: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable", "where": "*Overload 1* \n :param filters: java.lang.String...\n :return: io.deephaven.qst.table.WhereTable\n \n*Overload 2* \n :param filters: java.util.Collection\n :return: io.deephaven.qst.table.WhereTable", "whereIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values in the rightTable.\n\n \n Delegates to TableOperations.whereIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - the filtering table.\n :param columnsToMatch: (java.lang.String...) 
- the columns to match between the two tables\n :return: (io.deephaven.qst.table.WhereInTable) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (io.deephaven.qst.table.WhereInTable) a new table filtered on right table", - "whereNotIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values not in the rightTable.\n\n \n Delegates to TableOperations.whereNotIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - the filtering table.\n :param columnsToMatch: (java.lang.String...) - the columns to match between the two tables\n :return: (io.deephaven.qst.table.WhereNotInTable) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (io.deephaven.qst.table.WhereNotInTable) a new table filtered on right table" + "whereNotIn": "**Incompatible overloads text - text from the first overload:**\n\nFilters this table based on the set of values not in the rightTable.\n\n \n Delegates to TableOperations.whereNotIn(Object, Collection).\n\n*Overload 1* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - the filtering table.\n :param columnsToMatch: (java.lang.String...) 
- the columns to match between the two tables\n :return: (io.deephaven.qst.table.WhereNotInTable) a new table filtered on right table\n \n*Overload 2* \n :param rightTable: (io.deephaven.qst.table.TableSpec) - the filtering table.\n :param columnsToMatch: (java.util.Collection) - the columns to match between the two tables\n :return: (io.deephaven.qst.table.WhereNotInTable) a new table filtered on right table", + "wsumBy": "*Overload 1* \n :param weightColumn: java.lang.String\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 2* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.lang.String...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 3* \n :param weightColumn: java.lang.String\n :param groupByColumns: io.deephaven.api.Selectable...\n :return: io.deephaven.qst.table.AggregateAllByTable\n \n*Overload 4* \n :param weightColumn: java.lang.String\n :param groupByColumns: java.util.Collection\n :return: io.deephaven.qst.table.AggregateAllByTable" }, "path": "io.deephaven.qst.table.TableBase", "typeName": "class" diff --git a/Integrations/python/deephaven/doc/io/deephaven/util/datastructures/SimpleReferenceManager.json b/Integrations/python/deephaven/doc/io/deephaven/util/datastructures/SimpleReferenceManager.json index 626f18f6b7d..34c7bd3fea9 100644 --- a/Integrations/python/deephaven/doc/io/deephaven/util/datastructures/SimpleReferenceManager.json +++ b/Integrations/python/deephaven/doc/io/deephaven/util/datastructures/SimpleReferenceManager.json @@ -9,7 +9,8 @@ "isEmpty": "Return true if the list is empty. 
Does not check for cleared references.\n\n:return: (boolean) true if the list is empty.", "remove": "Remove item from the list if present according to reference equality (==), and also any cleared\n references.\n\n:param item: (SimpleReferenceManager.T) - the item to remove.\n:return: (SimpleReferenceManager.T) The item if it was removed, else null", "removeAll": "Remove items in the collection from the list, and also any cleared references.\n\n:param items: (java.util.Collection) - the items to remove.", - "removeIf": "Retrieve all encountered items that satisfy a filter, while also removing any cleared references.\n\n:param filter: (java.util.function.Predicate) - The filter to decide if a valid item should be removed\n:return: (boolean) Whether we succeeded in removing anything" + "removeIf": "Retrieve all encountered items that satisfy a filter, while also removing any cleared references.\n\n:param filter: (java.util.function.Predicate) - The filter to decide if a valid item should be removed\n:return: (boolean) Whether we succeeded in removing anything", + "size": "Return the number of valid references in the list.\n\n Note that each reference is checked for validity, making this operation linear in the number of references.\n\n:return: (int) the number of valid references in the list" }, "path": "io.deephaven.util.datastructures.SimpleReferenceManager", "text": "A helper for managing a list of References. 
It hides the internal management of expired references and provides for\n iteration over the valid ones", diff --git a/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java b/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java index 673914b5cc7..928138931f2 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java @@ -1,6 +1,7 @@ package io.deephaven.util.datastructures; import io.deephaven.base.reference.SimpleReference; +import org.apache.commons.lang3.mutable.MutableInt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -195,6 +196,19 @@ public boolean isEmpty() { return references.isEmpty(); } + /** + * Return the number of valid references in the list. + * + * Note that each reference is checked for validity, making this operation linear in the number of references. + * + * @return the number of valid references in the list + */ + public int size() { + final MutableInt size = new MutableInt(0); + forEach((ref, source) -> size.increment()); + return size.intValue(); + } + /** * Clear the list of references. */ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationContext.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationContext.java index 035cb6b56f0..54d1477c075 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationContext.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/AggregationContext.java @@ -46,6 +46,11 @@ class AggregationContext { */ private final boolean requiresIndices; + /** + * Does any operator require runs to be found? See {@link IterativeChunkedAggregationOperator#requiresRunFinds()}. + */ + private final boolean requiresRunFinds; + /** * True if slots that are removed and then reincarnated should be modified. 
*/ @@ -94,6 +99,8 @@ class AggregationContext { this.transformers = transformers; this.addedBackModified = addedBackModified; requiresIndices = Arrays.stream(this.operators).anyMatch(IterativeChunkedAggregationOperator::requiresRowKeys); + requiresRunFinds = + Arrays.stream(this.operators).anyMatch(IterativeChunkedAggregationOperator::requiresRunFinds); requiresInputs = Arrays.stream(this.inputColumns).anyMatch(Objects::nonNull); unchunkedIndices = Arrays.stream(this.operators).allMatch(IterativeChunkedAggregationOperator::unchunkedRowSet); // noinspection unchecked @@ -140,6 +147,10 @@ boolean requiresIndices() { return requiresIndices; } + boolean requiresRunFinds(boolean skip) { + return requiresRunFinds || !skip; + } + boolean unchunkedIndices() { return unchunkedIndices; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteStreamSortedFirstOrLastChunkedOperator.java index 704ad861a40..9bd4b383df0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ByteStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.ByteComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.ByteArraySource; @@ -30,11 +31,6 @@ public class ByteStreamSortedFirstOrLastChunkedOperator extends CopyingPermutedS private final boolean isCombo; private final ByteArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; ByteStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final ByteChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; byte bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharStreamSortedFirstOrLastChunkedOperator.java index 134ff239fd8..061be9c4cb0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CharStreamSortedFirstOrLastChunkedOperator.java @@ -4,6 +4,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.CharComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.CharacterArraySource; @@ -27,11 +28,6 @@ public class CharStreamSortedFirstOrLastChunkedOperator extends CopyingPermutedS private final boolean isCombo; private final CharacterArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; CharStreamSortedFirstOrLastChunkedOperator( @@ -95,12 +91,11 @@ private boolean addChunk(@NotNull final CharChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; char bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java index f5b9d90aa12..86993ada24f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java @@ -48,7 +48,12 @@ @SuppressWarnings("rawtypes") public class ChunkedOperatorAggregationHelper { - static final int CHUNK_SIZE = 1 << 12; + static final int CHUNK_SIZE = + Configuration.getInstance().getIntegerWithDefault("ChunkedOperatorAggregationHelper.chunkSize", 1 << 12); + public static final boolean SKIP_RUN_FIND = + Configuration.getInstance().getBooleanWithDefault("ChunkedOperatorAggregationHelper.skipRunFind", false); + static final boolean HASHED_RUN_FIND = + Configuration.getInstance().getBooleanWithDefault("ChunkedOperatorAggregationHelper.hashedRunFind", true); public static QueryTable aggregation(AggregationContextFactory aggregationContextFactory, QueryTable queryTable, SelectColumn[] groupByColumns) { @@ -309,6 +314,7 @@ private static class KeyedUpdateContext implements SafeCloseable { private final IterativeChunkedAggregationOperator.BucketedContext[] 
bucketedContexts; private final IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext; + private final HashedRunFinder.HashedRunContext hashedRunContext; // These are used for all access when only pre- or post-shift (or previous or current) are needed, else for // pre-shift/previous @@ -383,7 +389,13 @@ private KeyedUpdateContext(@NotNull final AggregationContext ac, bucketedContexts = toClose.addArray(new IterativeChunkedAggregationOperator.BucketedContext[ac.size()]); ac.initializeBucketedContexts(bucketedContexts, upstream, keysModified, od.operatorsWithModifiedInputColumns); - sortKernelContext = toClose.add(IntIntTimsortKernel.createContext(chunkSize)); + final boolean findRuns = ac.requiresRunFinds(SKIP_RUN_FIND); + sortKernelContext = + !findRuns || HASHED_RUN_FIND ? null : toClose.add(IntIntTimsortKernel.createContext(chunkSize)); + // even if we are not finding runs because of configuration or operators, we may have a shift in which case + // we still need to find runs + hashedRunContext = + !HASHED_RUN_FIND ? 
null : toClose.add(new HashedRunFinder.HashedRunContext(chunkSize)); sharedContext = toClose.add(SharedContext.makeSharedContext()); getContexts = toClose.addArray(new ChunkSource.GetContext[ac.size()]); @@ -596,17 +608,25 @@ private void doRemovesForChunk(@NotNull final RowSequence keyIndicesToRemoveChun private void propagateRemovesToOperators(@NotNull final RowSequence keyIndicesToRemoveChunk, @NotNull final WritableIntChunk slotsToRemoveFrom) { - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slotsToRemoveFrom); + final boolean findRuns = ac.requiresRunFinds(SKIP_RUN_FIND); + findSlotRuns(sortKernelContext, hashedRunContext, runStarts, runLengths, chunkPositions, slotsToRemoveFrom, + findRuns); + if (ac.requiresIndices()) { - final LongChunk keyIndices = keyIndicesToRemoveChunk.asRowKeyChunk(); - LongPermuteKernel.permuteInput(keyIndices, chunkPositions, permutedKeyIndices); + if (findRuns) { + final LongChunk keyIndices = keyIndicesToRemoveChunk.asRowKeyChunk(); + LongPermuteKernel.permuteInput(keyIndices, chunkPositions, permutedKeyIndices); + } else { + keyIndicesToRemoveChunk.fillRowKeyChunk(permutedKeyIndices); + } } boolean anyOperatorModified = false; boolean firstOperator = true; setFalse(modifiedSlots, runStarts.size()); + final Chunk[] valueChunks = findRuns ? 
workingChunks : new Chunk[ac.size()]; sharedContext.reset(); for (int oi = 0; oi < ac.size(); ++oi) { if (!firstOperator) { @@ -615,10 +635,15 @@ private void propagateRemovesToOperators(@NotNull final RowSequence keyIndicesTo final int inputSlot = ac.inputSlot(oi); if (oi == inputSlot) { - getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToRemoveChunk, true, - permuteKernels[oi], chunkPositions, workingChunks[oi]); + if (findRuns) { + getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToRemoveChunk, true, + permuteKernels[oi], chunkPositions, workingChunks[oi]); + } else { + valueChunks[inputSlot] = + getChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToRemoveChunk, true); + } } - ac.operators[oi].removeChunk(bucketedContexts[oi], inputSlot >= 0 ? workingChunks[inputSlot] : null, + ac.operators[oi].removeChunk(bucketedContexts[oi], inputSlot >= 0 ? valueChunks[inputSlot] : null, permutedKeyIndices, slotsToRemoveFrom, runStarts, runLengths, firstOperator ? 
modifiedSlots : slotsModifiedByOperator); @@ -661,18 +686,26 @@ private void propagateInsertsToOperators(@NotNull final RowSequence keyIndicesTo @NotNull final WritableIntChunk slotsToAddTo) { ac.ensureCapacity(outputPosition.intValue()); - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slotsToAddTo); + final boolean findRuns = ac.requiresRunFinds(SKIP_RUN_FIND); + findSlotRuns(sortKernelContext, hashedRunContext, runStarts, runLengths, chunkPositions, slotsToAddTo, + findRuns); if (ac.requiresIndices()) { - final LongChunk keyIndices = keyIndicesToInsertChunk.asRowKeyChunk(); - permutedKeyIndices.setSize(keyIndices.size()); - LongPermuteKernel.permuteInput(keyIndices, chunkPositions, permutedKeyIndices); + if (findRuns) { + final LongChunk keyIndices = keyIndicesToInsertChunk.asRowKeyChunk(); + permutedKeyIndices.setSize(keyIndices.size()); + LongPermuteKernel.permuteInput(keyIndices, chunkPositions, permutedKeyIndices); + } else { + keyIndicesToInsertChunk.fillRowKeyChunk(permutedKeyIndices); + } } boolean anyOperatorModified = false; boolean firstOperator = true; setFalse(modifiedSlots, runStarts.size()); + final Chunk[] valueChunks = findRuns ? workingChunks : new Chunk[ac.size()]; + sharedContext.reset(); for (int oi = 0; oi < ac.size(); ++oi) { if (!firstOperator) { @@ -681,10 +714,15 @@ private void propagateInsertsToOperators(@NotNull final RowSequence keyIndicesTo final int inputSlot = ac.inputSlot(oi); if (inputSlot == oi) { - getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToInsertChunk, false, - permuteKernels[oi], chunkPositions, workingChunks[oi]); + if (findRuns) { + getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToInsertChunk, false, + permuteKernels[oi], chunkPositions, workingChunks[oi]); + } else { + valueChunks[oi] = + getChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToInsertChunk, false); + } } - ac.operators[oi].addChunk(bucketedContexts[oi], inputSlot >= 0 ? 
workingChunks[inputSlot] : null, + ac.operators[oi].addChunk(bucketedContexts[oi], inputSlot >= 0 ? valueChunks[inputSlot] : null, permutedKeyIndices, slotsToAddTo, runStarts, runLengths, firstOperator ? modifiedSlots : slotsModifiedByOperator); @@ -727,7 +765,17 @@ private void doProcessShiftBucketed(@NotNull final WritableLongChunk[] valueChunks = findRuns ? workingChunks : new Chunk[ac.size()]; + final Chunk[] postValueChunks = findRuns ? postWorkingChunks : new Chunk[ac.size()]; + try (final RowSequence.Iterator preShiftIterator = preShiftKeyIndicesToModify.getRowSequenceIterator(); final RowSequence.Iterator postShiftIterator = shifted ? postShiftKeyIndicesToModify.getRowSequenceIterator() : null) { @@ -790,13 +844,18 @@ private void doSameSlotModifies(@NotNull final RowSequence preShiftKeyIndicesToM incrementalStateManager.findModifications(pc, postShiftKeyIndicesChunk, reinterpretedKeySources, slots); - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slots); + findSlotRuns(sortKernelContext, hashedRunContext, runStarts, runLengths, chunkPositions, slots, + findRuns); if (supplyPostIndices) { - final LongChunk postKeyIndices = - postShiftKeyIndicesChunk.asRowKeyChunk(); - permutedKeyIndices.setSize(postKeyIndices.size()); - LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, permutedKeyIndices); + if (findRuns) { + final LongChunk postKeyIndices = + postShiftKeyIndicesChunk.asRowKeyChunk(); + permutedKeyIndices.setSize(postKeyIndices.size()); + LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, permutedKeyIndices); + } else { + postShiftKeyIndicesChunk.fillRowKeyChunk(permutedKeyIndices); + } } boolean anyOperatorModified = false; @@ -818,18 +877,27 @@ private void doSameSlotModifies(@NotNull final RowSequence preShiftKeyIndicesToM } else /* operatorsToProcess[oi] */ { final int inputSlot = ac.inputSlot(oi); if (inputSlot >= 0 && !chunkInitialized[inputSlot]) { - getAndPermuteChunk(ac.inputColumns[inputSlot], 
getContexts[inputSlot], - preShiftKeyIndicesChunk, true, permuteKernels[inputSlot], chunkPositions, - workingChunks[inputSlot]); - getAndPermuteChunk(ac.inputColumns[inputSlot], postGetContexts[inputSlot], - postShiftKeyIndicesChunk, false, permuteKernels[inputSlot], chunkPositions, - postWorkingChunks[inputSlot]); + if (findRuns) { + getAndPermuteChunk(ac.inputColumns[inputSlot], getContexts[inputSlot], + preShiftKeyIndicesChunk, true, permuteKernels[inputSlot], chunkPositions, + workingChunks[inputSlot]); + getAndPermuteChunk(ac.inputColumns[inputSlot], postGetContexts[inputSlot], + postShiftKeyIndicesChunk, false, permuteKernels[inputSlot], chunkPositions, + postWorkingChunks[inputSlot]); + } else { + valueChunks[inputSlot] = + getChunk(ac.inputColumns[inputSlot], getContexts[inputSlot], + preShiftKeyIndicesChunk, true); + postValueChunks[inputSlot] = + getChunk(ac.inputColumns[inputSlot], postGetContexts[inputSlot], + postShiftKeyIndicesChunk, false); + } chunkInitialized[inputSlot] = true; } ac.operators[oi].modifyChunk(bucketedContexts[oi], - inputSlot >= 0 ? workingChunks[inputSlot] : null, - inputSlot >= 0 ? postWorkingChunks[inputSlot] : null, permutedKeyIndices, slots, + inputSlot >= 0 ? valueChunks[inputSlot] : null, + inputSlot >= 0 ? postValueChunks[inputSlot] : null, permutedKeyIndices, slots, runStarts, runLengths, firstOperator ? 
modifiedSlots : slotsModifiedByOperator); } @@ -847,6 +915,8 @@ private void doSameSlotModifies(@NotNull final RowSequence preShiftKeyIndicesToM private void doSameSlotModifyIndicesOnly(@NotNull final RowSequence postShiftKeyIndicesToModify, @NotNull final boolean[] operatorsToProcessIndicesOnly) { + final boolean findRuns = ac.requiresRunFinds(SKIP_RUN_FIND); + try (final RowSequence.Iterator postShiftIterator = postShiftKeyIndicesToModify.getRowSequenceIterator()) { while (postShiftIterator.hasMore()) { final RowSequence postShiftKeyIndicesChunk = @@ -854,11 +924,16 @@ private void doSameSlotModifyIndicesOnly(@NotNull final RowSequence postShiftKey incrementalStateManager.findModifications(pc, postShiftKeyIndicesChunk, reinterpretedKeySources, slots); - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slots); + findSlotRuns(sortKernelContext, hashedRunContext, runStarts, runLengths, chunkPositions, slots, + findRuns); - final LongChunk postKeyIndices = postShiftKeyIndicesChunk.asRowKeyChunk(); - permutedKeyIndices.setSize(postKeyIndices.size()); - LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, permutedKeyIndices); + if (findRuns) { + final LongChunk postKeyIndices = postShiftKeyIndicesChunk.asRowKeyChunk(); + permutedKeyIndices.setSize(postKeyIndices.size()); + LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, permutedKeyIndices); + } else { + postShiftKeyIndicesChunk.fillRowKeyChunk(permutedKeyIndices); + } boolean anyOperatorModified = false; boolean firstOperator = true; @@ -1012,11 +1087,13 @@ private ModifySplitResult splitKeyModificationsAndDoKeyChangeRemoves() { keyChangeIndicesPostShiftBuilder.appendKey(currentIndex); } } - slots.setSize(numKeyChanges); - removedKeyIndices.setSize(numKeyChanges); - try (final RowSequence keyIndicesToRemoveChunk = - RowSequenceFactory.wrapRowKeysChunkAsRowSequence(removedKeyIndices)) { - propagateRemovesToOperators(keyIndicesToRemoveChunk, slots); + if (numKeyChanges > 0) 
{ + slots.setSize(numKeyChanges); + removedKeyIndices.setSize(numKeyChanges); + try (final RowSequence keyIndicesToRemoveChunk = + RowSequenceFactory.wrapRowKeysChunkAsRowSequence(removedKeyIndices)) { + propagateRemovesToOperators(keyIndicesToRemoveChunk, slots); + } } } } @@ -1320,12 +1397,22 @@ private static boolean anyTrue(BooleanChunk operatorSlots) { private static void findSlotRuns( IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext, + HashedRunFinder.HashedRunContext hashedRunContext, WritableIntChunk runStarts, WritableIntChunk runLengths, - WritableIntChunk chunkPosition, WritableIntChunk slots) { - chunkPosition.setSize(slots.size()); - ChunkUtils.fillInOrder(chunkPosition); - IntIntTimsortKernel.sort(sortKernelContext, chunkPosition, slots); - IntFindRunsKernel.findRunsSingles(slots, runStarts, runLengths); + WritableIntChunk chunkPosition, WritableIntChunk slots, + boolean findRuns) { + if (!findRuns) { + chunkPosition.setSize(slots.size()); + ChunkUtils.fillInOrder(chunkPosition); + IntFindRunsKernel.findRunsSingles(slots, runStarts, runLengths); + } else if (HASHED_RUN_FIND) { + HashedRunFinder.findRunsHashed(hashedRunContext, runStarts, runLengths, chunkPosition, slots); + } else { + chunkPosition.setSize(slots.size()); + ChunkUtils.fillInOrder(chunkPosition); + IntIntTimsortKernel.sort(sortKernelContext, chunkPosition, slots); + IntFindRunsKernel.findRunsSingles(slots, runStarts, runLengths); + } } /** @@ -1334,6 +1421,18 @@ private static void findSlotRuns( private static void getAndPermuteChunk(ChunkSource.WithPrev inputColumn, ChunkSource.GetContext getContext, RowSequence chunkOk, boolean usePrev, PermuteKernel permuteKernel, IntChunk chunkPosition, WritableChunk workingChunk) { + final Chunk values = getChunk(inputColumn, getContext, chunkOk, usePrev); + + // permute the chunk based on the chunkPosition, so that we have values from a slot together + if (values != null) { + workingChunk.setSize(values.size()); + 
permuteKernel.permuteInput(values, chunkPosition, workingChunk); + } + } + + @Nullable + private static Chunk getChunk(ChunkSource.WithPrev inputColumn, + ChunkSource.GetContext getContext, RowSequence chunkOk, boolean usePrev) { final Chunk values; if (inputColumn == null) { values = null; @@ -1342,12 +1441,7 @@ private static void getAndPermuteChunk(ChunkSource.WithPrev inputColumn, } else { values = inputColumn.getChunk(getContext, chunkOk); } - - // permute the chunk based on the chunkPosition, so that we have values from a slot together - if (values != null) { - workingChunk.setSize(values.size()); - permuteKernel.permuteInput(values, chunkPosition, workingChunk); - } + return values; } private static void modifySlots(RowSetBuilderRandom modifiedBuilder, IntChunk runStarts, @@ -1452,9 +1546,12 @@ private static void initialBucketedKeyAddition(QueryTable withView, ChunkedOperatorAggregationStateManager stateManager, MutableInt outputPosition, boolean usePrev) { + final boolean findRuns = ac.requiresRunFinds(SKIP_RUN_FIND); + final ChunkSource.GetContext[] getContexts = new ChunkSource.GetContext[ac.size()]; // noinspection unchecked - final WritableChunk[] workingChunks = new WritableChunk[ac.size()]; + final WritableChunk[] workingChunks = findRuns ? new WritableChunk[ac.size()] : null; + final Chunk[] valueChunks = findRuns ? workingChunks : new Chunk[ac.size()]; final IterativeChunkedAggregationOperator.BucketedContext[] bucketedContexts = new IterativeChunkedAggregationOperator.BucketedContext[ac.size()]; @@ -1477,14 +1574,16 @@ private static void initialBucketedKeyAddition(QueryTable withView, try (final SafeCloseable bc = stateManager.makeAggregationStateBuildContext(buildSources, chunkSize); final SafeCloseable ignored1 = usePrev ? rowSet : null; final SafeCloseable ignored2 = new SafeCloseableArray<>(getContexts); - final SafeCloseable ignored3 = new SafeCloseableArray<>(workingChunks); + final SafeCloseable ignored3 = findRuns ? 
new SafeCloseableArray<>(workingChunks) : null; final SafeCloseable ignored4 = new SafeCloseableArray<>(bucketedContexts); final RowSequence.Iterator rsIt = rowSet.getRowSequenceIterator(); final WritableIntChunk outputPositions = WritableIntChunk.makeWritableChunk(chunkSize); final WritableIntChunk chunkPosition = WritableIntChunk.makeWritableChunk(chunkSize); final SharedContext sharedContext = SharedContext.makeSharedContext(); final IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext = - IntIntTimsortKernel.createContext(chunkSize); + !findRuns || HASHED_RUN_FIND ? null : IntIntTimsortKernel.createContext(chunkSize); + final HashedRunFinder.HashedRunContext hashedRunContext = + !findRuns || !HASHED_RUN_FIND ? null : new HashedRunFinder.HashedRunContext(chunkSize); final WritableIntChunk runStarts = WritableIntChunk.makeWritableChunk(chunkSize); final WritableIntChunk runLengths = WritableIntChunk.makeWritableChunk(chunkSize); final WritableLongChunk permutedKeyIndices = @@ -1492,7 +1591,9 @@ private static void initialBucketedKeyAddition(QueryTable withView, final WritableBooleanChunk unusedModifiedSlots = WritableBooleanChunk.makeWritableChunk(chunkSize)) { ac.initializeGetContexts(sharedContext, getContexts, chunkSize); - ac.initializeWorkingChunks(workingChunks, chunkSize); + if (findRuns) { + ac.initializeWorkingChunks(workingChunks, chunkSize); + } ac.initializeBucketedContexts(bucketedContexts, chunkSize); while (rsIt.hasMore()) { @@ -1503,20 +1604,31 @@ private static void initialBucketedKeyAddition(QueryTable withView, ac.ensureCapacity(outputPosition.intValue()); - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPosition, outputPositions); + findSlotRuns(sortKernelContext, hashedRunContext, runStarts, runLengths, chunkPosition, outputPositions, + findRuns); if (permutedKeyIndices != null) { - final LongChunk keyIndices = chunkOk.asRowKeyChunk(); - LongPermuteKernel.permuteInput(keyIndices, chunkPosition, permutedKeyIndices); + if 
(findRuns) { + final LongChunk keyIndices = chunkOk.asRowKeyChunk(); + LongPermuteKernel.permuteInput(keyIndices, chunkPosition, permutedKeyIndices); + } else { + chunkOk.fillRowKeyChunk(permutedKeyIndices); + } } for (int ii = 0; ii < ac.size(); ++ii) { final int inputSlot = ac.inputSlot(ii); if (ii == inputSlot) { - getAndPermuteChunk(ac.inputColumns[ii], getContexts[ii], chunkOk, usePrev, permuteKernels[ii], - chunkPosition, workingChunks[ii]); + if (!findRuns) { + valueChunks[inputSlot] = getChunk(ac.inputColumns[ii], getContexts[ii], chunkOk, usePrev); + } else { + getAndPermuteChunk(ac.inputColumns[ii], getContexts[ii], chunkOk, usePrev, + permuteKernels[ii], + chunkPosition, workingChunks[ii]); + } } - ac.operators[ii].addChunk(bucketedContexts[ii], inputSlot >= 0 ? workingChunks[inputSlot] : null, + ac.operators[ii].addChunk(bucketedContexts[ii], + inputSlot >= 0 ? valueChunks[inputSlot] : null, permutedKeyIndices, outputPositions, runStarts, runLengths, unusedModifiedSlots); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java index ecb5c672502..b20f61b9b61 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java @@ -37,7 +37,7 @@ public CopyingPermutedStreamFirstOrLastChunkedOperator(@NotNull final MatchPair[ @Override public void ensureCapacity(final long tableSize) { - redirections.ensureCapacity(tableSize, false); + redirections.ensureCapacity(tableSize, true); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleStreamSortedFirstOrLastChunkedOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleStreamSortedFirstOrLastChunkedOperator.java index 34d45d4cef1..e47f0d2e985 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/DoubleStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.DoubleComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.DoubleArraySource; @@ -30,11 +31,6 @@ public class DoubleStreamSortedFirstOrLastChunkedOperator extends CopyingPermute private final boolean isCombo; private final DoubleArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; DoubleStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final DoubleChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; double bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatStreamSortedFirstOrLastChunkedOperator.java index 2c3b0d9f644..f56b9bf1d54 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/FloatStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.FloatComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.FloatArraySource; @@ -30,11 +31,6 @@ public class FloatStreamSortedFirstOrLastChunkedOperator extends CopyingPermuted private final boolean isCombo; private final FloatArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; FloatStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final FloatChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; float bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/HashedRunFinder.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/HashedRunFinder.java new file mode 100644 index 00000000000..35d109b4b43 --- /dev/null +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/HashedRunFinder.java @@ -0,0 +1,136 @@ +package io.deephaven.engine.table.impl.by; + +import io.deephaven.base.MathUtil; +import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.WritableIntChunk; +import io.deephaven.chunk.attributes.ChunkLengths; +import io.deephaven.chunk.attributes.ChunkPositions; +import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.util.SafeCloseable; + +/** + * Finds runs of the same slot, and fills in the chunkPositions, runStarts, and runLengths arrays. The slots are not in + * order; like slots are simply grouped together such that each slot is only represented in the chunk one time. 
+ */ +public class HashedRunFinder { + private static final int UNUSED_HASH_TABLE_VALUE = -1; + + public static class HashedRunContext implements SafeCloseable { + final int tableSize; + final int tableMask; + + // the hash table is [outputPosition, position, overflow] + // the overflow is only [position, overflow] + final WritableIntChunk table; + final WritableIntChunk overflow; + + public HashedRunContext(int size) { + if (size == 0) { + tableSize = 0; + tableMask = 0; + table = null; + overflow = null; + return; + } + // load factor of half, rounded up + tableSize = 1 << MathUtil.ceilLog2(size * 2); + tableMask = tableSize - 1; + table = WritableIntChunk.makeWritableChunk(tableSize * 3); + overflow = WritableIntChunk.makeWritableChunk((size - 1) * 2); + table.fillWithValue(0, table.size(), UNUSED_HASH_TABLE_VALUE); + } + + @Override + public void close() { + if (table == null) { + return; + } + table.close(); + overflow.close(); + } + } + + public static void findRunsHashed( + HashedRunContext context, + WritableIntChunk runStarts, + WritableIntChunk runLengths, + WritableIntChunk chunkPositions, + WritableIntChunk outputPositions) { + final int size = outputPositions.size(); + + Assert.gtZero(size, "size"); + + int overflowPointer = 0; + + int minSlot = context.tableSize; + int maxSlot = 0; + + for (int chunkPosition = 0; chunkPosition < size; ++chunkPosition) { + final int outputPosition = outputPositions.get(chunkPosition); + int hashSlot = outputPosition & context.tableMask; + + do { + final int baseSlot = hashSlot * 3; + if (context.table.get(baseSlot) == UNUSED_HASH_TABLE_VALUE) { + // insert it here + context.table.set(baseSlot, outputPosition); + context.table.set(baseSlot + 1, chunkPosition); + context.table.set(baseSlot + 2, UNUSED_HASH_TABLE_VALUE); + minSlot = Math.min(hashSlot, minSlot); + maxSlot = Math.max(hashSlot, maxSlot); + break; + } else if (context.table.get(baseSlot) == outputPosition) { + context.overflow.set(overflowPointer, 
chunkPosition); + context.overflow.set(overflowPointer + 1, context.table.get(baseSlot + 2)); + context.table.set(baseSlot + 2, overflowPointer); + overflowPointer += 2; + break; + } else { + // linear probe + hashSlot = (hashSlot + 1) & context.tableMask; + } + } while (true); + } + + // now iterate the table into the outputPositions/chunkPositions chunks + int chunkPointer = 0; + chunkPositions.setSize(outputPositions.size()); + runLengths.setSize(0); + runStarts.setSize(0); + for (int hashSlot = minSlot; hashSlot <= maxSlot; ++hashSlot) { + final int baseSlot = hashSlot * 3; + final int outputPosition; + if ((outputPosition = context.table.get(baseSlot)) != UNUSED_HASH_TABLE_VALUE) { + // zero it out to be ready for the next usage + context.table.set(baseSlot, UNUSED_HASH_TABLE_VALUE); + + runStarts.add(chunkPointer); + outputPositions.set(chunkPointer, outputPosition); + chunkPositions.set(chunkPointer++, context.table.get(baseSlot + 1)); + + int len = 1; + int overflowLocation = context.table.get(baseSlot + 2); + // now chase overflow + while (overflowLocation != UNUSED_HASH_TABLE_VALUE) { + outputPositions.set(chunkPointer, outputPosition); + chunkPositions.set(chunkPointer++, context.overflow.get(overflowLocation)); + overflowLocation = context.overflow.get(overflowLocation + 1); + len++; + } + + // and reverse the overflow outputPositions so they appear in order + final int reverseLen = len - 1; + final int beginReverse = chunkPointer - reverseLen; + final int endReverse = chunkPointer - 1; + for (int ii = 0; ii < reverseLen / 2; ++ii) { + final int rr = chunkPositions.get(beginReverse + ii); + chunkPositions.set(beginReverse + ii, chunkPositions.get(endReverse - ii)); + chunkPositions.set(endReverse - ii, rr); + } + + runLengths.add(len); + } + } + Assert.eq(chunkPointer, "chunkPointer", outputPositions.size(), "outputPositions.size()"); + } +} diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntStreamSortedFirstOrLastChunkedOperator.java index a4dfd700414..c6c8d201a59 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IntStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.IntComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.IntegerArraySource; @@ -30,11 +31,6 @@ public class IntStreamSortedFirstOrLastChunkedOperator extends CopyingPermutedSt private final boolean isCombo; private final IntegerArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; IntStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final IntChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; int bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.java index 6cb14df01a7..f3de70785c8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/IterativeChunkedAggregationOperator.java @@ -235,6 +235,15 @@ default boolean requiresRowKeys() { return false; } + /** + * Some operators require that all values for a destination within a given chunk are adjacent. These operators must + * return true. + * + * @return true if the operator requires runs of the same destination to be adjacent + */ + default boolean requiresRunFinds() { + return false; + } /** * Whether the operator can deal with an unchunked RowSet more efficiently than a chunked RowSet. 
diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongStreamSortedFirstOrLastChunkedOperator.java index 30615a1abf8..4ca680a4c5e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/LongStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.LongComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.LongArraySource; @@ -30,11 +31,6 @@ public class LongStreamSortedFirstOrLastChunkedOperator extends CopyingPermutedS private final boolean isCombo; private final LongArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; LongStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final LongChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; long bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectStreamSortedFirstOrLastChunkedOperator.java index 8ec7002212f..171d9a5841a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ObjectStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.ObjectComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.ObjectArraySource; @@ -30,11 +31,6 @@ public class ObjectStreamSortedFirstOrLastChunkedOperator extends CopyingPermute private final boolean isCombo; private final ObjectArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; ObjectStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final ObjectChunk va if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; Object bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/PartitionByChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/PartitionByChunkedOperator.java index be80eb47bfd..c531617f90b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/PartitionByChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/PartitionByChunkedOperator.java @@ -5,11 +5,11 @@ import io.deephaven.chunk.attributes.ChunkPositions; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.rowset.impl.AdaptiveOrderedLongSetBuilderRandom; import io.deephaven.engine.rowset.impl.WritableRowSetImpl; import io.deephaven.engine.table.*; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.impl.OrderedLongSet; -import io.deephaven.engine.rowset.impl.OrderedLongSetBuilderSequential; import io.deephaven.engine.table.impl.TableUpdateImpl; import io.deephaven.engine.table.impl.chunkboxer.ChunkBoxer; import io.deephaven.engine.updategraph.UpdateGraphProcessor; @@ -203,14 +203,14 @@ public void shiftChunk(final BucketedContext bucketedContext, final Chunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(previousValues, "previousValues"); Assert.eqNull(newValues, 
"newValues"); - final OrderedLongSetBuilderSequential chunkDestinationBuilder = new OrderedLongSetBuilderSequential(true); + final AdaptiveOrderedLongSetBuilderRandom chunkDestinationBuilder = new AdaptiveOrderedLongSetBuilderRandom(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); if (appendShifts(preShiftRowKeys, postShiftRowKeys, startPosition, runLength, destination)) { - chunkDestinationBuilder.appendKey(destination); + chunkDestinationBuilder.addKey(destination); } } try (final RowSet chunkDestinationsShifted = diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortStreamSortedFirstOrLastChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortStreamSortedFirstOrLastChunkedOperator.java index fc508475463..3975769335b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortStreamSortedFirstOrLastChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ShortStreamSortedFirstOrLastChunkedOperator.java @@ -7,6 +7,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.MatchPair; import io.deephaven.engine.table.TableUpdate; +import io.deephaven.util.QueryConstants; import io.deephaven.util.compare.ShortComparisons; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.sources.ShortArraySource; @@ -30,11 +31,6 @@ public class ShortStreamSortedFirstOrLastChunkedOperator extends CopyingPermuted private final boolean isCombo; private final ShortArraySource sortColumnValues; - /** - *

The next destination slot that we expect to be used. - *

Any destination at or after this one has an undefined value in {@link #sortColumnValues}. - */ - private long nextDestination; private RowSetBuilderRandom changedDestinationsBuilder; ShortStreamSortedFirstOrLastChunkedOperator( @@ -98,12 +94,11 @@ private boolean addChunk(@NotNull final ShortChunk values, if (length == 0) { return false; } - final boolean newDestination = destination >= nextDestination; + final boolean newDestination = redirections.getUnsafe(destination) == QueryConstants.NULL_LONG; int bestChunkPos; short bestValue; if (newDestination) { - ++nextDestination; bestChunkPos = start; bestValue = values.get(start); } else { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.java index e9dd017d561..8c2f501e397 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/StreamFirstChunkedOperator.java @@ -66,20 +66,46 @@ public void ensureCapacity(final long tableSize) { } @Override - public void addChunk(final BucketedContext context, // Unused + public void addChunk(final BucketedContext bucketedContext, final Chunk values, // Unused @NotNull final LongChunk inputRowKeys, @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, final IntChunk length, // Unused @NotNull final WritableBooleanChunk stateModified) { + + final StreamFirstBucketedContext context = (StreamFirstBucketedContext) bucketedContext; + + long maxDestination = nextDestination - 1; + + // we can essentially do a radix sort; anything less than nextDestination is not of interest; everything else + // must fall between nextDestination and our chunk size + context.rowKeyToInsert.fillWithValue(0, startPositions.size(), Long.MAX_VALUE); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = 
startPositions.get(ii); - final long destination = destinations.get(startPosition); - if (maybeAssignFirst(destination, inputRowKeys.get(startPosition))) { - stateModified.set(ii, true); + final int destination = destinations.get(startPosition); + if (destination >= nextDestination) { + Assert.lt(destination, "destination", nextDestination + startPositions.size(), + "nextDestination + startPositions.size()"); + maxDestination = Math.max(destination, maxDestination); + + final long inputRowKey = inputRowKeys.get(startPosition); + final int index = (int) (destination - nextDestination); + + context.destinationsToInsert.set(index, destination); + context.rowKeyToInsert.set(index, Math.min(context.rowKeyToInsert.get(index), inputRowKey)); } } + context.destinationsToInsert.setSize((int) (maxDestination - nextDestination + 1)); + context.rowKeyToInsert.setSize((int) (maxDestination - nextDestination + 1)); + + for (int ii = 0; ii < context.destinationsToInsert.size(); ++ii) { + final int destination = context.destinationsToInsert.get(ii); + final long rowKey = context.rowKeyToInsert.get(ii); + redirections.set(destination - firstDestinationThisStep, rowKey); + } + + nextDestination = maxDestination + 1; } @Override @@ -195,4 +221,25 @@ private void copyStreamToResult(@NotNull final RowSequence destinations) { } } } + + private static class StreamFirstBucketedContext implements BucketedContext { + final WritableIntChunk destinationsToInsert; + final WritableLongChunk rowKeyToInsert; + + public StreamFirstBucketedContext(int size) { + destinationsToInsert = WritableIntChunk.makeWritableChunk(size); + rowKeyToInsert = WritableLongChunk.makeWritableChunk(size); + } + + @Override + public void close() { + destinationsToInsert.close(); + rowKeyToInsert.close(); + } + } + + @Override + public BucketedContext makeBucketedContext(int size) { + return new StreamFirstBucketedContext(size); + } } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java index feea917df2f..bfa2c5105bd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java @@ -281,6 +281,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java index 491c6d75598..2e1e9cc263a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java @@ -515,6 +515,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java index 2356fe2d3f0..0ea3c424b93 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java @@ -278,6 +278,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java index 7b0f573ff2d..6e12553d4c4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java @@ -512,6 +512,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java index 5bfebce9c91..18f0678c930 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java @@ -281,6 +281,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java index 28d42df0ed8..0aa71d84f0d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java @@ -515,6 +515,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java index 4f5fe86d5d4..504ed52d64e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java @@ -281,6 +281,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java index 919806c26ac..4ff5df7e16f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java @@ -515,6 +515,11 @@ public void ensureCapacity(long tableSize) 
{ return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java index 7f08949d063..63382ad584c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java @@ -281,6 +281,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java index c9410889ec8..5cfe97cf6c3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java @@ -515,6 +515,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java index eaa8391455c..28c7b69c394 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java @@ -281,6 +281,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java index 7f853eca319..11accc13d82 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java @@ -515,6 +515,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java index f5c3a7cb144..abed7602d7b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java @@ -282,6 +282,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java index dd8da16e98c..b56d0660372 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java @@ -516,6 +516,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java index 555a0e4625c..e2ebd572174 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java @@ -281,6 +281,11 @@ public void startTrackingPrevValues() { } } + @Override + public boolean requiresRunFinds() { + return true; + } + //endregion //region Private Helpers diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java index 3c8e8a64d14..1ffc95aaa08 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java @@ -515,6 +515,11 @@ public void ensureCapacity(long tableSize) { return columns; } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public void startTrackingPrevValues() { if(prevFlusher != null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java index 8e2fff199da..9c087906da4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmminmax/SsmChunkedMinMaxOperator.java @@ -330,6 +330,11 @@ public void startTrackingPrevValues() { resultColumn.startTrackingPrevValues(); } + @Override + public boolean requiresRunFinds() { + return true; + } + @Override public BucketedContext makeBucketedContext(int size) { return new BucketSsmMinMaxContext(chunkType, size); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java index 458198c9843..6e61ee28a35 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java @@ -93,7 +93,7 @@ public void shutdown() throws IOException { if (done) { return; } - UpdateGraphProcessor.DEFAULT.removeTables(currentTables); + UpdateGraphProcessor.DEFAULT.removeSources(currentTables); currentTables = null; if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { shutdownInternal(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java index 07003fdfa52..556a024d874 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java @@ -23,6 +23,9 @@ * The table has an initial size, which can be thought of as the size during query initialization. There is an initial * number of rows that are released, which is then used to tune the number of rows to release on the subsequent cycle. *

+ * You must invoke the {@link #start()} method to begin producing rows. + *

+ *

* The targetFactor parameter is multiplied by the UGP's targetCycle. This allows you to determine how busy you want the * UGP to be. For example a factor of 1, will attempt to hit the target cycle exactly. A target of 0.5 should result an * UGP ratio of about 50%. A factor of 10 would mean that the system will extend beyond the target cycle time, coalesce @@ -69,6 +72,7 @@ * filterQuotes=new AutoTuningIncrementalReleaseFilter(logger, 10000, 10000, 1.0d, true) * quotesFiltered = quotes.where(filterQuotes) * currentQuote = quotesFiltered.lastBy("LocalCodeStr").update("Mid=(Bid + Ask)/2") + * filterQuotes.start() * *

* The verbose information and the final report are easily visible on your console. @@ -93,6 +97,10 @@ * tradesFiltered = trades.where(filterTrades) * * decorated = tradesFiltered.aj(quotesFiltered, "LocalCodeStr,MarketTimestamp", "QuoteTime=MarketTimestamp,Bid,BidSize,Ask,AskSize") + * + * filterTrades.start() + * filterQuotes.start() + * * */ public class AutoTuningIncrementalReleaseFilter extends BaseIncrementalReleaseFilter { @@ -210,7 +218,7 @@ public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease, @ScriptApi public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long initialRelease, double targetFactor, boolean verbose, TimeProvider timeProvider) { - super(initialSize); + super(initialSize, false); this.logger = logger; this.targetFactor = targetFactor; this.verbose = verbose; @@ -226,13 +234,17 @@ long getSizeIncrement() { @Override public void run() { + super.run(); if (releasedAll) { - return; + throw new IllegalStateException(); } final DateTime now = timeProvider.currentTime(); if (nextSize == 0) { firstCycle = now; nextSize = initialRelease; + if (verbose) { + logger.info().append("Releasing: ").append(nextSize).append(" rows, first release").endl(); + } } else { final long cycleDuration = (cycleEnd.getNanos() - lastRefresh.getNanos()); final long targetCycle = UpdateGraphProcessor.DEFAULT.getTargetCycleDurationMillis() * 1000 * 1000; @@ -248,7 +260,7 @@ public void run() { final double eta = (remaining / totalRowsPerSecond); logger.info().append("Releasing: ").append(nextSize).append(" rows, last rows/second: ") .append(decimalFormat.format(rowsPerNanoSecond * 1_000_000_000L)).append(", duration=") - .append(cycleDuration / 1000000L).append(" ms, total rows/second=") + .append(decimalFormat.format(cycleDuration / 1000000.0)).append(" ms, total rows/second=") .append(decimalFormat.format(totalRowsPerSecond)).append(", ETA ") .append(decimalFormat.format(eta)).append(" sec").endl(); } @@ -272,12 +284,12 @@ 
public void run() { } }); lastRefresh = now; - super.run(); } @Override void onReleaseAll() { releasedAll = true; + super.onReleaseAll(); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java index 1f0d8cf0045..37be9e11b3e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java @@ -10,6 +10,9 @@ import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.rowset.RowSet; +import io.deephaven.time.DateTimeUtils; +import io.deephaven.util.QueryConstants; +import io.deephaven.util.annotations.ScriptApi; import java.util.Collections; import java.util.List; @@ -30,8 +33,24 @@ public abstract class BaseIncrementalReleaseFilter extends WhereFilterLivenessAr transient private boolean addedToUpdateGraphProcessor = false; - BaseIncrementalReleaseFilter(long initialSize) { + private transient volatile long firstReleaseNanos = QueryConstants.NULL_LONG; + private transient volatile long releaseAllNanos = QueryConstants.NULL_LONG; + + /** + * Should we release entries during the UpdateGraphProcessor cycle? + */ + private transient volatile boolean started; + private transient volatile boolean initialized = false; + + /** + * Construct an incremental release filter. 
+ * + * @param initialSize how many rows should be released in the initialized table before any updates + * @param started should updates proceed immediately + */ + BaseIncrementalReleaseFilter(long initialSize, boolean started) { releasedSize = this.initialSize = initialSize; + this.started = started; } @Override @@ -46,6 +65,14 @@ public List getColumnArrays() { @Override public void init(TableDefinition tableDefinition) { + initialized = true; + if (!started) { + return; + } + addToUpdateGraphProcessor(); + } + + private void addToUpdateGraphProcessor() { if (!addedToUpdateGraphProcessor) { UpdateGraphProcessor.DEFAULT.addSource(this); addedToUpdateGraphProcessor = true; @@ -69,12 +96,92 @@ public WritableRowSet filter(RowSet selection, RowSet fullSet, Table table, bool if (fullSet.size() <= releasedSize) { onReleaseAll(); releasedSize = fullSet.size(); + UpdateGraphProcessor.DEFAULT.removeSource(this); } return fullSet.subSetByPositionRange(0, releasedSize).intersect(selection); } - void onReleaseAll() {} + /** + * Callback that is executed when all of our expected rows have been released. + */ + void onReleaseAll() { + releaseAllNanos = DateTimeUtils.currentTime().getNanos(); + if (firstReleaseNanos == QueryConstants.NULL_LONG) { + // there was no processing to do + firstReleaseNanos = releaseAllNanos; + } + } + + /** + * Wait for all rows to be released. 
+ */ + @ScriptApi + public void waitForCompletion() throws InterruptedException { + if (UpdateGraphProcessor.DEFAULT.isRefreshThread()) { + throw new IllegalStateException( + "Can not wait for completion while on UpdateGraphProcessor refresh thread, updates would block."); + } + if (releaseAllNanos != QueryConstants.NULL_LONG) { + return; + } + UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + while (releaseAllNanos == QueryConstants.NULL_LONG) { + // this only works because we will never actually filter out a row from the result; in the general + // WhereFilter case, the result table may not update. We could await on the source table, but + listener.getTable().awaitUpdate(); + } + }); + } + + /** + * Wait for all rows to be released. + */ + @ScriptApi + public void waitForCompletion(long timeoutMillis) throws InterruptedException { + if (UpdateGraphProcessor.DEFAULT.isRefreshThread()) { + throw new IllegalStateException( + "Can not wait for completion while on UpdateGraphProcessor refresh thread, updates would block."); + } + if (releaseAllNanos != QueryConstants.NULL_LONG) { + return; + } + final long end = System.currentTimeMillis() + timeoutMillis; + UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + while (releaseAllNanos == QueryConstants.NULL_LONG) { + // this only works because we will never actually filter out a row from the result; in the general + // WhereFilter case, the result table may not update. We could await on the source table, but + final long remainingTimeout = Math.max(0, end - System.currentTimeMillis()); + if (remainingTimeout == 0) { + return; + } + listener.getTable().awaitUpdate(remainingTimeout); + } + }); + } + + /** + * How many nanos between the first release event and the final release event? 
+ * + * @return nano duration of this filter, or NULL_LONG if the filter is not completed + */ + @ScriptApi + public long durationNanos() { + if (releaseAllNanos == QueryConstants.NULL_LONG || firstReleaseNanos == QueryConstants.NULL_LONG) { + return QueryConstants.NULL_LONG; + } + return releaseAllNanos - firstReleaseNanos; + } + + /** + * Begin releasing rows during update propagation. + */ + public void start() { + started = true; + if (initialized) { + addToUpdateGraphProcessor(); + } + } public long getInitialSize() { return initialSize; @@ -112,6 +219,12 @@ public boolean isRefreshing() { @Override public void run() { + if (!started) { + throw new IllegalStateException(); + } + if (firstReleaseNanos == QueryConstants.NULL_LONG) { + firstReleaseNanos = DateTimeUtils.currentTime().getNanos(); + } releaseMoreEntries = true; listener.requestRecompute(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/IncrementalReleaseFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/IncrementalReleaseFilter.java index 10a08c2fd97..af6292cf906 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/IncrementalReleaseFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/IncrementalReleaseFilter.java @@ -17,7 +17,7 @@ public class IncrementalReleaseFilter extends BaseIncrementalReleaseFilter { * @param sizeIncrement how many rows to release at the beginning of each UGP cycle. 
*/ public IncrementalReleaseFilter(long initialSize, long sizeIncrement) { - super(initialSize); + super(initialSize, true); this.sizeIncrement = sizeIncrement; } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java index 71d264f75d6..0fce1a0c95c 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java @@ -2225,12 +2225,11 @@ private void testCountByIncremental(int size) { for (int i = 0; i < 100; i++) { RefreshingTableTestCase.simulateShiftAwareStep(size, random, queryTable, columnInfo, en); } - } @Test public void testMinMaxByIncremental() { - final int[] sizes = {10, 50, 200}; + final int[] sizes = {10, 20, 50, 200}; for (final int size : sizes) { for (int seed = 0; seed < 1; ++seed) { testMinMaxByIncremental(size, seed); @@ -2259,7 +2258,7 @@ private void testMinMaxByIncremental(int size, int seed) { TableTools.showWithRowSet(queryTable); } final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget.from(() -> queryTable.maxBy("Sym").sort("Sym")), + EvalNugget.Sorted.from(() -> queryTable.maxBy("Sym"), "Sym"), EvalNugget.from(() -> queryTable.sort("Sym").maxBy("Sym")), EvalNugget.from(() -> queryTable.dropColumns("Sym").sort("intCol").maxBy("intCol").sort("intCol")), EvalNugget.from(() -> queryTable.sort("Sym", "intCol").maxBy("Sym", "intCol").sort("Sym", "intCol")), @@ -2286,7 +2285,7 @@ private void testMinMaxByIncremental(int size, int seed) { }; for (int step = 0; step < 50; step++) { if (RefreshingTableTestCase.printTableUpdates) { - System.out.println("Seed = " + seed + ", step=" + step); + System.out.println("Seed = " + seed + ", size=" + size + ", step=" + step); } RefreshingTableTestCase.simulateShiftAwareStep(size, random, queryTable, columnInfo, en); } diff 
--git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java index b5fc41d48ee..e5a797a6eb0 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java @@ -928,10 +928,9 @@ static private void doCompareWithChildren(Function.Unary actual expectedValue = expectedValue.sort(sortColumns); } - final String diff = diff(maybePrev(actualValue.dropColumns(hierarchicalColumnName), actualPrev), - maybePrev(expectedValue.dropColumns(hierarchicalColumnName), expectedPrev), 10, - EnumSet.of(TableDiff.DiffItems.DoublesExact)); - Assert.assertEquals(msg, "", diff); + assertTableEquals(maybePrev(expectedValue.dropColumns(hierarchicalColumnName), expectedPrev), + maybePrev(actualValue.dropColumns(hierarchicalColumnName), actualPrev), + TableDiff.DiffItems.DoublesExact); final ColumnSource actualChildren = columnOrPrev(actualValue, hierarchicalColumnName, actualPrev); final ColumnSource expectedChildren = columnOrPrev(expectedValue, hierarchicalColumnName, expectedPrev); @@ -1622,7 +1621,14 @@ public void testDuplicateAgg() { } public void testRollupIncremental() { - final Random random = new Random(0); + for (int seed = 0; seed < 1; ++seed) { + System.out.println("Seed = " + seed); + testRollupIncremental(seed); + } + } + + private void testRollupIncremental(int seed) { + final Random random = new Random(seed); final TstUtils.ColumnInfo[] columnInfo; final int size = 100; @@ -1683,6 +1689,7 @@ public void validate(String msg) {} @Override public void show() { + System.out.println("Table:"); TableTools.showWithRowSet(table); } }, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSumByProfile.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSumByProfile.java new file mode 100644 index 00000000000..bc727b96d27 
--- /dev/null +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSumByProfile.java @@ -0,0 +1,46 @@ +package io.deephaven.engine.table.impl; + +import io.deephaven.engine.table.Table; +import io.deephaven.engine.util.TableTools; +import io.deephaven.parquet.table.ParquetTools; + +import java.text.DecimalFormat; + +public class TestSumByProfile { + public static void main(String[] args) { + if (args.length != 2) { + usage(); + } + final String filename = args[1]; + + final boolean view; + switch (args[0]) { + case "view": + view = true; + break; + case "select": + view = false; + break; + default: + view = true; + usage(); + } + System.out.println("Reading: " + filename); + final Table relation = ParquetTools.readTable(filename); + final long startTimeSelect = System.nanoTime(); + final Table viewed = view ? relation.view("animal_id", "Values") : relation.select("animal_id", "Values"); + final long endTimeSelect = System.nanoTime(); + System.out.println("Select Elapsed Time: " + new DecimalFormat("###,###.000") + .format(((double) (endTimeSelect - startTimeSelect) / 1_000_000_000.0))); + final Table sumBy = viewed.sumBy("animal_id"); + final long endTimeSum = System.nanoTime(); + System.out.println("Sum Elapsed Time: " + + new DecimalFormat("###,###.000").format(((double) (endTimeSum - endTimeSelect) / 1_000_000_000.0))); + TableTools.show(sumBy); + } + + private static void usage() { + System.err.println("TestSumByProfile select|view filename"); + System.exit(1); + } +} diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java index 8cfda5aaf4d..5cf84eed9b4 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java @@ -12,6 +12,7 @@ import 
io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; +import junit.framework.TestCase; import java.util.List; import java.util.stream.Collectors; @@ -87,19 +88,24 @@ public void testAutoTune2() { // I just want to see commas in the output UpdateGraphProcessor.DEFAULT.setTargetCycleDurationMillis(100); final Table source = TableTools.emptyTable(1_000_000); - TableTools.show(source); final AutoTuningIncrementalReleaseFilter incrementalReleaseFilter = new AutoTuningIncrementalReleaseFilter(0, 100, 1.1, true, new ClockTimeProvider(new RealTimeClock())); + incrementalReleaseFilter.start(); final Table filtered = source.where(incrementalReleaseFilter); final Table updated = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> filtered.update("I=ii")); + int steps = 0; + while (filtered.size() < source.size()) { UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + if (steps++ > 100) { + TestCase.fail("Did not release rows promptly."); + } } - TableTools.show(updated); + assertEquals(source.size(), updated.size()); } private int testAutoTuneCycle(int cycleTime) { @@ -109,6 +115,7 @@ private int testAutoTuneCycle(int cycleTime) { final AutoTuningIncrementalReleaseFilter incrementalReleaseFilter = new AutoTuningIncrementalReleaseFilter(0, 100, 1.1, true, new ClockTimeProvider(new RealTimeClock())); + incrementalReleaseFilter.start(); final Table filtered = source.where(incrementalReleaseFilter); final Table updated = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> filtered @@ -118,7 +125,9 @@ private int testAutoTuneCycle(int cycleTime) { while (filtered.size() < source.size()) { UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); System.out.println(filtered.size() + " / " + updated.size()); - cycles++; + if (cycles++ > (2 * (source.size() * 100) / 
cycleTime)) { + TestCase.fail("Did not release rows promptly."); + } } return cycles; } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java index 1ce4785fd2e..8b80a302a0a 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java @@ -617,13 +617,22 @@ public void removeSource(@NotNull final Runnable updateSource) { } /** - * Remove a collection of tables from the list of refreshing tables. + * Remove a collection of sources from the list of refreshing sources. * - * @implNote This will not set the tables as {@link DynamicNode#setRefreshing(boolean) non-refreshing}. - * @param tablesToRemove The tables to remove from the list of refreshing tables + * @implNote This will not set the sources as {@link DynamicNode#setRefreshing(boolean) non-refreshing}. + * @param sourcesToRemove The sources to remove from the list of refreshing sources */ - public void removeTables(final Collection tablesToRemove) { - sources.removeAll(tablesToRemove); + public void removeSources(final Collection sourcesToRemove) { + sources.removeAll(sourcesToRemove); + } + + /** + * Return the number of valid sources. + * + * @return the number of valid sources + */ + public int sourceCount() { + return sources.size(); } /**