From a033b250e4e4aa39927b317e0e4bca7178717fde Mon Sep 17 00:00:00 2001 From: Sergio De Lorenzis Date: Wed, 17 Apr 2024 10:38:58 +0200 Subject: [PATCH] CrateDB Bulkloader --- assemblies/plugins/dist/pom.xml | 13 + .../transforms/cratedbbulkloader/pom.xml | 44 + .../src/assembly/assembly.xml | 49 + .../src/main/resources/version.xml | 20 + assemblies/plugins/transforms/pom.xml | 3 +- .../integration-tests-cratedb.yaml | 41 + .../resource/cratedb/crate.yml | 36 + .../cratedb/0001-copy-from-existing-file.hpl | 282 ++++ .../cratedb/datasets/dataset-100.csv | 100 ++ integration-tests/cratedb/dev-env-config.json | 3 + integration-tests/cratedb/hop-config.json | 290 ++++ .../main-0001-copy-to-from-filesystem.hwf | 194 +++ .../pipeline-run-configuration/local.json | 21 + .../cratedb/metadata/rdbms/cratedb-test.json | 26 + .../workflow-run-configuration/local.json | 11 + integration-tests/cratedb/project-config.json | 13 + ...baseMeta.java => CrateDBDatabaseMeta.java} | 5 +- .../cratedb/CrateDBDatabaseMetaIT.java | 15 +- .../cratedb/CrateDBDatabaseMetaTest.java | 4 +- .../cratedb/CrateDBValueMetaBaseTest.java | 16 +- .../cratedb/ReleaseSavePointTest.java | 2 +- plugins/transforms/cratedbbulkloader/pom.xml | 58 + .../cratedbbulkloader/CrateDBBulkLoader.java | 916 +++++++++++ .../CrateDBBulkLoaderData.java | 77 + .../CrateDBBulkLoaderDialog.java | 1388 +++++++++++++++++ .../CrateDBBulkLoaderField.java | 72 + .../CrateDBBulkLoaderMeta.java | 937 +++++++++++ .../http/BulkImportClient.java | 117 ++ .../http/CrateDBHttpResponse.java | 39 + .../http/HttpBulkImportResponse.java | 75 + .../http/HttpImportMetrics.java | 42 + .../cratedbbulkloader/http/RowMetrics.java | 34 + .../http/exceptions/CrateDBHopException.java | 26 + .../UnauthorizedCrateDBAccessException.java | 24 + .../src/main/resources/CrateDBBulkLoader.svg | 21 + .../messages/messages_en_US.properties | 125 ++ .../messages/messages_it_IT.properties | 19 + .../http/BulkImportClientIT.java | 163 ++ 
.../http/HttpBulkImportResponseTest.java | 96 ++ .../src/test/resources/crate.yml | 36 + plugins/transforms/pom.xml | 1 + 41 files changed, 5432 insertions(+), 22 deletions(-) create mode 100644 assemblies/plugins/transforms/cratedbbulkloader/pom.xml create mode 100644 assemblies/plugins/transforms/cratedbbulkloader/src/assembly/assembly.xml create mode 100644 assemblies/plugins/transforms/cratedbbulkloader/src/main/resources/version.xml create mode 100644 docker/integration-tests/integration-tests-cratedb.yaml create mode 100644 docker/integration-tests/resource/cratedb/crate.yml create mode 100644 integration-tests/cratedb/0001-copy-from-existing-file.hpl create mode 100644 integration-tests/cratedb/datasets/dataset-100.csv create mode 100644 integration-tests/cratedb/dev-env-config.json create mode 100644 integration-tests/cratedb/hop-config.json create mode 100644 integration-tests/cratedb/main-0001-copy-to-from-filesystem.hwf create mode 100644 integration-tests/cratedb/metadata/pipeline-run-configuration/local.json create mode 100644 integration-tests/cratedb/metadata/rdbms/cratedb-test.json create mode 100644 integration-tests/cratedb/metadata/workflow-run-configuration/local.json create mode 100644 integration-tests/cratedb/project-config.json rename plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/{CrateDbDatabaseMeta.java => CrateDBDatabaseMeta.java} (97%) create mode 100644 plugins/transforms/cratedbbulkloader/pom.xml create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderData.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderDialog.java create mode 100644 
plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderField.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderMeta.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/BulkImportClient.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/CrateDBHttpResponse.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpBulkImportResponse.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpImportMetrics.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/RowMetrics.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/exceptions/CrateDBHopException.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/exceptions/UnauthorizedCrateDBAccessException.java create mode 100644 plugins/transforms/cratedbbulkloader/src/main/resources/CrateDBBulkLoader.svg create mode 100644 plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_en_US.properties create mode 100644 plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_it_IT.properties create mode 100644 plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/BulkImportClientIT.java create mode 100644 
plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpBulkImportResponseTest.java create mode 100644 plugins/transforms/cratedbbulkloader/src/test/resources/crate.yml diff --git a/assemblies/plugins/dist/pom.xml b/assemblies/plugins/dist/pom.xml index 3e036c6e08d..7b58c5cc65f 100644 --- a/assemblies/plugins/dist/pom.xml +++ b/assemblies/plugins/dist/pom.xml @@ -999,6 +999,19 @@ + + org.apache.hop + hop-assemblies-plugins-transforms-cratedbbulkloader + ${project.version} + zip + + + * + * + + + + org.apache.hop hop-assemblies-plugins-transforms-creditcardvalidator diff --git a/assemblies/plugins/transforms/cratedbbulkloader/pom.xml b/assemblies/plugins/transforms/cratedbbulkloader/pom.xml new file mode 100644 index 00000000000..836482ff4fa --- /dev/null +++ b/assemblies/plugins/transforms/cratedbbulkloader/pom.xml @@ -0,0 +1,44 @@ + + + + + 4.0.0 + + org.apache.hop + hop-assemblies-plugins-transforms + 2.9.0-SNAPSHOT + + + hop-assemblies-plugins-transforms-cratedbbulkloader + ${parent.version} + pom + + Hop Assemblies Plugins Transforms CrateDB bulk loader + + + + + org.apache.hop + hop-transform-cratedbbulkloader + ${project.version} + + + + diff --git a/assemblies/plugins/transforms/cratedbbulkloader/src/assembly/assembly.xml b/assemblies/plugins/transforms/cratedbbulkloader/src/assembly/assembly.xml new file mode 100644 index 00000000000..e0ed410d341 --- /dev/null +++ b/assemblies/plugins/transforms/cratedbbulkloader/src/assembly/assembly.xml @@ -0,0 +1,49 @@ + + + + hop-assemblies-plugins-transforms-cratedbbulkloader + + zip + + transforms/cratedbbulkloader + + + ${project.basedir}/src/main/resources/version.xml + . 
+ true + + + + + lib + + **/* + + + + + + false + + org.apache.hop:hop-transform-cratedbbulkloader:jar + + + + diff --git a/assemblies/plugins/transforms/cratedbbulkloader/src/main/resources/version.xml b/assemblies/plugins/transforms/cratedbbulkloader/src/main/resources/version.xml new file mode 100644 index 00000000000..36ab20e22eb --- /dev/null +++ b/assemblies/plugins/transforms/cratedbbulkloader/src/main/resources/version.xml @@ -0,0 +1,20 @@ + + + +${project.version} diff --git a/assemblies/plugins/transforms/pom.xml b/assemblies/plugins/transforms/pom.xml index 891eb41f78e..8fa187c1c8c 100644 --- a/assemblies/plugins/transforms/pom.xml +++ b/assemblies/plugins/transforms/pom.xml @@ -47,6 +47,7 @@ combinationlookup concatfields constant + cratedbbulkloader creditcardvalidator cubeinput cubeoutput @@ -177,4 +178,4 @@ zipfile - \ No newline at end of file + diff --git a/docker/integration-tests/integration-tests-cratedb.yaml b/docker/integration-tests/integration-tests-cratedb.yaml new file mode 100644 index 00000000000..d6044acd7c5 --- /dev/null +++ b/docker/integration-tests/integration-tests-cratedb.yaml @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +services: + integration_test_database: + extends: + file: integration-tests-base.yaml + service: integration_test + depends_on: + cratedb: + condition: service_healthy + links: + - cratedb + + cratedb: + image: crate:latest + ports: + - "4200:4200" + - "5432:5432" + healthcheck: + test: [ "CMD", "curl", "-f", "http://localhost:4200" ] + interval: 20s + timeout: 10s + retries: 6 + start_period: 120s + volumes: + - ./resource/cratedb/crate.yml:/crate/config/crate.yml diff --git a/docker/integration-tests/resource/cratedb/crate.yml b/docker/integration-tests/resource/cratedb/crate.yml new file mode 100644 index 00000000000..ceef64391e8 --- /dev/null +++ b/docker/integration-tests/resource/cratedb/crate.yml @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +auth: + host_based: + enabled: true + config: + 0: + user: crate + # address: _local_ + method: trust + 99: + method: password + +network.host: _local_,_site_ + +# Paths +path: + logs: /data/log + data: /data/data +blobs: + path: /data/blobs diff --git a/integration-tests/cratedb/0001-copy-from-existing-file.hpl b/integration-tests/cratedb/0001-copy-from-existing-file.hpl new file mode 100644 index 00000000000..a6fc0566621 --- /dev/null +++ b/integration-tests/cratedb/0001-copy-from-existing-file.hpl @@ -0,0 +1,282 @@ + + + + + 0001-copy-from-existing-file + Y + + + + Normal + + + N + 1000 + 100 + - + 2024/04/26 15:51:58.413 + - + 2024/04/26 15:51:58.413 + + + + + + Read 100 rows dataset + CrateDB bulk loader + Y + + + + CrateDB bulk loader + CrateDBBulkLoader + + Y + + 1 + + none + + + 10 + cratedb-test + N + + + firstName + firstName + + + lastName + lastName + + + birthDate + birthDate + + + city + city + + + country + country + + + birthDateAndTime + birthDateAndTime + + + http://cratedb:4200/_sql + alice + Encrypted 2be98afc86aa7f2e4bb18bd63c99dbdde + N + doc + Y + Y + N + test
+ N + N + Y + N + + + 608 + 176 + +
+ + Read 100 rows dataset + TextFileInput2 + + Y + + 1 + + none + + + N + N + + + , + " + N + +
N
+ 1 +
N
+ 1 + N + 1 + N + 80 + 0 + Y + N + + N + N + + Unix + + Characters + N + + ${PROJECT_HOME}/datasets/dataset-100.csv + + + N + N + CSV + None + + + + + + firstName + String + + $ + . + , + - + + -1 + 9 + -1 + none + N + + + lastName + String + + $ + . + , + - + + -1 + 11 + -1 + none + N + + + birthDateAndTime + Date + yyyy-MM-dd HH:mm:ss.SSS + $ + . + , + - + + -1 + -1 + -1 + none + N + + + city + String + + $ + . + , + - + + -1 + 17 + -1 + none + N + + + country + String + + $ + . + , + - + + -1 + 37 + -1 + none + N + + + birthDate + Date + yyyy-MM-dd + $ + . + , + - + + -1 + -1 + -1 + none + N + + + 0 + N + N + + + N + + + + + + warning + + error + + line + N + en_US + + + + + + + + + + + 272 + 176 + +
+ + + +
diff --git a/integration-tests/cratedb/datasets/dataset-100.csv b/integration-tests/cratedb/datasets/dataset-100.csv new file mode 100644 index 00000000000..8cf2bccbc06 --- /dev/null +++ b/integration-tests/cratedb/datasets/dataset-100.csv @@ -0,0 +1,100 @@ +Margie,Koss,1999-11-09 01:54:03.940,Clydeport,Martinique,1978-05-24 +Freddie,Marks,1970-09-29 13:24:26.739,Schaeferfort,Madagascar,1993-06-15 +Paul,McLaughlin,2001-11-10 13:57:54.338,Bayermouth,Liechtenstein,1977-09-16 +Mary,Langosh,1973-09-09 15:04:25.396,Austinstad,Slovakia (Slovak Republic),2004-07-19 +Andrea,Wunsch,2006-01-22 18:48:06.902,Lake Renettaburgh,Democratic People's Republic of Korea,1962-11-21 +Leeanne,Brekke,1974-08-30 19:28:02.225,Williamsonside,Pitcairn Islands,2000-10-09 +Laverne,Schinner,1974-09-30 11:39:23.292,North Shantay,Cape Verde,1983-09-21 +Sabrina,Hane,1998-02-20 17:44:50.627,Carlyhaven,Cote d'Ivoire,1983-10-17 +Lettie,Wintheiser,1962-11-25 06:07:49.147,Hermistonchester,Benin,1974-02-19 +Mica,Jacobs,1996-06-19 05:21:41.683,East Fermin,Turks and Caicos Islands,1978-05-22 +Brynn,Lueilwitz,1996-12-10 09:25:45.482,Feeneybury,Western Sahara,2005-11-20 +Stanton,Kirlin,1991-09-08 03:27:18.919,Port Ahmad,Romania,1999-05-09 +Olimpia,Parker,1965-08-09 09:22:32.117,Corkeryville,Gabon,1980-05-23 +Jude,Steuber,2006-03-09 04:29:42.253,Wendieport,Mongolia,2002-04-12 +Adelle,Willms,1971-11-06 22:32:14.376,Hodkiewiczstad,Pitcairn Islands,1983-12-19 +Reanna,O'Keefe,1983-04-25 03:19:08.235,Nelleshire,Congo,1990-06-12 +Les,Boyer,1986-06-08 22:53:55.766,Wehnerstad,Netherlands Antilles,1977-01-09 +Jonna,Ward,1995-07-06 11:12:24.554,Tommyside,Vanuatu,1970-12-03 +Stefani,Bernier,1971-11-22 07:54:33.695,Treenaview,Gibraltar,1970-03-14 +Maryetta,Cronin,1985-11-23 02:36:27.977,East Melissaburgh,Svalbard & Jan Mayen Islands,1984-03-11 +Sophie,Mosciski,1959-12-27 08:43:15.402,Lake Barrettland,Reunion,1984-10-20 +Oscar,Braun,1998-10-21 12:21:35.519,East Scottieside,Cameroon,1963-11-15 +Margorie,Huel,1981-02-17 
16:24:44.769,New Ed,Netherlands Antilles,1963-02-20 +Lenora,Rutherford,1996-09-17 13:11:49.786,Reedbury,Micronesia,2003-05-11 +Tawana,Satterfield,1968-04-07 10:33:12.395,Noelbury,Northern Mariana Islands,1964-05-20 +Roni,Koelpin,1995-09-29 23:37:59.419,South Ericside,Serbia,1965-05-19 +Artie,Thiel,1978-04-08 15:54:37.369,West Demetrice,Grenada,1976-11-07 +Eliseo,Kshlerin,2003-10-29 00:31:20.869,Lurlenefurt,Venezuela,1966-06-11 +Alissa,Konopelski,1961-10-11 17:04:20.527,Brandeeport,Kazakhstan,1991-04-03 +Toi,Balistreri,1973-10-17 03:53:45.908,Greenfelderhaven,Saudi Arabia,1976-08-11 +Katherina,Yost,1979-05-02 09:22:52.437,West Ladonna,Macedonia,1998-10-23 +Harlan,Murazik,1987-03-30 21:49:58.228,New Tamicaburgh,Brunei Darussalam,2004-06-24 +Romaine,Weimann,1978-04-16 10:37:19.724,Port Dorismouth,Mozambique,1993-01-19 +Vanessa,Stanton,1966-08-23 09:19:34.869,Wehnerhaven,Slovenia,2002-04-10 +Clara,Wolff,1965-03-12 23:44:22.234,McDermottview,Spain,1969-08-17 +Carmel,Wisoky,1972-09-04 14:51:16.175,West Carenmouth,French Southern Territories,1999-02-24 +Jimmie,Torp,1985-05-10 06:24:54.840,East Winford,French Southern Territories,1991-06-05 +Livia,Renner,1973-07-22 04:09:16.854,Port Raymonport,Vietnam,1972-07-22 +Raul,Roberts,1987-08-22 07:05:23.376,Fernandehaven,Macedonia,2002-07-07 +Ariane,Cummings,1983-05-24 04:20:53.221,North Kitmouth,French Guiana,1969-09-04 +Janae,Farrell,1996-06-06 08:14:53.323,Ramirotown,Indonesia,2000-08-29 +Tena,Streich,1985-01-28 01:37:26.947,West Stephenview,Bhutan,1991-10-06 +Dalila,Miller,1963-12-21 09:36:45.291,Port Tanner,Andorra,1983-11-02 +Orval,Williamson,1995-10-14 09:40:32.944,Lake Jesusita,Bahamas,1998-02-19 +Loren,Bednar,1994-08-30 09:55:16.326,Leisaburgh,Slovenia,1967-06-13 +Delbert,Sanford,1977-09-28 21:31:15.998,Port Tobyview,Suriname,1997-02-18 +Bernadine,O'Reilly,1989-10-27 22:01:45.709,Dudleyport,Mauritius,1963-01-20 +Jc,Wuckert,2005-02-23 20:59:35.563,Shaneville,Jersey,1989-10-06 +Logan,Wolff,1977-05-01 
09:10:12.470,Croninstad,Guinea-Bissau,1972-12-09 +Erinn,Grady,1987-06-14 00:55:33.162,East Olga,Spain,1969-01-29 +Phung,Will,1998-08-27 16:15:55.348,Nicolasville,Micronesia,1975-04-22 +Kareem,Kuvalis,1973-09-27 19:04:24.486,Loviefort,Cambodia,1995-06-27 +Thad,O'Keefe,1995-05-02 12:38:36.284,New Latonia,Palestinian Territory,1972-02-27 +Andy,Rath,1963-11-25 18:36:41.595,Port Christianaside,Cambodia,1968-10-22 +Jinny,Brakus,1963-05-08 05:47:09.106,New Gena,Lesotho,1966-06-06 +Celena,Goyette,1985-08-23 04:14:25.868,Cruickshankmouth,Equatorial Guinea,1977-05-25 +Tashina,Fisher,1975-08-05 19:53:56.902,Lake Vernon,Romania,1995-08-30 +Rhett,Bradtke,1991-03-06 00:27:07.047,East Erasmo,Pakistan,1976-06-29 +Myrtis,Prohaska,1990-01-18 10:06:09.268,Lake Arleenland,Afghanistan,1978-07-07 +Lamont,Nitzsche,1977-06-26 21:26:23.683,Marcoland,Marshall Islands,1967-09-02 +Alla,Roob,2004-09-20 01:38:54.963,New Dion,Bolivia,1996-07-31 +Lashawna,Collier,2000-12-06 09:18:12.864,Schimmelshire,South Africa,1987-08-18 +Art,Herzog,1996-08-12 18:32:24.749,West Tonyfort,Australia,1968-11-27 +Adam,Smitham,1995-07-18 01:57:39.994,Dareview,Bosnia and Herzegovina,1960-07-16 +Beatrice,Mayer,1966-12-02 04:10:46.476,Port Loren,Jordan,1996-06-14 +Gus,Cremin,1964-02-19 19:23:22.827,Alfredside,Niger,1960-08-19 +Micah,Kulas,1982-08-19 19:30:35.348,Janycemouth,Gabon,2004-08-29 +Avery,Walsh,1971-03-18 19:24:43.817,Chassidyton,Saint Martin,1974-08-20 +Rufus,Runte,2004-01-19 08:39:59.530,Randallburgh,Greenland,1986-12-09 +Curtis,Nitzsche,1964-08-08 18:56:20.513,Lake Rebecca,Mali,1988-04-16 +Dawn,Hansen,1968-09-06 20:41:25.891,North Romeo,Tajikistan,1967-11-08 +Chassidy,Lynch,1990-12-03 14:20:17.393,West Hassan,China,1997-01-13 +Laurena,Graham,1978-06-30 17:32:58.420,Gutmannview,French Polynesia,1987-07-16 +Sabine,Rippin,1985-10-09 05:10:56.815,North Chu,Equatorial Guinea,1999-08-24 +Shalonda,Purdy,2005-02-08 17:41:56.759,West Tamar,Equatorial Guinea,2005-03-29 +Trevor,Kling,1985-11-05 14:47:29.804,North 
Bellafort,Iceland,1974-07-25 +Warner,Hintz,1962-07-13 10:51:14.095,North Amos,Dominica,1985-03-16 +Leigh,Willms,1970-04-21 11:24:28.807,O'Konside,India,1978-01-11 +Charlyn,Schmidt,2004-05-31 06:25:42.785,Leland,Tokelau,1963-12-16 +Dessie,Paucek,1980-02-07 06:20:30.327,Bechtelarton,Malaysia,1984-04-01 +Janette,Jacobson,1992-10-09 11:00:18.605,Kumfurt,French Guiana,2005-07-03 +Young,Langworth,1997-04-14 00:32:38.622,North Geraldstad,Latvia,1980-04-09 +Sparkle,Waters,1964-08-04 12:03:55.542,Rathstad,Gambia,1984-11-27 +Yvonne,Koelpin,1993-01-25 10:09:01.958,South Ivorybury,British Indian Ocean Territory (Chagos Archipelago),1988-09-13 +Burt,Reynolds,1993-07-24 06:09:33.132,North Jonas,Jamaica,1962-04-29 +Chase,Murazik,1987-09-18 16:17:39.061,Wizaborough,Cyprus,1963-11-25 +Oliver,Ruecker,1979-09-03 02:01:21.674,West Nolan,Benin,1971-12-05 +Rolande,Funk,1971-08-04 10:08:40.002,New Wendellland,Venezuela,1988-05-01 +Floyd,Littel,2004-12-12 15:01:04.824,North Leroyton,Niger,1985-09-12 +Sung,Altenwerth,2004-10-20 17:46:24.509,McGlynnberg,British Indian Ocean Territory (Chagos Archipelago),1964-07-29 +Jacque,Torp,1985-06-07 11:19:35.069,Denesikshire,Mozambique,1988-10-05 +Kristyn,Murazik,2001-02-17 06:46:58.560,Brettmouth,Congo,1983-04-08 +Gerardo,Kris,2005-04-26 12:18:36.156,Lefflerside,Zimbabwe,1971-05-16 +Darron,O'Connell,1998-08-31 00:28:08.148,South Cary,Malta,1984-08-22 +Dominga,Dickinson,1993-05-28 17:01:25.087,East Paris,Samoa,1963-10-22 +Wilson,Nolan,1986-06-21 02:32:56.340,West Keenahaven,Norfolk Island,1999-03-16 +Morton,Koelpin,1988-04-14 19:16:46.464,Cummerataberg,Denmark,1965-04-17 +Sharan,Rice,1972-09-10 07:08:35.484,Renafurt,Somalia,1985-11-17 +Hershel,O'Kon,1983-02-27 22:00:08.139,Bethannmouth,Slovakia (Slovak Republic),1980-04-17 +Sophie,Ratke,1981-02-18 15:01:11.883,Leschfurt,Nigeria,1966-04-13 diff --git a/integration-tests/cratedb/dev-env-config.json b/integration-tests/cratedb/dev-env-config.json new file mode 100644 index 00000000000..6367db79923 --- 
/dev/null +++ b/integration-tests/cratedb/dev-env-config.json @@ -0,0 +1,3 @@ +{ + "variables" : [] +} diff --git a/integration-tests/cratedb/hop-config.json b/integration-tests/cratedb/hop-config.json new file mode 100644 index 00000000000..cc0d7486861 --- /dev/null +++ b/integration-tests/cratedb/hop-config.json @@ -0,0 +1,290 @@ +{ + "variables": [ + { + "name": "HOP_LENIENT_STRING_TO_NUMBER_CONVERSION", + "value": "N", + "description": "System wide flag to allow lenient string to number conversion for backward compatibility. If this setting is set to \"Y\", an string starting with digits will be converted successfully into a number. (example: 192.168.1.1 will be converted into 192 or 192.168 or 192168 depending on the decimal and grouping symbol). The default (N) will be to throw an error if non-numeric symbols are found in the string." + }, + { + "name": "HOP_COMPATIBILITY_DB_IGNORE_TIMEZONE", + "value": "N", + "description": "System wide flag to ignore timezone while writing date/timestamp value to the database." + }, + { + "name": "HOP_LOG_SIZE_LIMIT", + "value": "0", + "description": "The log size limit for all pipelines and workflows that don't have the \"log size limit\" property set in their respective properties." + }, + { + "name": "HOP_EMPTY_STRING_DIFFERS_FROM_NULL", + "value": "N", + "description": "NULL vs Empty String. If this setting is set to Y, an empty string and null are different. Otherwise they are not." + }, + { + "name": "HOP_MAX_LOG_SIZE_IN_LINES", + "value": "0", + "description": "The maximum number of log lines that are kept internally by Hop. Set to 0 to keep all rows (default)" + }, + { + "name": "HOP_MAX_LOG_TIMEOUT_IN_MINUTES", + "value": "1440", + "description": "The maximum age (in minutes) of a log line while being kept internally by Hop. 
Set to 0 to keep all rows indefinitely (default)" + }, + { + "name": "HOP_MAX_WORKFLOW_TRACKER_SIZE", + "value": "5000", + "description": "The maximum number of workflow trackers kept in memory" + }, + { + "name": "HOP_MAX_ACTIONS_LOGGED", + "value": "5000", + "description": "The maximum number of action results kept in memory for logging purposes." + }, + { + "name": "HOP_MAX_LOGGING_REGISTRY_SIZE", + "value": "10000", + "description": "The maximum number of logging registry entries kept in memory for logging purposes." + }, + { + "name": "HOP_LOG_TAB_REFRESH_DELAY", + "value": "1000", + "description": "The hop log tab refresh delay." + }, + { + "name": "HOP_LOG_TAB_REFRESH_PERIOD", + "value": "1000", + "description": "The hop log tab refresh period." + }, + { + "name": "HOP_PLUGIN_CLASSES", + "value": null, + "description": "A comma delimited list of classes to scan for plugin annotations" + }, + { + "name": "HOP_PLUGIN_PACKAGES", + "value": null, + "description": "A comma delimited list of packages to scan for plugin annotations (warning: slow!!)" + }, + { + "name": "HOP_TRANSFORM_PERFORMANCE_SNAPSHOT_LIMIT", + "value": "0", + "description": "The maximum number of transform performance snapshots to keep in memory. Set to 0 to keep all snapshots indefinitely (default)" + }, + { + "name": "HOP_ROWSET_GET_TIMEOUT", + "value": "50", + "description": "The name of the variable that optionally contains an alternative rowset get timeout (in ms). This only makes a difference for extremely short lived pipelines." + }, + { + "name": "HOP_ROWSET_PUT_TIMEOUT", + "value": "50", + "description": "The name of the variable that optionally contains an alternative rowset put timeout (in ms). This only makes a difference for extremely short lived pipelines." + }, + { + "name": "HOP_CORE_TRANSFORMS_FILE", + "value": null, + "description": "The name of the project variable that will contain the alternative location of the hop-transforms.xml file. 
You can use this to customize the list of available internal transforms outside of the codebase." + }, + { + "name": "HOP_CORE_WORKFLOW_ACTIONS_FILE", + "value": null, + "description": "The name of the project variable that will contain the alternative location of the hop-workflow-actions.xml file." + }, + { + "name": "HOP_SERVER_OBJECT_TIMEOUT_MINUTES", + "value": "1440", + "description": "This project variable will set a time-out after which waiting, completed or stopped pipelines and workflows will be automatically cleaned up. The default value is 1440 (one day)." + }, + { + "name": "HOP_PIPELINE_PAN_JVM_EXIT_CODE", + "value": null, + "description": "Set this variable to an integer that will be returned as the Pan JVM exit code." + }, + { + "name": "HOP_DISABLE_CONSOLE_LOGGING", + "value": "N", + "description": "Set this variable to Y to disable standard Hop logging to the console. (stdout)" + }, + { + "name": "HOP_REDIRECT_STDERR", + "value": "N", + "description": "Set this variable to Y to redirect stderr to Hop logging." + }, + { + "name": "HOP_REDIRECT_STDOUT", + "value": "N", + "description": "Set this variable to Y to redirect stdout to Hop logging." 
+ }, + { + "name": "HOP_DEFAULT_NUMBER_FORMAT", + "value": null, + "description": "The name of the variable containing an alternative default number format" + }, + { + "name": "HOP_DEFAULT_BIGNUMBER_FORMAT", + "value": null, + "description": "The name of the variable containing an alternative default bignumber format" + }, + { + "name": "HOP_DEFAULT_INTEGER_FORMAT", + "value": null, + "description": "The name of the variable containing an alternative default integer format" + }, + { + "name": "HOP_DEFAULT_DATE_FORMAT", + "value": null, + "description": "The name of the variable containing an alternative default date format" + }, + { + "name": "HOP_DEFAULT_TIMESTAMP_FORMAT", + "value": null, + "description": "The name of the variable containing an alternative default timestamp format" + }, + { + "name": "HOP_DEFAULT_SERVLET_ENCODING", + "value": null, + "description": "Defines the default encoding for servlets, leave it empty to use Java default encoding" + }, + { + "name": "HOP_FAIL_ON_LOGGING_ERROR", + "value": "N", + "description": "Set this variable to Y when you want the workflow/pipeline fail with an error when the related logging process (e.g. to a database) fails." + }, + { + "name": "HOP_AGGREGATION_MIN_NULL_IS_VALUED", + "value": "N", + "description": "Set this variable to Y to set the minimum to NULL if NULL is within an aggregate. Otherwise by default NULL is ignored by the MIN aggregate and MIN is set to the minimum value that is not NULL. See also the variable HOP_AGGREGATION_ALL_NULLS_ARE_ZERO." + }, + { + "name": "HOP_AGGREGATION_ALL_NULLS_ARE_ZERO", + "value": "N", + "description": "Set this variable to Y to return 0 when all values within an aggregate are NULL. Otherwise by default a NULL is returned when all values are NULL." + }, + { + "name": "HOP_COMPATIBILITY_TEXT_FILE_OUTPUT_APPEND_NO_HEADER", + "value": "N", + "description": "Set this variable to Y for backward compatibility for the Text File Output transform. 
Setting this to Ywill add no header row at all when the append option is enabled, regardless if the file is existing or not." + }, + { + "name": "HOP_PASSWORD_ENCODER_PLUGIN", + "value": "Hop", + "description": "Specifies the password encoder plugin to use by ID (Hop is the default)." + }, + { + "name": "HOP_SYSTEM_HOSTNAME", + "value": null, + "description": "You can use this variable to speed up hostname lookup. Hostname lookup is performed by Hop so that it is capable of logging the server on which a workflow or pipeline is executed." + }, + { + "name": "HOP_SERVER_JETTY_ACCEPTORS", + "value": null, + "description": "A variable to configure jetty option: acceptors for Carte" + }, + { + "name": "HOP_SERVER_JETTY_ACCEPT_QUEUE_SIZE", + "value": null, + "description": "A variable to configure jetty option: acceptQueueSize for Carte" + }, + { + "name": "HOP_SERVER_JETTY_RES_MAX_IDLE_TIME", + "value": null, + "description": "A variable to configure jetty option: lowResourcesMaxIdleTime for Carte" + }, + { + "name": "HOP_COMPATIBILITY_MERGE_ROWS_USE_REFERENCE_STREAM_WHEN_IDENTICAL", + "value": "N", + "description": "Set this variable to Y for backward compatibility for the Merge Rows (diff) transform. Setting this to Y will use the data from the reference stream (instead of the comparison stream) in case the compared rows are identical." + }, + { + "name": "HOP_SPLIT_FIELDS_REMOVE_ENCLOSURE", + "value": "false", + "description": "Set this variable to false to preserve enclosure symbol after splitting the string in the Split fields transform. Changing it to true will remove first and last enclosure symbol from the resulting string chunks." + }, + { + "name": "HOP_ALLOW_EMPTY_FIELD_NAMES_AND_TYPES", + "value": "false", + "description": "Set this variable to TRUE to allow your pipeline to pass 'null' fields and/or empty types." 
+ }, + { + "name": "HOP_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT", + "value": "false", + "description": "Set this variable to false to preserve global log variables defined in pipeline / workflow Properties -> Log panel. Changing it to true will clear it when export pipeline / workflow." + }, + { + "name": "HOP_FILE_OUTPUT_MAX_STREAM_COUNT", + "value": "1024", + "description": "This project variable is used by the Text File Output transform. It defines the max number of simultaneously open files within the transform. The transform will close/reopen files as necessary to insure the max is not exceeded" + }, + { + "name": "HOP_FILE_OUTPUT_MAX_STREAM_LIFE", + "value": "0", + "description": "This project variable is used by the Text File Output transform. It defines the max number of milliseconds between flushes of files opened by the transform." + }, + { + "name": "HOP_USE_NATIVE_FILE_DIALOG", + "value": "N", + "description": "Set this value to Y if you want to use the system file open/save dialog when browsing files" + }, + { + "name": "HOP_AUTO_CREATE_CONFIG", + "value": "Y", + "description": "Set this value to N if you don't want to automatically create a hop configuration file (hop-config.json) when it's missing" + } + ], + "LocaleDefault": "en_BE", + "guiProperties": { + "FontFixedSize": "13", + "MaxUndo": "100", + "DarkMode": "Y", + "FontNoteSize": "13", + "ShowOSLook": "Y", + "FontFixedStyle": "0", + "FontNoteName": ".AppleSystemUIFont", + "FontFixedName": "Monospaced", + "FontGraphStyle": "0", + "FontDefaultSize": "13", + "GraphColorR": "255", + "FontGraphSize": "13", + "IconSize": "32", + "BackgroundColorB": "255", + "FontNoteStyle": "0", + "FontGraphName": ".AppleSystemUIFont", + "FontDefaultName": ".AppleSystemUIFont", + "GraphColorG": "255", + "UseGlobalFileBookmarks": "Y", + "FontDefaultStyle": "0", + "GraphColorB": "255", + "BackgroundColorR": "255", + "BackgroundColorG": "255", + "WorkflowDialogStyle": "RESIZE,MAX,MIN", + "LineWidth": "1", + 
"ContextDialogShowCategories": "Y" + }, + "projectsConfig": { + "enabled": true, + "projectMandatory": true, + "environmentMandatory": false, + "defaultProject": "default", + "defaultEnvironment": null, + "standardParentProject": "default", + "standardProjectsFolder": null, + "projectConfigurations": [ + { + "projectName": "default", + "projectHome": "${HOP_CONFIG_FOLDER}", + "configFilename": "project-config.json" + } + ], + "lifecycleEnvironments": [ + { + "name": "dev", + "purpose": "Testing", + "projectName": "default", + "configurationFiles": [ + "${PROJECT_HOME}/dev-env-config.json" + ] + } + ], + "projectLifecycles": [] + } +} diff --git a/integration-tests/cratedb/main-0001-copy-to-from-filesystem.hwf b/integration-tests/cratedb/main-0001-copy-to-from-filesystem.hwf new file mode 100644 index 00000000000..a26183e4971 --- /dev/null +++ b/integration-tests/cratedb/main-0001-copy-to-from-filesystem.hwf @@ -0,0 +1,194 @@ + + + + main-0001-copy-to-from-filesystem + Y + + + + - + 2024/04/26 15:51:06.701 + - + 2024/04/26 15:51:06.701 + + + + + Start + + SPECIAL + + 1 + 12 + 60 + 0 + 0 + N + 0 + 1 + N + 96 + 144 + + + + 0001-copy-from-existing-file + + PIPELINE + + N + N + N + N + N + N + ${PROJECT_HOME}/0001-copy-from-existing-file.hpl + Basic + + Y + + N + local + N + N + Y + N + 800 + 144 + + + + Create test user and grant privileges + + SQL + + cratedb-test + N + DROP USER IF EXISTS alice; +CREATE USER alice WITH (password='password'); +GRANT ALL PRIVILEGES TO alice; + N + N + N + 288 + 144 + + + + Create test table + + SQL + + cratedb-test + N + DROP TABLE IF EXISTS doc.test; +CREATE TABLE doc.test +( + firstName VARCHAR(255) +, lastName VARCHAR(255) +, birthDate TIMESTAMP +, city VARCHAR(255) +, country VARCHAR(100) +, birthDateAndTime TIMESTAMP +); + N + N + N + 544 + 144 + + + + Check if 100 rows + + EVAL_TABLE_CONTENT + + cratedb-test + doc + test + rows_count_equal + 100 + N + N + + N + Y + N + 1152 + 144 + + + + Refresh test table + + SQL + + 
cratedb-test + N + REFRESH TABLE doc.test; + N + N + N + 1008 + 144 + + + + + + Start + Create test user and grant privileges + Y + Y + Y + + + Create test user and grant privileges + Create test table + Y + Y + Y + + + Create test table + 0001-copy-from-existing-file + Y + N + Y + + + 0001-copy-from-existing-file + Refresh test table + Y + Y + N + + + Refresh test table + Check if 100 rows + Y + Y + N + + + + + + diff --git a/integration-tests/cratedb/metadata/pipeline-run-configuration/local.json b/integration-tests/cratedb/metadata/pipeline-run-configuration/local.json new file mode 100644 index 00000000000..11a9ee03d27 --- /dev/null +++ b/integration-tests/cratedb/metadata/pipeline-run-configuration/local.json @@ -0,0 +1,21 @@ +{ + "engineRunConfiguration": { + "Local": { + "feedback_size": "50000", + "sample_size": "100", + "sample_type_in_gui": "Last", + "wait_time": "20", + "rowset_size": "10000", + "safe_mode": false, + "show_feedback": false, + "topo_sort": false, + "gather_metrics": false, + "transactional": false + } + }, + "defaultSelection": true, + "configurationVariables": [], + "name": "local", + "description": "", + "executionInfoLocationName": "Runs your pipelines locally with the standard local Hop pipeline engine" +} \ No newline at end of file diff --git a/integration-tests/cratedb/metadata/rdbms/cratedb-test.json b/integration-tests/cratedb/metadata/rdbms/cratedb-test.json new file mode 100644 index 00000000000..400703de389 --- /dev/null +++ b/integration-tests/cratedb/metadata/rdbms/cratedb-test.json @@ -0,0 +1,26 @@ +{ + "rdbms": { + "CRATEDB": { + "databaseName": "test", + "pluginId": "CRATEDB", + "accessType": 0, + "hostname": "cratedb", + "password": "Encrypted 2be98afc86aa7f2e4bb18bd63c99dbdde", + "pluginName": "CrateDB", + "port": "5432", + "attributes": { + "SUPPORTS_TIMESTAMP_DATA_TYPE": "Y", + "QUOTE_ALL_FIELDS": "N", + "SUPPORTS_BOOLEAN_DATA_TYPE": "Y", + "FORCE_IDENTIFIERS_TO_LOWERCASE": "N", + "PRESERVE_RESERVED_WORD_CASE": "Y", + 
"SQL_CONNECT": "", + "FORCE_IDENTIFIERS_TO_UPPERCASE": "N", + "PREFERRED_SCHEMA_NAME": "" + }, + "manualUrl": "", + "username": "crate" + } + }, + "name": "cratedb-test" +} \ No newline at end of file diff --git a/integration-tests/cratedb/metadata/workflow-run-configuration/local.json b/integration-tests/cratedb/metadata/workflow-run-configuration/local.json new file mode 100644 index 00000000000..1d0cf74baec --- /dev/null +++ b/integration-tests/cratedb/metadata/workflow-run-configuration/local.json @@ -0,0 +1,11 @@ +{ + "engineRunConfiguration": { + "Local": { + "safe_mode": false, + "transactional": false + } + }, + "defaultSelection": true, + "name": "local", + "description": "Runs your workflows locally with the standard local Hop workflow engine" +} \ No newline at end of file diff --git a/integration-tests/cratedb/project-config.json b/integration-tests/cratedb/project-config.json new file mode 100644 index 00000000000..6a91171e1c8 --- /dev/null +++ b/integration-tests/cratedb/project-config.json @@ -0,0 +1,13 @@ +{ + "metadataBaseFolder" : "${PROJECT_HOME}/metadata", + "unitTestsBasePath" : "${PROJECT_HOME}", + "dataSetsCsvFolder" : "${PROJECT_HOME}/datasets", + "enforcingExecutionInHome" : true, + "config" : { + "variables" : [ { + "name" : "HOP_LICENSE_HEADER_FILE", + "value" : "${PROJECT_HOME}/../asf-header.txt", + "description" : "This will automatically serialize the ASF license header into pipelines and workflows in the integration test projects" + } ] + } +} \ No newline at end of file diff --git a/plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/CrateDbDatabaseMeta.java b/plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMeta.java similarity index 97% rename from plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/CrateDbDatabaseMeta.java rename to plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMeta.java index 
f3edf652084..86196c6c6fb 100644 --- a/plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/CrateDbDatabaseMeta.java +++ b/plugins/databases/cratedb/src/main/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMeta.java @@ -25,9 +25,10 @@ @DatabaseMetaPlugin( type = "CRATEDB", typeDescription = "CrateDB", - documentationUrl = "/database/databases/cratedb.html") + documentationUrl = "/database/databases/cratedb.html", + classLoaderGroup = "crate-db") @GuiPlugin(id = "GUI-CrateDBDatabaseMeta") -public class CrateDbDatabaseMeta extends PostgreSqlDatabaseMeta { +public class CrateDBDatabaseMeta extends PostgreSqlDatabaseMeta { private static final String SEQUENCES_NOT_SUPPORTED = "CrateDB does not support sequences"; diff --git a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaIT.java b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaIT.java index 6360d0cdbe0..92fc989a9b2 100644 --- a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaIT.java +++ b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaIT.java @@ -41,7 +41,7 @@ public class CrateDBDatabaseMetaIT { private static Connection connection; - private CrateDbDatabaseMeta nativeMeta = new CrateDbDatabaseMeta(); + private CrateDBDatabaseMeta nativeMeta = new CrateDBDatabaseMeta(); @BeforeClass public static void setup() throws Exception { @@ -83,14 +83,13 @@ public void doNotSupportSequences() { public void sqlStatements() throws Exception { executeUpdate( "INSERT INTO foo (id, name, description) VALUES (1, 'Alice', 'test_description');"); - Thread.sleep(1500); // need a break to make sure the data is there: unfortunately, - // CrateDB does not support transactions, rather it promote eventual consistency. 
- // Using an async lib for assertions like awaitility would be a better approach + executeUpdate("REFRESH TABLE foo;"); + int counter = 0; ResultSet rs = executeQuery(nativeMeta.getSqlQueryFields("foo")); while (rs.next()) { counter++; - assertTrue("Alice".equals(rs.getString("name"))); + assertEquals("Alice", rs.getString("name")); } assertTrue(counter > 0); @@ -98,7 +97,7 @@ public void sqlStatements() throws Exception { rs = executeQuery(nativeMeta.getSqlTableExists("foo")); while (rs.next()) { counter++; - assertTrue("Alice".equals(rs.getString("name"))); + assertEquals("Alice", rs.getString("name")); } assertTrue(counter > 0); @@ -106,7 +105,7 @@ public void sqlStatements() throws Exception { rs = executeQuery(nativeMeta.getSqlQueryColumnFields("name", "foo")); while (rs.next()) { counter++; - assertTrue("Alice".equals(rs.getString("name"))); + assertEquals("Alice", rs.getString("name")); } assertTrue(counter > 0); @@ -114,7 +113,7 @@ public void sqlStatements() throws Exception { rs = executeQuery(nativeMeta.getSqlColumnExists("name", "foo")); while (rs.next()) { counter++; - assertTrue("Alice".equals(rs.getString("name"))); + assertEquals("Alice", rs.getString("name")); } assertTrue(counter > 0); } diff --git a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaTest.java b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaTest.java index b43971d2675..ef46b6aed91 100644 --- a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaTest.java +++ b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBDatabaseMetaTest.java @@ -35,13 +35,13 @@ import org.junit.Test; public class CrateDBDatabaseMetaTest { - CrateDbDatabaseMeta nativeMeta; + CrateDBDatabaseMeta nativeMeta; private static final String SEQUENCES_NOT_SUPPORTED = "CrateDB doesn't support sequences"; @Before public void setupBefore() { - nativeMeta = new 
CrateDbDatabaseMeta(); + nativeMeta = new CrateDBDatabaseMeta(); nativeMeta.setAccessType(DatabaseMeta.TYPE_ACCESS_NATIVE); } diff --git a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBValueMetaBaseTest.java b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBValueMetaBaseTest.java index d7d8689a941..28fabd390dc 100644 --- a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBValueMetaBaseTest.java +++ b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/CrateDBValueMetaBaseTest.java @@ -125,7 +125,7 @@ public List getEvents() { @Test public void test_PDI_17126_Postgres() throws Exception { String data = StringUtils.repeat("*", 10); - initValueMeta(new CrateDbDatabaseMeta(), 20, data); + initValueMeta(new CrateDBDatabaseMeta(), 20, data); verify(preparedStatementMock, times(1)).setString(0, data); } @@ -137,7 +137,7 @@ public void test_PDI_17126_Postgres() throws Exception { @Test public void test_Pdi_17126_postgres_DataLongerThanMetaLength() throws Exception { String data = StringUtils.repeat("*", 20); - initValueMeta(new CrateDbDatabaseMeta(), 10, data); + initValueMeta(new CrateDBDatabaseMeta(), 10, data); verify(preparedStatementMock, times(1)).setString(0, data); } @@ -151,7 +151,7 @@ public void test_Pdi_17126_postgres_truncate() throws Exception { List events = listener.getEvents(); assertEquals(0, events.size()); - databaseMetaSpy.setIDatabase(new CrateDbDatabaseMeta()); + databaseMetaSpy.setIDatabase(new CrateDBDatabaseMeta()); doReturn(1024).when(databaseMetaSpy).getMaxTextFieldLength(); doReturn(false).when(databaseMetaSpy).supportsSetCharacterStream(); @@ -184,7 +184,7 @@ public void testMetdataPreviewSqlNumericWithUndefinedSizeUsingPostgesSql() doReturn(0).when(resultSet).getInt("COLUMN_SIZE"); doReturn(mock(Object.class)).when(resultSet).getObject("DECIMAL_DIGITS"); doReturn(0).when(resultSet).getInt("DECIMAL_DIGITS"); - 
doReturn(mock(CrateDbDatabaseMeta.class)).when(dbMeta).getIDatabase(); + doReturn(mock(CrateDBDatabaseMeta.class)).when(dbMeta).getIDatabase(); IValueMeta valueMeta = valueMetaBase.getMetadataPreview(variables, dbMeta, resultSet); assertFalse(valueMeta.isBigNumber()); // TODO: VALIDATE! assertEquals(0, valueMeta.getPrecision()); // TODO: VALIDATE! @@ -194,7 +194,7 @@ public void testMetdataPreviewSqlNumericWithUndefinedSizeUsingPostgesSql() @Test public void testMetdataPreviewSqlBinaryToHopBinary() throws SQLException, HopDatabaseException { doReturn(Types.BINARY).when(resultSet).getInt("DATA_TYPE"); - doReturn(mock(CrateDbDatabaseMeta.class)).when(dbMeta).getIDatabase(); + doReturn(mock(CrateDBDatabaseMeta.class)).when(dbMeta).getIDatabase(); IValueMeta valueMeta = valueMetaBase.getMetadataPreview(variables, dbMeta, resultSet); assertTrue(valueMeta.isBinary()); } @@ -202,7 +202,7 @@ public void testMetdataPreviewSqlBinaryToHopBinary() throws SQLException, HopDat @Test public void testMetdataPreviewSqlBlobToHopBinary() throws SQLException, HopDatabaseException { doReturn(Types.BLOB).when(resultSet).getInt("DATA_TYPE"); - doReturn(mock(CrateDbDatabaseMeta.class)).when(dbMeta).getIDatabase(); + doReturn(mock(CrateDBDatabaseMeta.class)).when(dbMeta).getIDatabase(); IValueMeta valueMeta = valueMetaBase.getMetadataPreview(variables, dbMeta, resultSet); assertTrue(valueMeta.isBinary()); assertTrue(valueMeta.isBinary()); @@ -212,7 +212,7 @@ public void testMetdataPreviewSqlBlobToHopBinary() throws SQLException, HopDatab public void testMetdataPreviewSqlVarBinaryToHopBinary() throws SQLException, HopDatabaseException { doReturn(Types.VARBINARY).when(resultSet).getInt("DATA_TYPE"); - doReturn(mock(CrateDbDatabaseMeta.class)).when(dbMeta).getIDatabase(); + doReturn(mock(CrateDBDatabaseMeta.class)).when(dbMeta).getIDatabase(); IValueMeta valueMeta = valueMetaBase.getMetadataPreview(variables, dbMeta, resultSet); assertTrue(valueMeta.isBinary()); } @@ -221,7 +221,7 @@ public 
void testMetdataPreviewSqlVarBinaryToHopBinary() public void testMetdataPreviewSqlLongVarBinaryToHopBinary() throws SQLException, HopDatabaseException { doReturn(Types.LONGVARBINARY).when(resultSet).getInt("DATA_TYPE"); - doReturn(mock(CrateDbDatabaseMeta.class)).when(dbMeta).getIDatabase(); + doReturn(mock(CrateDBDatabaseMeta.class)).when(dbMeta).getIDatabase(); IValueMeta valueMeta = valueMetaBase.getMetadataPreview(variables, dbMeta, resultSet); assertTrue(valueMeta.isBinary()); } diff --git a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/ReleaseSavePointTest.java b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/ReleaseSavePointTest.java index bc6d0edd76e..b56310235b8 100644 --- a/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/ReleaseSavePointTest.java +++ b/plugins/databases/cratedb/src/test/java/org/apache/hop/databases/cratedb/ReleaseSavePointTest.java @@ -26,7 +26,7 @@ public class ReleaseSavePointTest { IDatabase[] support = new IDatabase[] { - new CrateDbDatabaseMeta(), + new CrateDBDatabaseMeta(), }; @Test diff --git a/plugins/transforms/cratedbbulkloader/pom.xml b/plugins/transforms/cratedbbulkloader/pom.xml new file mode 100644 index 00000000000..9d23df22d15 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/pom.xml @@ -0,0 +1,58 @@ + + + + + 4.0.0 + + org.apache.hop + hop-plugins-transforms + 2.9.0-SNAPSHOT + + + hop-transform-cratedbbulkloader + jar + + Hop Plugins Transforms CrateDB bulk loader + + + 2.7.0 + + + + + io.crate + crate-jdbc + ${cratedb-jdbc.version} + runtime + + + org.apache.hop + hop-databases-cratedb + ${project.version} + + + org.testcontainers + cratedb + 1.19.7 + test + + + + diff --git a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java new 
file mode 100644 index 00000000000..06bf7c411f1 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java @@ -0,0 +1,916 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader; + +import com.fasterxml.jackson.core.JsonProcessingException; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import org.apache.commons.lang3.StringUtils; +import org.apache.hop.core.Const; +import org.apache.hop.core.database.Database; +import org.apache.hop.core.database.DatabaseMeta; +import org.apache.hop.core.exception.HopDatabaseException; +import org.apache.hop.core.exception.HopException; +import org.apache.hop.core.exception.HopTransformException; +import org.apache.hop.core.exception.HopValueException; +import org.apache.hop.core.row.IRowMeta; +import org.apache.hop.core.row.IValueMeta; +import org.apache.hop.core.row.RowMeta; +import org.apache.hop.core.row.value.ValueMetaString; +import org.apache.hop.core.util.Utils; +import org.apache.hop.core.vfs.HopVfs; +import org.apache.hop.i18n.BaseMessages; +import org.apache.hop.pipeline.Pipeline; +import org.apache.hop.pipeline.PipelineMeta; +import org.apache.hop.pipeline.transform.BaseTransform; +import org.apache.hop.pipeline.transform.TransformMeta; +import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.BulkImportClient; +import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.HttpBulkImportResponse; +import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.exceptions.CrateDBHopException; + +public class CrateDBBulkLoader extends BaseTransform { + private static final Class PKG = + CrateDBBulkLoader.class; // for i18n purposes, needed by Translator2!! 
+ public static final String TIMESTAMP_CONVERSION_MASK = "yyyy-MM-dd HH:mm:ss.SSS"; + public static final String DATE_CONVERSION_MASK = "yyyy-MM-dd"; + + private final BulkImportClient bulkImportClient = + new BulkImportClient(meta.getHttpEndpoint(), meta.getHttpLogin(), meta.getHttpPassword()); + + public CrateDBBulkLoader( + TransformMeta transformMeta, + CrateDBBulkLoaderMeta meta, + CrateDBBulkLoaderData data, + int copyNr, + PipelineMeta pipelineMeta, + Pipeline pipeline) { + super(transformMeta, meta, data, copyNr, pipelineMeta, pipeline); + } + + @Override + public boolean init() { + + if (super.init()) { + try { + // Validating that the connection has been defined. + verifyDatabaseConnection(); + data.databaseMeta = this.getPipelineMeta().findDatabase(meta.getConnection(), variables); + + if (meta.isStreamToS3Csv()) { + if (!meta.isUseHttpEndpoint()) { + // get the file output stream to write to S3 + data.writer = HopVfs.getOutputStream(resolve(meta.getReadFromFilename()), false); + } + } + + data.db = new Database(this, this, data.databaseMeta); + data.db.connect(); + getDbFields(); + + if (log.isBasic()) { + logBasic( + BaseMessages.getString( + PKG, "CrateDBBulkLoader.Connection.Connected", data.db.getDatabaseMeta())); + } + initBinaryDataFields(); + + data.db.setAutoCommit(false); + + return true; + } catch (HopException e) { + logError("An error occurred initializing this transform: " + e.getMessage()); + stopAll(); + setErrors(1); + } + } + return false; + } + + @Override + public boolean processRow() throws HopException { + + Object[] r = getRow(); // this also waits for a previous transform to be finished. + + if (r == null) { // no more input to be expected... 
+ if (first && meta.isTruncateTable() && !meta.isOnlyWhenHaveRows()) { + truncateTable(); + } + + if (!first) { + try { + data.close(); + closeFile(); + if (meta.isUseHttpEndpoint()) { + String[] columns = + meta.getFields().stream() + .map(CrateDBBulkLoaderField::getDatabaseField) + .toArray(String[]::new); + data.outputRowMeta.getValueMetaList().forEach(v -> logBasic(v.toString())); + String schema = meta.getSchemaName(); + String table = meta.getTableName(); + writeBatchToCrateDB(schema, table, columns); + } else { + String copyStmt = buildCopyStatementSqlString(); + Connection conn = data.db.getConnection(); + Statement stmt = conn.createStatement(); + final ResultSet resultSet = stmt.executeQuery(copyStmt); + int errorCount = 0; + while (resultSet.next()) { + String node = resultSet.getString("node"); + String uri = resultSet.getString("uri"); + int successCount = resultSet.getInt("success_count"); + errorCount = resultSet.getInt("error_count"); + String errors = resultSet.getString("errors"); + logError( + "Node: " + + node + + " URI: " + + uri + + " Success Count: " + + successCount + + " Error Count: " + + errorCount + + " Errors: " + + errors); + incrementLinesOutput(successCount); + incrementLinesRejected(errorCount); + } + conn.commit(); + stmt.close(); + conn.close(); + if (errorCount > 0) { + throw new HopException( + "Failed to COPY FROM CSV file to CrateDB: " + errorCount + " rows failed"); + } + } + + } catch (SQLException sqle) { + setErrors(1); + stopAll(); + setOutputDone(); // signal end to receiver(s) + throw new HopDatabaseException("Error executing COPY statements", sqle); + } catch (IOException ioe) { + setErrors(1); + stopAll(); + setOutputDone(); // signal end to receiver(s) + throw new HopTransformException("Error releasing resources", ioe); + } catch (CrateDBHopException e) { + throw new HopException(e); + } + } + + return false; + } + + if (first) { + first = false; + if (meta.isStreamToS3Csv()) { + + data.fieldnrs = new HashMap<>(); + 
+ // if (meta.isTruncateTable()) { + // // truncateTable(); + // } + + meta.getFields(data.insertRowMeta, getTransformName(), null, null, this, metadataProvider); + + if (!meta.specifyFields()) { + // write all fields in the stream to CrateDB + // Just take the whole input row + data.insertRowMeta = getInputRowMeta().clone(); + data.selectedRowFieldIndices = new int[data.insertRowMeta.size()]; + // TODO Serasoft + // Is the statement below really needed?? + try { + getDbFields(); + } catch (HopException e) { + logError("Error getting database fields", e); + setErrors(1); + stopAll(); + setOutputDone(); // signal end to receiver(s) + return false; + } + + defineAllFieldsMetadataList(); + } else { + defineSelectedFieldsMetadataList(); + } + } + } + + data.outputRowMeta = getInputRowMeta().clone(); + + if (!meta.isUseHttpEndpoint()) { + if (meta.isStreamToS3Csv()) { + writeRowToFile(data.outputRowMeta, r); + } + } else { + appendRowAsJsonLine(data.outputRowMeta, r); + try { + writeIfBatchSizeRecordsAreReached(); + } catch (CrateDBHopException e) { + throw new HopException(e); + } + } + putRow(getInputRowMeta().clone(), r); + + return true; + } + + private void incrementLinesRejected(int count) { + for (int i = 0; i < count; i++) { + incrementLinesRejected(); + } + } + + private void incrementLinesOutput(int count) { + for (int i = 0; i < count; i++) { + incrementLinesOutput(); + } + } + + private void defineAllFieldsMetadataList() throws HopException { + for (int i = 0; i < meta.getFields().size(); i++) { + int streamFieldLocation = + data.insertRowMeta.indexOfValue(meta.getFields().get(i).getStreamField()); + if (streamFieldLocation < 0) { + throw new HopTransformException( + "Field [" + + meta.getFields().get(i).getStreamField() + + "] couldn't be found in the input stream!"); + } + + data.selectedRowFieldIndices[i] = streamFieldLocation; + + int dbFieldLocation = -1; + for (int e = 0; e < data.dbFields.size(); e++) { + String[] field = data.dbFields.get(e); + if 
(field[0].equalsIgnoreCase(meta.getFields().get(i).getDatabaseField())) { + dbFieldLocation = e; + break; + } + } + if (dbFieldLocation < 0) { + throw new HopException( + "Field [" + + meta.getFields().get(i).getDatabaseField() + + "] couldn't be found in the table!"); + } + IValueMeta inputValueMeta = getInputRowMeta().getValueMeta(streamFieldLocation); + + IValueMeta insertValueMeta = inputValueMeta.clone(); + insertValueMeta.setName(data.dbFields.get(dbFieldLocation)[0]); + + data.insertRowMeta.addValueMeta(insertValueMeta); + + data.fieldnrs.put( + meta.getFields().get(i).getDatabaseField().toUpperCase(), streamFieldLocation); + } + } + + private void defineSelectedFieldsMetadataList() throws HopTransformException { + // use the columns/fields mapping. + int numberOfInsertFields = meta.getFields().size(); + data.insertRowMeta = new RowMeta(); + + // Cache the position of the selected fields in the row array + data.selectedRowFieldIndices = new int[numberOfInsertFields]; + for (int i = 0; i < meta.getFields().size(); i++) { + CrateDBBulkLoaderField vbf = meta.getFields().get(i); + String inputFieldName = vbf.getStreamField(); + int inputFieldIdx = getInputRowMeta().indexOfValue(inputFieldName); + if (inputFieldIdx < 0) { + throw new HopTransformException( + BaseMessages.getString( + PKG, "CrateDBBulkLoader.Exception.FieldRequired", inputFieldName)); // $NON-NLS-1$ + } + data.selectedRowFieldIndices[i] = inputFieldIdx; + String insertFieldName = vbf.getDatabaseField(); + IValueMeta inputValueMeta = getInputRowMeta().getValueMeta(inputFieldIdx); + if (inputValueMeta == null) { + throw new HopTransformException( + BaseMessages.getString( + PKG, + "CrateDBBulkLoader.Exception.FailedToFindField", + vbf.getStreamField())); // $NON-NLS-1$ + } + IValueMeta insertValueMeta = inputValueMeta.clone(); + insertValueMeta.setName(insertFieldName); + data.insertRowMeta.addValueMeta(insertValueMeta); + data.fieldnrs.put(meta.getFields().get(i).getDatabaseField().toUpperCase(), 
inputFieldIdx); + } + } + + private void writeIfBatchSizeRecordsAreReached() throws HopException, CrateDBHopException { + int maxBatchSize = Integer.parseInt(meta.getBatchSize()); + if (data.httpBulkArgs.size() >= maxBatchSize) { + String[] columns = + meta.getFields().stream() + .map(CrateDBBulkLoaderField::getDatabaseField) + .toArray(String[]::new); + String schema = meta.getSchemaName(); + String table = meta.getTableName(); + writeBatchToCrateDB(schema, table, columns); + } + } + + private void writeBatchToCrateDB(String schema, String table, String[] columns) + throws HopException, CrateDBHopException { + try { + final HttpBulkImportResponse httpResponse = + bulkImportClient.batchInsert(schema, table, columns, data.httpBulkArgs); + // TODO Serasoft + // Review this way to calculate lines output + for (int i = 0; i < httpResponse.outputRows(); i++) { + incrementLinesOutput(); + } + for (int i = 0; i < httpResponse.rejectedRows(); i++) { + incrementLinesRejected(); + } + switch (httpResponse.statusCode()) { + case 200: + data.httpBulkArgs.clear(); + break; + case 401: + throw new HopException("Unauthorized access to CrateDB"); + default: + throw new HopException("Error sending bulk import request"); + } + if (200 == httpResponse.statusCode()) { + data.httpBulkArgs.clear(); + } else { + throw new HopException("Error sending bulk import request"); + } + } catch (JsonProcessingException e) { + throw new HopException("Error sending bulk import request ", e); + } + } + + private void appendRowAsJsonLine(IRowMeta rowMeta, Object[] row) throws HopTransformException { + Object[] args = new Object[rowMeta.size()]; + try { + for (int i = 0; i < data.insertRowMeta.size(); i++) { + IValueMeta v = data.insertRowMeta.getValueMeta(i); + args[i] = convertDatatypeIfNeeded(v, row[data.selectedRowFieldIndices[i]], i); + } + + data.convertedRowMetaReady = true; + + data.httpBulkArgs.add(args); + } catch (Exception e) { + throw new HopTransformException("Error writing JSON line to 
file", e); + } + } + + private String convertDatatypeIfNeeded(IValueMeta v, Object rowItem, int pos) + throws HopException { + IValueMeta vc = null; + String convertedValue = null; + + if (!data.convertedRowMetaReady && data.convertedRowMeta == null) + data.convertedRowMeta = data.insertRowMeta.clone(); + + switch (v.getType()) { + case IValueMeta.TYPE_STRING: + convertedValue = (String) rowItem; + break; + case IValueMeta.TYPE_INTEGER: + convertedValue = String.valueOf(rowItem); + break; + case IValueMeta.TYPE_TIMESTAMP: + vc = new ValueMetaString(); + vc.setName(v.getName()); + v.setConversionMask(TIMESTAMP_CONVERSION_MASK); + vc.setConversionMask(TIMESTAMP_CONVERSION_MASK); + convertedValue = (String) vc.convertData(v, rowItem); + break; + case IValueMeta.TYPE_DATE: + vc = new ValueMetaString(); + vc.setName(v.getName()); + v.setConversionMask(DATE_CONVERSION_MASK); + vc.setConversionMask(DATE_CONVERSION_MASK); + convertedValue = (String) vc.convertData(v, rowItem); + break; + default: + convertedValue = (String) rowItem; + break; + } + + logDetailed("Field: " + v.getName() + " - Converted Value: " + convertedValue); + + if (vc != null && !data.convertedRowMetaReady) data.convertedRowMeta.setValueMeta(pos, vc); + + return convertedValue; + } + + /** + * Closes a file so that its file handle is no longer open + * + * @return true if we successfully closed the file + */ + private boolean closeFile() { + boolean returnValue = false; + + try { + if (data.writer != null) { + data.writer.flush(); + data.writer.close(); + } + data.writer = null; + if (log.isDebug()) { + logDebug("Closing normal file ..."); + } + + returnValue = true; + } catch (Exception e) { + logError("Exception trying to close file: " + e.toString()); + setErrors(1); + returnValue = false; + } + return returnValue; + } + + private String buildCopyStatementSqlString() { + final DatabaseMeta databaseMeta = data.db.getDatabaseMeta(); + + StringBuilder sb = new StringBuilder(150); + sb.append("COPY "); 
+ + sb.append( + databaseMeta.getQuotedSchemaTableCombination( + variables, + data.db.resolve(meta.getSchemaName()), + data.db.resolve(meta.getTableName()))); + + sb.append(" ("); + List fieldList = meta.getFields(); + for (int i = 0; i < fieldList.size(); i++) { + CrateDBBulkLoaderField field = fieldList.get(i); + if (i > 0) { + sb.append(", " + field.getDatabaseField()); + } else { + sb.append(field.getDatabaseField()); + } + } + sb.append(")"); + + String awsAccessKeyId = ""; + String awsSecretAccessKey = ""; + if (meta.isUseSystemEnvVars()) { + awsAccessKeyId = System.getenv("AWS_ACCESS_KEY_ID"); + awsSecretAccessKey = System.getenv("AWS_SECRET_ACCESS_KEY"); + } else { + awsAccessKeyId = resolve(meta.getAwsAccessKeyId()); + awsSecretAccessKey = resolve(meta.getAwsSecretAccessKey()); + } + + String awsSec = awsAccessKeyId + ":" + awsSecretAccessKey; + String filename = resolve(meta.getReadFromFilename()); + + String uriLeft = filename.substring(0, 5); + String uriRight = filename.substring(5, filename.length()); + + filename = uriLeft + awsSec + "@" + uriRight; + + sb.append(" FROM '" + filename + "'"); + sb.append(" WITH (format='csv', wait_for_completion=true"); + sb.append(", header=false"); + sb.append(", delimiter='" + CrateDBBulkLoaderMeta.DEFAULT_CSV_DELIMITER + "'"); + sb.append(")"); + sb.append(" RETURN SUMMARY"); + + logDetailed("Copy stmt: " + sb.toString()); + + return sb.toString(); + } + + /** + * Runs a desc table to get the fields, and field types from the database. Uses a desc table as + * opposed to the select * from table limit 0 that Hop normally uses to get the fields and types, + * due to the need to handle the Time type. The select * method through Hop does not give us the + * ability to differentiate time from timestamp. 
+ * + * @throws HopException + */ + private void getDbFields() throws HopException { + data.dbFields = new ArrayList<>(); + + IRowMeta rowMeta = null; + + if (!StringUtils.isEmpty(resolve(meta.getSchemaName()))) { + rowMeta = data.db.getTableFields(meta.getSchemaName() + "." + meta.getTableName()); + } else { + rowMeta = data.db.getTableFields(meta.getTableName()); + } + try { + if (rowMeta.size() == 0) { + throw new HopException("No fields found in table"); + } + + for (int i = 0; i < rowMeta.size(); i++) { + String field[] = new String[2]; + field[0] = rowMeta.getValueMeta(i).getName().toUpperCase(); + field[1] = rowMeta.getValueMeta(i).getTypeDesc().toUpperCase(); + data.dbFields.add(field); + } + } catch (Exception ex) { + throw new HopException("Error getting database fields", ex); + } + } + + protected void verifyDatabaseConnection() throws HopException { + // Confirming Database Connection is defined. + if (meta.getConnection() == null) { + throw new HopException( + BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Error.NoConnection")); + } + } + + /** + * Initialize the binary values of delimiters, enclosures, and escape characters + * + * @throws HopException + */ + private void initBinaryDataFields() throws HopException { + try { + data.binarySeparator = new byte[] {}; + data.binaryEnclosure = new byte[] {}; + data.binaryNewline = new byte[] {}; + data.escapeCharacters = new byte[] {}; + + data.binarySeparator = + resolve(CrateDBBulkLoaderMeta.DEFAULT_CSV_DELIMITER).getBytes(StandardCharsets.UTF_8); + data.binaryEnclosure = + resolve(CrateDBBulkLoaderMeta.ENCLOSURE).getBytes(StandardCharsets.UTF_8); + data.binaryNewline = + CrateDBBulkLoaderMeta.CSV_RECORD_DELIMITER.getBytes(StandardCharsets.UTF_8); + data.escapeCharacters = + CrateDBBulkLoaderMeta.CSV_ESCAPE_CHAR.getBytes(StandardCharsets.UTF_8); + + data.binaryNullValue = "".getBytes(StandardCharsets.UTF_8); + } catch (Exception e) { + throw new HopException("Unexpected error while encoding binary 
fields", e); + } + } + + /** + * Writes an individual row of data to a temp file + * + * @param rowMeta The metadata about the row + * @param row The input row + * @throws HopTransformException + */ + private void writeRowToFile(IRowMeta rowMeta, Object[] row) throws HopTransformException { + + try { + for (int i = 0; i < data.insertRowMeta.size(); i++) { + if (i > 0 && data.binarySeparator.length > 0) { + data.writer.write(data.binarySeparator); + } + + Object convertedValue = null; + IValueMeta v = data.insertRowMeta.getValueMeta(i); + convertedValue = convertDatatypeIfNeeded(v, row[data.selectedRowFieldIndices[i]], i); + writeField( + data.convertedRowMeta.getValueMeta(i), + convertedValue, + (!meta.isSpecifyFields() ? null : data.binaryNullValue)); + } + data.convertedRowMetaReady = true; + data.writer.write(data.binaryNewline); + } catch (Exception e) { + throw new HopTransformException("Error writing line", e); + } + } + + /** + * Writes an individual field to the temp file. + * + * @param v The metadata about the column + * @param valueData The data for the column + * @param nullString The bytes to put in the temp file if the value is null + * @throws HopTransformException + */ + private void writeField(IValueMeta v, Object valueData, byte[] nullString) + throws HopTransformException { + try { + byte[] str; + + // First check whether or not we have a null string set + // These values should be set when a null value passes + // + if (nullString != null && v.isNull(valueData)) { + str = nullString; + } else { + str = formatField(v, valueData); + } + + if (str != null && str.length > 0) { + List enclosures = null; + boolean writeEnclosures = false; + + if (v.isString()) { + writeEnclosures = true; + + if (containsSeparatorOrEnclosure( + str, data.binarySeparator, data.binaryEnclosure, data.escapeCharacters)) { + writeEnclosures = true; + } + } + + if (writeEnclosures) { + data.writer.write(data.binaryEnclosure); + enclosures = getEnclosurePositions(str); + } + 
+ if (enclosures == null) { + data.writer.write(str); + } else { + // Skip the enclosures, escape them instead... + int from = 0; + for (Integer enclosure : enclosures) { + // Minus one to write the escape before the enclosure + int position = enclosure; + data.writer.write(str, from, position - from); + data.writer.write(data.escapeCharacters); // write enclosure a second time + from = position; + } + if (from < str.length) { + data.writer.write(str, from, str.length - from); + } + } + + if (writeEnclosures) { + data.writer.write(data.binaryEnclosure); + } + } + } catch (Exception e) { + throw new HopTransformException("Error writing field content to file", e); + } + } + + /** + * Takes an input field and converts it to bytes to be stored in the temp file. + * + * @param v The metadata about the column + * @param valueData The column data + * @return The bytes for the value + * @throws HopValueException + */ + private byte[] formatField(IValueMeta v, Object valueData) throws HopValueException { + if (v.isString()) { + if (v.isStorageBinaryString() + && v.getTrimType() == IValueMeta.TRIM_TYPE_NONE + && v.getLength() < 0 + && StringUtils.isEmpty(v.getStringEncoding())) { + return (byte[]) valueData; + } else { + String svalue = (valueData instanceof String) ? (String) valueData : v.getString(valueData); + + // trim or cut to size if needed. 
+ // + return convertStringToBinaryString(v, Const.trimToType(svalue, v.getTrimType())); + } + } else { + return v.getBinaryString(valueData); + } + } + + /** + * Converts an input string to the bytes for the string + * + * @param v The metadata about the column + * @param string The column data + * @return The bytes for the value + * @throws HopValueException + */ + private byte[] convertStringToBinaryString(IValueMeta v, String string) { + int length = v.getLength(); + + if (string == null) { + return new byte[] {}; + } + + if (length > -1 && length < string.length()) { + // we need to truncate + String tmp = string.substring(0, length); + return tmp.getBytes(StandardCharsets.UTF_8); + + } else { + byte[] text; + text = string.getBytes(StandardCharsets.UTF_8); + + if (length > string.length()) { + // we need to pad this + + int size = 0; + byte[] filler; + filler = " ".getBytes(StandardCharsets.UTF_8); + size = text.length + filler.length * (length - string.length()); + + byte[] bytes = new byte[size]; + System.arraycopy(text, 0, bytes, 0, text.length); + if (filler.length == 1) { + java.util.Arrays.fill(bytes, text.length, size, filler[0]); + } else { + int currIndex = text.length; + for (int i = 0; i < (length - string.length()); i++) { + for (byte aFiller : filler) { + bytes[currIndex++] = aFiller; + } + } + } + return bytes; + } else { + // do not need to pad or truncate + return text; + } + } + } + + /** + * Check if a string contains separators or enclosures. Can be used to determine if the string + * needs enclosures around it or not. 
+ * + * @param source The string to check + * @param separator The separator character(s) + * @param enclosure The enclosure character(s) + * @param escape The escape character(s) + * @return True if the string contains separators or enclosures + */ + @SuppressWarnings("Duplicates") + private boolean containsSeparatorOrEnclosure( + byte[] source, byte[] separator, byte[] enclosure, byte[] escape) { + boolean result = false; + + boolean enclosureExists = enclosure != null && enclosure.length > 0; + boolean separatorExists = separator != null && separator.length > 0; + boolean escapeExists = escape != null && escape.length > 0; + + // Skip entire test if neither separator nor enclosure exist + if (separatorExists || enclosureExists || escapeExists) { + + // Search for the first occurrence of the separator or enclosure + for (int index = 0; !result && index < source.length; index++) { + if (enclosureExists && source[index] == enclosure[0]) { + + // Potential match found, make sure there are enough bytes to support a full match + if (index + enclosure.length <= source.length) { + // First byte of enclosure found + result = true; // Assume match + for (int i = 1; i < enclosure.length; i++) { + if (source[index + i] != enclosure[i]) { + // Enclosure match is proven false + result = false; + break; + } + } + } + + } else if (separatorExists && source[index] == separator[0]) { + + // Potential match found, make sure there are enough bytes to support a full match + if (index + separator.length <= source.length) { + // First byte of separator found + result = true; // Assume match + for (int i = 1; i < separator.length; i++) { + if (source[index + i] != separator[i]) { + // Separator match is proven false + result = false; + break; + } + } + } + + } else if (escapeExists && source[index] == escape[0]) { + + // Potential match found, make sure there are enough bytes to support a full match + if (index + escape.length <= source.length) { + // First byte of separator found + 
result = true; // Assume match + for (int i = 1; i < escape.length; i++) { + if (source[index + i] != escape[i]) { + // Separator match is proven false + result = false; + break; + } + } + } + } + } + } + return result; + } + + /** + * Gets the positions of any double quotes or backslashes in the string + * + * @param str The string to check + * @return The positions within the string of double quotes and backslashes. + */ + private List getEnclosurePositions(byte[] str) { + List positions = null; + // +1 because otherwise we will not find it at the end + for (int i = 0, len = str.length; i < len; i++) { + // verify if on position i there is an enclosure + // + boolean found = true; + for (int x = 0; found && x < data.binaryEnclosure.length; x++) { + if (str[i + x] != data.binaryEnclosure[x]) { + found = false; + } + } + + if (!found) { + found = true; + for (int x = 0; found && x < data.escapeCharacters.length; x++) { + if (str[i + x] != data.escapeCharacters[x]) { + found = false; + } + } + } + + if (found) { + if (positions == null) { + positions = new ArrayList<>(); + } + positions.add(i); + } + } + return positions; + } + + @Override + public void stopRunning() throws HopException { + setStopped(true); + if (data.workerThread != null) { + synchronized (data.workerThread) { + if (data.workerThread.isAlive() && !data.workerThread.isInterrupted()) { + try { + data.workerThread.interrupt(); + data.workerThread.join(); + } catch (InterruptedException e) { // Checkstyle:OFF: + } + // Checkstyle:ONN: + } + } + } + + super.stopRunning(); + } + + void truncateTable() throws HopDatabaseException { + if (meta.isTruncateTable() && ((getCopy() == 0) || !Utils.isEmpty(getPartitionId()))) { + data.db.truncateTable(resolve(meta.getSchemaName()), resolve(meta.getTableName())); + } + } + + @Override + public void dispose() { + + setOutputDone(); + + try { + if (getErrors() > 0) { + data.db.rollback(); + } + } catch (HopDatabaseException e) { + logError("Unexpected error rolling 
back the database connection.", e); + } + + if (data.workerThread != null) { + try { + data.workerThread.join(); + } catch (InterruptedException e) { // Checkstyle:OFF: + } + // Checkstyle:ONN: + } + + if (data.db != null) { + data.db.disconnect(); + } + super.dispose(); + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderData.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderData.java new file mode 100644 index 00000000000..7328188eca1 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderData.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader; + +import java.io.IOException; +import java.io.OutputStream; +import java.io.PipedInputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.apache.hop.core.database.Database; +import org.apache.hop.core.database.DatabaseMeta; +import org.apache.hop.core.row.IRowMeta; +import org.apache.hop.pipeline.transform.BaseTransformData; +import org.apache.hop.pipeline.transform.ITransformData; + +public class CrateDBBulkLoaderData extends BaseTransformData implements ITransformData { + protected Database db; + protected DatabaseMeta databaseMeta; + + protected int[] selectedRowFieldIndices; + + protected IRowMeta outputRowMeta; + protected IRowMeta insertRowMeta; + protected IRowMeta convertedRowMeta; + + protected boolean convertedRowMetaReady = false; + + // A list of table fields mapped to their data type. String[0] is the field name, String[1] is + // the CrateDB + // data type + public ArrayList dbFields; + + // Maps table fields to the location of the corresponding field on the input stream. + public Map fieldnrs; + + protected OutputStream writer; + + protected List httpBulkArgs = new ArrayList<>(); + // Byte arrays for constant characters put into output files. 
+ public byte[] binarySeparator; + public byte[] binaryEnclosure; + public byte[] escapeCharacters; + public byte[] binaryNewline; + public byte[] binaryNullValue; + + protected PipedInputStream pipedInputStream; + + protected volatile Thread workerThread; + + public CrateDBBulkLoaderData() { + super(); + + db = null; + } + + public IRowMeta getInsertRowMeta() { + return insertRowMeta; + } + + public void close() throws IOException {} +} diff --git a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderDialog.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderDialog.java new file mode 100644 index 00000000000..fd9a52559a8 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderDialog.java @@ -0,0 +1,1388 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang.StringUtils; +import org.apache.hop.core.Const; +import org.apache.hop.core.DbCache; +import org.apache.hop.core.Props; +import org.apache.hop.core.SourceToTargetMapping; +import org.apache.hop.core.SqlStatement; +import org.apache.hop.core.database.Database; +import org.apache.hop.core.database.DatabaseMeta; +import org.apache.hop.core.exception.HopException; +import org.apache.hop.core.exception.HopTransformException; +import org.apache.hop.core.row.IRowMeta; +import org.apache.hop.core.row.IValueMeta; +import org.apache.hop.core.row.RowMeta; +import org.apache.hop.core.util.StringUtil; +import org.apache.hop.core.util.Utils; +import org.apache.hop.core.variables.IVariables; +import org.apache.hop.i18n.BaseMessages; +import org.apache.hop.pipeline.PipelineMeta; +import org.apache.hop.pipeline.transform.BaseTransformMeta; +import org.apache.hop.pipeline.transform.ITransformDialog; +import org.apache.hop.pipeline.transform.ITransformMeta; +import org.apache.hop.pipeline.transform.TransformMeta; +import org.apache.hop.ui.core.PropsUi; +import org.apache.hop.ui.core.database.dialog.DatabaseExplorerDialog; +import org.apache.hop.ui.core.database.dialog.SqlEditor; +import org.apache.hop.ui.core.dialog.BaseDialog; +import org.apache.hop.ui.core.dialog.EnterMappingDialog; +import org.apache.hop.ui.core.dialog.EnterSelectionDialog; +import org.apache.hop.ui.core.dialog.ErrorDialog; +import org.apache.hop.ui.core.widget.ColumnInfo; +import org.apache.hop.ui.core.widget.MetaSelectionLine; +import org.apache.hop.ui.core.widget.PasswordTextVar; +import org.apache.hop.ui.core.widget.TableView; +import org.apache.hop.ui.core.widget.TextVar; +import org.apache.hop.ui.pipeline.transform.BaseTransformDialog; +import 
org.eclipse.swt.SWT; +import org.eclipse.swt.custom.CTabFolder; +import org.eclipse.swt.custom.CTabItem; +import org.eclipse.swt.events.FocusAdapter; +import org.eclipse.swt.events.FocusEvent; +import org.eclipse.swt.events.FocusListener; +import org.eclipse.swt.events.ModifyListener; +import org.eclipse.swt.events.SelectionAdapter; +import org.eclipse.swt.events.SelectionEvent; +import org.eclipse.swt.layout.FormAttachment; +import org.eclipse.swt.layout.FormData; +import org.eclipse.swt.layout.FormLayout; +import org.eclipse.swt.widgets.Button; +import org.eclipse.swt.widgets.Composite; +import org.eclipse.swt.widgets.Control; +import org.eclipse.swt.widgets.Event; +import org.eclipse.swt.widgets.Label; +import org.eclipse.swt.widgets.Listener; +import org.eclipse.swt.widgets.MessageBox; +import org.eclipse.swt.widgets.Shell; +import org.eclipse.swt.widgets.TableItem; +import org.eclipse.swt.widgets.Text; + +public class CrateDBBulkLoaderDialog extends BaseTransformDialog implements ITransformDialog { + + private static final Class PKG = + CrateDBBulkLoaderMeta.class; // for i18n purposes, needed by Translator2!! 
+ public static final String SYSTEM_DIALOG_ERROR_TITLE = "System.Dialog.Error.Title"; + + private MetaSelectionLine wConnection; + + private TextVar wSchema; + + private Label wlHttpEndpoint; + private TextVar wHttpEndpoint; + + private Label wlBatchSize; + private TextVar wBatchSize; + + private TextVar wTable; + + private Button wStreamToS3Csv; + + // private ComboVar wLoadFromExistingFileFormat; + + private TextVar wReadFromFilename; + + private Button wUseHTTPEndpoint; + + private TableView wFields; + + private Button wGetFields; + + private Button wDoMapping; + + private CrateDBBulkLoaderMeta input; + + private Map inputFields; + + private ColumnInfo[] ciFields; + + private Label wlUseSystemVars; + private Button wUseSystemVars; + private Label wlAccessKeyId; + private TextVar wAccessKeyId; + private Label wlSecretAccessKey; + private TextVar wSecretAccessKey; + + private Button wSpecifyFields; + + private TextVar wHttpLogin; + + private TextVar wHttpPassword; + + /** List of ColumnInfo that should have the field names of the selected database table */ + private List tableFieldColumns = new ArrayList<>(); + + private Label wlHttpLogin; + private Label wlHttpPassword; + + /** Constructor. */ + public CrateDBBulkLoaderDialog( + Shell parent, IVariables variables, Object in, PipelineMeta pipelineMeta, String sname) { + super(parent, variables, (BaseTransformMeta) in, pipelineMeta, sname); + input = (CrateDBBulkLoaderMeta) in; + inputFields = new HashMap<>(); + } + + /** Open the dialog. 
*/ + public String open() { + + Shell parent = getParent(); + + shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN); + PropsUi.setLook(shell); + setShellImage(shell, input); + + ModifyListener lsMod = e -> input.setChanged(); + FocusListener lsFocusLost = + new FocusAdapter() { + @Override + public void focusLost(FocusEvent arg0) { + setTableFieldCombo(); + } + }; + backupChanged = input.hasChanged(); + + int middle = props.getMiddlePct(); + int margin = Const.MARGIN; + + FormLayout formLayout = new FormLayout(); + formLayout.marginWidth = Const.FORM_MARGIN; + formLayout.marginHeight = Const.FORM_MARGIN; + + shell.setLayout(formLayout); + shell.setText(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.DialogTitle")); + + // TransformName line + wlTransformName = new Label(shell, SWT.RIGHT); + wlTransformName.setText(BaseMessages.getString("System.Label.TransformName")); + PropsUi.setLook(wlTransformName); + fdlTransformName = new FormData(); + fdlTransformName.left = new FormAttachment(0, 0); + fdlTransformName.right = new FormAttachment(middle, -margin); + fdlTransformName.top = new FormAttachment(0, margin * 2); + wlTransformName.setLayoutData(fdlTransformName); + wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + wTransformName.setText(transformName); + PropsUi.setLook(wTransformName); + wTransformName.addModifyListener(lsMod); + fdTransformName = new FormData(); + fdTransformName.left = new FormAttachment(middle, 0); + fdTransformName.top = new FormAttachment(0, margin * 2); + fdTransformName.right = new FormAttachment(100, 0); + wTransformName.setLayoutData(fdTransformName); + + Control lastControl = wTransformName; + + // Schema line... 
+ Label wlSchema = new Label(shell, SWT.RIGHT); + wlSchema.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.TargetSchema.Label")); // $NON-NLS-1$ + PropsUi.setLook(wlSchema); + FormData fdlSchema = new FormData(); + fdlSchema.left = new FormAttachment(0, 0); + fdlSchema.right = new FormAttachment(middle, -margin); + fdlSchema.top = new FormAttachment(lastControl, margin * 2); + wlSchema.setLayoutData(fdlSchema); + + Button wbSchema = new Button(shell, SWT.PUSH | SWT.CENTER); + PropsUi.setLook(wbSchema); + wbSchema.setText(BaseMessages.getString("System.Button.Browse")); + FormData fdbSchema = new FormData(); + fdbSchema.right = new FormAttachment(100, 0); + fdbSchema.top = new FormAttachment(lastControl, margin * 2); + wbSchema.setLayoutData(fdbSchema); + + wSchema = new TextVar(variables, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + PropsUi.setLook(wSchema); + wSchema.addModifyListener(lsMod); + wSchema.addFocusListener(lsFocusLost); + FormData fdSchema = new FormData(); + fdSchema.left = new FormAttachment(middle, 0); + fdSchema.top = new FormAttachment(lastControl, margin * 2); + fdSchema.right = new FormAttachment(wbSchema, -margin); + wSchema.setLayoutData(fdSchema); + + lastControl = wSchema; + + // Table line... 
+ Label wlTable = new Label(shell, SWT.RIGHT); + wlTable.setText(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.TargetTable.Label")); + PropsUi.setLook(wlTable); + FormData fdlTable = new FormData(); + fdlTable.left = new FormAttachment(0, 0); + fdlTable.right = new FormAttachment(middle, -margin); + fdlTable.top = new FormAttachment(lastControl, margin); + wlTable.setLayoutData(fdlTable); + + Button wbTable = new Button(shell, SWT.PUSH | SWT.CENTER); + PropsUi.setLook(wbTable); + wbTable.setText(BaseMessages.getString("System.Button.Browse")); + FormData fdbTable = new FormData(); + fdbTable.right = new FormAttachment(100, 0); + fdbTable.top = new FormAttachment(lastControl, margin); + wbTable.setLayoutData(fdbTable); + + wTable = new TextVar(variables, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + PropsUi.setLook(wTable); + wTable.addModifyListener(lsMod); + wTable.addFocusListener(lsFocusLost); + FormData fdTable = new FormData(); + fdTable.top = new FormAttachment(lastControl, margin); + fdTable.left = new FormAttachment(middle, 0); + fdTable.right = new FormAttachment(wbTable, -margin); + wTable.setLayoutData(fdTable); + + lastControl = wTable; + + // Truncate table + // wlTruncate = new Label(shell, SWT.RIGHT); + // wlTruncate.setText(BaseMessages.getString(PKG, + // "CrateDBBulkLoaderDialog.TruncateTable.Label")); + // PropsUi.setLook(wlTruncate); + // FormData fdlTruncate = new FormData(); + // fdlTruncate.top = new FormAttachment(lastControl, margin); + // fdlTruncate.left = new FormAttachment(0, 0); + // fdlTruncate.right = new FormAttachment(middle, -margin); + // wlTruncate.setLayoutData(fdlTruncate); + // wTruncate = new Button(shell, SWT.CHECK); + // PropsUi.setLook(wTruncate); + // FormData fdTruncate = new FormData(); + // fdTruncate.top = new FormAttachment(lastControl, margin * 3); + // fdTruncate.left = new FormAttachment(middle, 0); + // fdTruncate.right = new FormAttachment(100, 0); + // wTruncate.setLayoutData(fdTruncate); + // 
SelectionAdapter lsTruncMod = + // new SelectionAdapter() { + // @Override + // public void widgetSelected(SelectionEvent arg0) { + // input.setChanged(); + // } + // }; + // wTruncate.addSelectionListener(lsTruncMod); + // wTruncate.addSelectionListener( + // new SelectionAdapter() { + // @Override + // public void widgetSelected(SelectionEvent e) { + // setFlags(); + // } + // }); + // lastControl = wlTruncate; + // + // // Truncate only when have rows + // Label wlOnlyWhenHaveRows = new Label(shell, SWT.RIGHT); + // wlOnlyWhenHaveRows.setText( + // BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.OnlyWhenHaveRows.Label")); + // PropsUi.setLook(wlOnlyWhenHaveRows); + // FormData fdlOnlyWhenHaveRows = new FormData(); + // fdlOnlyWhenHaveRows.top = new FormAttachment(lastControl, margin); + // fdlOnlyWhenHaveRows.left = new FormAttachment(0, 0); + // fdlOnlyWhenHaveRows.right = new FormAttachment(middle, -margin); + // wlOnlyWhenHaveRows.setLayoutData(fdlOnlyWhenHaveRows); + // wOnlyWhenHaveRows = new Button(shell, SWT.CHECK); + // wOnlyWhenHaveRows.setToolTipText( + // BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.OnlyWhenHaveRows.Tooltip")); + // PropsUi.setLook(wOnlyWhenHaveRows); + // FormData fdTruncateWhenHaveRows = new FormData(); + // fdTruncateWhenHaveRows.top = new FormAttachment(lastControl, margin * 3); + // fdTruncateWhenHaveRows.left = new FormAttachment(middle, 0); + // fdTruncateWhenHaveRows.right = new FormAttachment(100, 0); + // wOnlyWhenHaveRows.setLayoutData(fdTruncateWhenHaveRows); + // wOnlyWhenHaveRows.addSelectionListener(lsSelMod); + + lastControl = wlTable; + + CTabFolder wTabFolder = new CTabFolder(shell, SWT.BORDER); + PropsUi.setLook(wTabFolder, Props.WIDGET_STYLE_TAB); + + // Some buttons + wOk = new Button(shell, SWT.PUSH); + wOk.setText(BaseMessages.getString("System.Button.OK")); + wCreate = new Button(shell, SWT.PUSH); + wCreate.setText(BaseMessages.getString("System.Button.SQL")); + wCancel = new Button(shell, 
SWT.PUSH); + wCancel.setText(BaseMessages.getString("System.Button.Cancel")); + + addGeneralTab(wTabFolder, margin, middle, lsMod, lsFocusLost); + addAwsAuthenticationTab(wTabFolder, margin, middle, lsMod, lsFocusLost); + addHttpAuthenticationTab(wTabFolder, margin, middle, lsMod, lsFocusLost); + addFieldsTab(wTabFolder, margin, lsMod); + + setButtonPositions(new Button[] {wOk, wCancel, wCreate}, margin, null); + + FormData fdTabFolder = new FormData(); + fdTabFolder.left = new FormAttachment(0, 0); + fdTabFolder.top = new FormAttachment(lastControl, margin * 4); + fdTabFolder.right = new FormAttachment(100, 0); + fdTabFolder.bottom = new FormAttachment(wOk, -2 * margin); + wTabFolder.setLayoutData(fdTabFolder); + wTabFolder.setSelection(0); + + // Add listeners + wOk.addListener(SWT.Selection, c -> ok()); + wCancel.addListener(SWT.Selection, c -> cancel()); + wCreate.addListener(SWT.Selection, c -> sql()); + wGetFields.addListener(SWT.Selection, c -> get()); + wbTable.addListener(SWT.Selection, c -> getTableName()); + wbSchema.addListener(SWT.Selection, c -> getSchemaName()); + + // Set the shell size, based upon previous time... 
+ setSize(); + getData(); + setTableFieldCombo(); + + toggleHttpEndpointFlags(); + toggleSpecifyFieldsFlags(); + toggleKeysSelection(); + + input.setChanged(backupChanged); + + BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel()); + return transformName; + } + + private void addFieldsTab(CTabFolder wTabFolder, int margin, ModifyListener lsMod) { + + CTabItem wFieldsTab = new CTabItem(wTabFolder, SWT.NONE); + wFieldsTab.setText( + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.FieldsTab.TabItem.Label")); // $NON-NLS-1$ + + Composite wFieldsComp = new Composite(wTabFolder, SWT.NONE); + PropsUi.setLook(wFieldsComp); + + FormLayout fieldsCompLayout = new FormLayout(); + fieldsCompLayout.marginWidth = Const.FORM_MARGIN; + fieldsCompLayout.marginHeight = Const.FORM_MARGIN; + wFieldsComp.setLayout(fieldsCompLayout); + + // The fields table + Label wlFields = new Label(wFieldsComp, SWT.NONE); + wlFields.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.InsertFields.Label")); // $NON-NLS-1$ + PropsUi.setLook(wlFields); + FormData fdlUpIns = new FormData(); + fdlUpIns.left = new FormAttachment(0, 0); + fdlUpIns.top = new FormAttachment(0, margin); + wlFields.setLayoutData(fdlUpIns); + + int tableCols = 2; + int upInsRows = + (input.getFields() != null && !input.getFields().equals(Collections.emptyList()) + ? 
input.getFields().size() + : 1); + + ciFields = new ColumnInfo[tableCols]; + ciFields[0] = + new ColumnInfo( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ColumnInfo.TableField"), + ColumnInfo.COLUMN_TYPE_CCOMBO, + new String[] {""}, + false); //$NON-NLS-1$ + ciFields[1] = + new ColumnInfo( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ColumnInfo.StreamField"), + ColumnInfo.COLUMN_TYPE_CCOMBO, + new String[] {""}, + false); //$NON-NLS-1$ + tableFieldColumns.add(ciFields[0]); + wFields = + new TableView( + variables, + wFieldsComp, + SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, + ciFields, + upInsRows, + lsMod, + props); + + wGetFields = new Button(wFieldsComp, SWT.PUSH); + wGetFields.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.GetFields.Button")); // $NON-NLS-1$ + FormData fdGetFields = new FormData(); + fdGetFields.top = new FormAttachment(wlFields, margin); + fdGetFields.right = new FormAttachment(100, 0); + wGetFields.setLayoutData(fdGetFields); + + wDoMapping = new Button(wFieldsComp, SWT.PUSH); + wDoMapping.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.DoMapping.Button")); // $NON-NLS-1$ + FormData fdDoMapping = new FormData(); + fdDoMapping.top = new FormAttachment(wGetFields, margin); + fdDoMapping.right = new FormAttachment(100, 0); + wDoMapping.setLayoutData(fdDoMapping); + + wDoMapping.addListener( + SWT.Selection, + new Listener() { + public void handleEvent(Event arg0) { + generateMappings(); + } + }); + + FormData fdFields = new FormData(); + fdFields.left = new FormAttachment(0, 0); + fdFields.top = new FormAttachment(wlFields, margin); + fdFields.right = new FormAttachment(wDoMapping, -margin); + fdFields.bottom = new FormAttachment(100, -2 * margin); + wFields.setLayoutData(fdFields); + + FormData fdFieldsComp = new FormData(); + fdFieldsComp.left = new FormAttachment(0, 0); + fdFieldsComp.top = new FormAttachment(0, 0); + fdFieldsComp.right = new 
FormAttachment(100, 0); + fdFieldsComp.bottom = new FormAttachment(100, 0); + wFieldsComp.setLayoutData(fdFieldsComp); + + wFieldsComp.layout(); + wFieldsTab.setControl(wFieldsComp); + + getFieldsFromPrevious(); + } + + private void getFieldsFromPrevious() { + // + // Search the fields in the background + // + + final Runnable runnable = + new Runnable() { + public void run() { + TransformMeta transformMeta = pipelineMeta.findTransform(transformName); + if (transformMeta != null) { + try { + IRowMeta row = pipelineMeta.getPrevTransformFields(variables, transformMeta); + + // Remember these fields... + for (int i = 0; i < row.size(); i++) { + inputFields.put(row.getValueMeta(i).getName(), Integer.valueOf(i)); + } + + setComboBoxes(); + } catch (HopException e) { + log.logError( + toString(), BaseMessages.getString("System.Dialog.GetFieldsFailed.Message")); + } + } + } + }; + new Thread(runnable).start(); + } + + private void addAwsAuthenticationTab( + CTabFolder wTabFolder, + int margin, + int middle, + ModifyListener lsMod, + FocusListener lsFocusLost) { + + CTabItem wOptionsTab = new CTabItem(wTabFolder, SWT.NONE); + wOptionsTab.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.AWSTab.TabItem.Label")); // $NON-NLS-1$ + + FormLayout optionsLayout = new FormLayout(); + optionsLayout.marginWidth = 3; + optionsLayout.marginHeight = 3; + + Composite wOptionsComp = new Composite(wTabFolder, SWT.NONE); + PropsUi.setLook(wOptionsComp); + wOptionsComp.setLayout(optionsLayout); + + wlUseSystemVars = new Label(wOptionsComp, SWT.RIGHT); + wlUseSystemVars.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.Authenticate.UseSystemVars.Label")); + wlUseSystemVars.setToolTipText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.Authenticate.UseSystemVars.Tooltip")); + PropsUi.setLook(wlUseSystemVars); + FormData fdlUseSystemVars = new FormData(); + fdlUseSystemVars.top = new FormAttachment(0, margin); + fdlUseSystemVars.left = new FormAttachment(0, 
0); + fdlUseSystemVars.right = new FormAttachment(middle, -margin); + wlUseSystemVars.setLayoutData(fdlUseSystemVars); + + wUseSystemVars = new Button(wOptionsComp, SWT.CHECK); + wUseSystemVars.setSelection(true); + PropsUi.setLook(wUseSystemVars); + FormData fdUseSystemVars = new FormData(); + fdUseSystemVars.top = new FormAttachment(0, margin); + fdUseSystemVars.left = new FormAttachment(middle, 0); + fdUseSystemVars.right = new FormAttachment(100, 0); + wUseSystemVars.setLayoutData(fdUseSystemVars); + + Control lastControl = wlUseSystemVars; + + wUseSystemVars.addSelectionListener( + new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent e) { + toggleKeysSelection(); + } + }); + + wlAccessKeyId = new Label(wOptionsComp, SWT.RIGHT); + wlAccessKeyId.setText("AWS_ACCESS_KEY_ID"); + PropsUi.setLook(wlAccessKeyId); + FormData fdlAccessKeyId = new FormData(); + fdlAccessKeyId.top = new FormAttachment(lastControl, margin); + fdlAccessKeyId.left = new FormAttachment(0, 0); + fdlAccessKeyId.right = new FormAttachment(middle, -margin); + wlAccessKeyId.setLayoutData(fdlAccessKeyId); + wAccessKeyId = new TextVar(variables, wOptionsComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + PropsUi.setLook(wAccessKeyId); + FormData fdUseAccessKeyId = new FormData(); + fdUseAccessKeyId.top = new FormAttachment(lastControl, margin); + fdUseAccessKeyId.left = new FormAttachment(middle, 0); + fdUseAccessKeyId.right = new FormAttachment(100, 0); + wAccessKeyId.setLayoutData(fdUseAccessKeyId); + wAccessKeyId.addModifyListener(lsMod); + + lastControl = wAccessKeyId; + + wlSecretAccessKey = new Label(wOptionsComp, SWT.RIGHT); + wlSecretAccessKey.setText("AWS_SECRET_ACCESS_KEY"); + PropsUi.setLook(wlSecretAccessKey); + FormData fdlSecretAccessKey = new FormData(); + fdlSecretAccessKey.top = new FormAttachment(lastControl, margin); + fdlSecretAccessKey.left = new FormAttachment(0, 0); + fdlSecretAccessKey.right = new FormAttachment(middle, -margin); + 
wlSecretAccessKey.setLayoutData(fdlSecretAccessKey); + wSecretAccessKey = + new TextVar(variables, wOptionsComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER | SWT.PASSWORD); + PropsUi.setLook(wSecretAccessKey); + FormData fdSecretAccessKey = new FormData(); + fdSecretAccessKey.top = new FormAttachment(lastControl, margin); + fdSecretAccessKey.left = new FormAttachment(middle, 0); + fdSecretAccessKey.right = new FormAttachment(100, 0); + wSecretAccessKey.setLayoutData(fdSecretAccessKey); + wSecretAccessKey.addModifyListener(lsMod); + + FormData fdOptionsComp = new FormData(); + fdOptionsComp.left = new FormAttachment(0, 0); + fdOptionsComp.top = new FormAttachment(0, 0); + fdOptionsComp.right = new FormAttachment(100, 0); + fdOptionsComp.bottom = new FormAttachment(100, 0); + wOptionsComp.setLayoutData(fdOptionsComp); + + wOptionsComp.layout(); + wOptionsTab.setControl(wOptionsComp); + } + + private void addHttpAuthenticationTab( + CTabFolder wTabFolder, + int margin, + int middle, + ModifyListener lsMod, + FocusListener lsFocusLost) { + + CTabItem wHttpTab = new CTabItem(wTabFolder, SWT.NONE); + wHttpTab.setText( + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.HttpTab.TabItem.Label")); // $NON-NLS-1$ + + FormLayout httpLayout = new FormLayout(); + httpLayout.marginWidth = 3; + httpLayout.marginHeight = 3; + + Composite wHttpComp = new Composite(wTabFolder, SWT.NONE); + PropsUi.setLook(wHttpComp); + wHttpComp.setLayout(httpLayout); + + wlHttpLogin = new Label(wHttpComp, SWT.RIGHT); + wlHttpLogin.setText(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.HttpLogin.Label")); + PropsUi.setLook(wlHttpLogin); + FormData fdlHttpLogin = new FormData(); + fdlHttpLogin.top = new FormAttachment(0, margin); + fdlHttpLogin.left = new FormAttachment(0, 0); + fdlHttpLogin.right = new FormAttachment(middle, -margin); + wlHttpLogin.setLayoutData(fdlHttpLogin); + wHttpLogin = new TextVar(variables, wHttpComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + 
wHttpLogin.addModifyListener(lsMod); + wHttpLogin.setToolTipText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.HttpLogin.Tooltip")); + PropsUi.setLook(wHttpLogin); + FormData fdHttpLogin = new FormData(); + fdHttpLogin.top = new FormAttachment(0, margin); + fdHttpLogin.left = new FormAttachment(middle, 0); + fdHttpLogin.right = new FormAttachment(100, 0); + wHttpLogin.setLayoutData(fdHttpLogin); + + Control lastControl = wlHttpLogin; + + wlHttpPassword = new Label(wHttpComp, SWT.RIGHT); + wlHttpPassword.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.HttpPassword.Label")); + PropsUi.setLook(wlHttpPassword); + FormData fdlHttpPassword = new FormData(); + fdlHttpPassword.top = new FormAttachment(lastControl, margin); + fdlHttpPassword.left = new FormAttachment(0, 0); + fdlHttpPassword.right = new FormAttachment(middle, -margin); + wlHttpPassword.setLayoutData(fdlHttpPassword); + wHttpPassword = new PasswordTextVar(variables, wHttpComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + wHttpPassword.addModifyListener(lsMod); + wHttpPassword.setToolTipText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.HttpPassword.Tooltip")); + PropsUi.setLook(wHttpPassword); + FormData fdHttpPassword = new FormData(); + fdHttpPassword.top = new FormAttachment(lastControl, margin); + fdHttpPassword.left = new FormAttachment(middle, 0); + fdHttpPassword.right = new FormAttachment(100, 0); + wHttpPassword.setLayoutData(fdHttpPassword); + + FormData fdHttpComp = new FormData(); + fdHttpComp.left = new FormAttachment(0, 0); + fdHttpComp.top = new FormAttachment(0, 0); + fdHttpComp.right = new FormAttachment(100, 0); + fdHttpComp.bottom = new FormAttachment(100, 0); + wHttpComp.setLayoutData(fdHttpComp); + + wHttpComp.layout(); + wHttpTab.setControl(wHttpComp); + } + + private void addGeneralTab( + CTabFolder wTabFolder, + int margin, + int middle, + ModifyListener lsMod, + FocusListener lsFocusLost) { + + CTabItem wMainTab = new CTabItem(wTabFolder, SWT.NONE); + 
wMainTab.setText( + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.MainTab.TabItem.Label")); // $NON-NLS-1$ + + FormLayout mainLayout = new FormLayout(); + mainLayout.marginWidth = 3; + mainLayout.marginHeight = 3; + + Composite wMainComp = new Composite(wTabFolder, SWT.NONE); + PropsUi.setLook(wMainComp); + wMainComp.setLayout(mainLayout); + + FormData fdMainComp = new FormData(); + fdMainComp.left = new FormAttachment(0, 0); + fdMainComp.top = new FormAttachment(0, 0); + fdMainComp.right = new FormAttachment(100, 0); + fdMainComp.bottom = new FormAttachment(100, 0); + wMainComp.setLayoutData(fdMainComp); + + // Connection line + DatabaseMeta dbm = pipelineMeta.findDatabase(input.getConnection(), variables); + wConnection = addConnectionLine(wMainComp, wTransformName, input.getDatabaseMeta(), null); + wConnection.addModifyListener(lsMod); + + Control lastControl = wConnection; + + SelectionAdapter lsSelMod = + new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent arg0) { + input.setChanged(); + } + }; + + // Use HTTP Endpoint + Label wlUseHTTPEndpoint = new Label(wMainComp, SWT.RIGHT); + wlUseHTTPEndpoint.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.UseHTTPEndpoint.Label")); + PropsUi.setLook(wlUseHTTPEndpoint); + FormData fdlUseHttpEndpoint = new FormData(); + fdlUseHttpEndpoint.top = new FormAttachment(lastControl, margin); + fdlUseHttpEndpoint.left = new FormAttachment(0, 0); + fdlUseHttpEndpoint.right = new FormAttachment(middle, -margin); + wlUseHTTPEndpoint.setLayoutData(fdlUseHttpEndpoint); + + wUseHTTPEndpoint = new Button(wMainComp, SWT.CHECK); + PropsUi.setLook(wUseHTTPEndpoint); + FormData fdUseHTTPEndpoint = new FormData(); + fdUseHTTPEndpoint.top = new FormAttachment(lastControl, margin); + fdUseHTTPEndpoint.left = new FormAttachment(middle, 0); + fdUseHTTPEndpoint.right = new FormAttachment(100, 0); + wUseHTTPEndpoint.setLayoutData(fdUseHTTPEndpoint); + 
wUseHTTPEndpoint.addSelectionListener(lsSelMod); + wUseHTTPEndpoint.addSelectionListener( + new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent arg0) { + toggleHttpEndpointFlags(); + } + }); + + lastControl = wlUseHTTPEndpoint; + + // HttpEndpoint line... + wlHttpEndpoint = new Label(wMainComp, SWT.RIGHT); + wlHttpEndpoint.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.HTTPEndpoint.Label")); // $NON-NLS-1$ + PropsUi.setLook(wlHttpEndpoint); + FormData fdlHttpEndpoint = new FormData(); + fdlHttpEndpoint.left = new FormAttachment(0, 0); + fdlHttpEndpoint.right = new FormAttachment(middle, -margin); + fdlHttpEndpoint.top = new FormAttachment(lastControl, margin); + wlHttpEndpoint.setLayoutData(fdlHttpEndpoint); + + wHttpEndpoint = new TextVar(variables, wMainComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + PropsUi.setLook(wHttpEndpoint); + FormData fdHttpEndpoint = new FormData(); + fdHttpEndpoint.left = new FormAttachment(middle, 0); + fdHttpEndpoint.top = new FormAttachment(lastControl, margin); + fdHttpEndpoint.right = new FormAttachment(100, 0); + wHttpEndpoint.addModifyListener(lsMod); + wHttpEndpoint.addFocusListener(lsFocusLost); + wHttpEndpoint.setLayoutData(fdHttpEndpoint); + + lastControl = wHttpEndpoint; + + // Batch Size + wlBatchSize = new Label(wMainComp, SWT.RIGHT); + wlBatchSize.setText( + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.HTTPEndpointBatchSize.Label")); // $NON-NLS-1$ + PropsUi.setLook(wlBatchSize); + FormData fdlBatchSize = new FormData(); + fdlBatchSize.left = new FormAttachment(0, 0); + fdlBatchSize.right = new FormAttachment(middle, -margin); + fdlBatchSize.top = new FormAttachment(lastControl, margin); + wlBatchSize.setLayoutData(fdlBatchSize); + + wBatchSize = new TextVar(variables, wMainComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + PropsUi.setLook(wBatchSize); + wBatchSize.addModifyListener(lsMod); + wBatchSize.addFocusListener(lsFocusLost); + FormData fdBatchSize = new FormData(); 
+ fdBatchSize.left = new FormAttachment(middle, 0); + fdBatchSize.top = new FormAttachment(lastControl, margin); + fdBatchSize.right = new FormAttachment(100, 0); + wBatchSize.setLayoutData(fdBatchSize); + + lastControl = wBatchSize; + + // Specify fields + Label wlSpecifyFields = new Label(wMainComp, SWT.RIGHT); + wlSpecifyFields.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.SpecifyFields.Label")); + PropsUi.setLook(wlSpecifyFields); + FormData fdlSpecifyFields = new FormData(); + fdlSpecifyFields.top = new FormAttachment(lastControl, margin); + fdlSpecifyFields.left = new FormAttachment(0, 0); + fdlSpecifyFields.right = new FormAttachment(middle, -margin); + wlSpecifyFields.setLayoutData(fdlSpecifyFields); + wSpecifyFields = new Button(wMainComp, SWT.CHECK); + PropsUi.setLook(wSpecifyFields); + FormData fdSpecifyFields = new FormData(); + fdSpecifyFields.top = new FormAttachment(lastControl, margin); + fdSpecifyFields.left = new FormAttachment(middle, 0); + fdSpecifyFields.right = new FormAttachment(100, 0); + wSpecifyFields.setLayoutData(fdSpecifyFields); + wSpecifyFields.addSelectionListener(lsSelMod); + wSpecifyFields.addSelectionListener( + new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent arg0) { + toggleSpecifyFieldsFlags(); + } + }); + + lastControl = wSpecifyFields; + + Label wlStreamToS3Csv = new Label(wMainComp, SWT.RIGHT); + wlStreamToS3Csv.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.StreamCsvToS3.Label")); + wlStreamToS3Csv.setToolTipText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.StreamCsvToS3.Tooltip")); + PropsUi.setLook(wlStreamToS3Csv); + FormData fdlStreamToS3Csv = new FormData(); + fdlStreamToS3Csv.top = new FormAttachment(lastControl, margin); + fdlStreamToS3Csv.left = new FormAttachment(0, 0); + fdlStreamToS3Csv.right = new FormAttachment(middle, -margin); + wlStreamToS3Csv.setLayoutData(fdlStreamToS3Csv); + + wStreamToS3Csv = new Button(wMainComp, SWT.CHECK); + 
wStreamToS3Csv.setToolTipText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.StreamCsvToS3.ToolTip")); + PropsUi.setLook(wStreamToS3Csv); + FormData fdStreamToS3Csv = new FormData(); + fdStreamToS3Csv.top = new FormAttachment(lastControl, margin); + fdStreamToS3Csv.left = new FormAttachment(middle, 0); + fdStreamToS3Csv.right = new FormAttachment(100, 0); + wStreamToS3Csv.setLayoutData(fdStreamToS3Csv); + wStreamToS3Csv.setSelection(true); + + lastControl = wlStreamToS3Csv; + + Label wlReadFromFile = new Label(wMainComp, SWT.RIGHT); + wlReadFromFile.setText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ReadFromFile.Label")); + wlReadFromFile.setToolTipText( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ReadFromFile.Tooltip")); + PropsUi.setLook(wlReadFromFile); + FormData fdlReadFromFile = new FormData(); + fdlReadFromFile.top = new FormAttachment(lastControl, margin * 2); + fdlReadFromFile.left = new FormAttachment(0, 0); + fdlReadFromFile.right = new FormAttachment(middle, -margin); + wlReadFromFile.setLayoutData(fdlReadFromFile); + + wReadFromFilename = new TextVar(variables, wMainComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); + PropsUi.setLook(wReadFromFilename); + wReadFromFilename.addModifyListener(lsMod); + wReadFromFilename.addFocusListener(lsFocusLost); + wReadFromFilename.getTextWidget().setMessage("s3://"); + + FormData fdReadFromFile = new FormData(); + fdReadFromFile.top = new FormAttachment(lastControl, margin * 2); + fdReadFromFile.left = new FormAttachment(middle, 0); + fdReadFromFile.right = new FormAttachment(100, -margin); + wReadFromFilename.setLayoutData(fdReadFromFile); + + wMainComp.layout(); + wMainTab.setControl(wMainComp); + } + + public void toggleHttpEndpointFlags() { + wlHttpEndpoint.setEnabled(wUseHTTPEndpoint.getSelection()); + wHttpEndpoint.setEnabled(wUseHTTPEndpoint.getSelection()); + wlBatchSize.setEnabled(wUseHTTPEndpoint.getSelection()); + wBatchSize.setEnabled(wUseHTTPEndpoint.getSelection()); + 
wlHttpLogin.setEnabled(wUseHTTPEndpoint.getSelection()); + wHttpLogin.setEnabled(wUseHTTPEndpoint.getSelection()); + wlHttpPassword.setEnabled(wUseHTTPEndpoint.getSelection()); + wHttpPassword.setEnabled(wUseHTTPEndpoint.getSelection()); + wStreamToS3Csv.setEnabled(!wUseHTTPEndpoint.getSelection()); + wReadFromFilename.setEnabled(!wUseHTTPEndpoint.getSelection()); + } + + public void toggleSpecifyFieldsFlags() { + boolean specifyFields = wSpecifyFields.getSelection(); + wFields.setEnabled(specifyFields); + wGetFields.setEnabled(specifyFields); + wDoMapping.setEnabled(specifyFields); + } + + /** + * Reads in the fields from the previous transforms and from the ONE next transform and opens an + * EnterMappingDialog with this information. After the user did the mapping, those information is + * put into the Select/Rename table. + */ + private void generateMappings() { + + // Determine the source and target fields... + // + IRowMeta sourceFields; + IRowMeta targetFields; + + try { + sourceFields = pipelineMeta.getPrevTransformFields(variables, transformMeta); + } catch (HopException e) { + new ErrorDialog( + shell, + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.DoMapping.UnableToFindSourceFields.Title"), + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.DoMapping.UnableToFindSourceFields.Message"), + e); + return; + } + + // refresh data + input.setTablename(variables.resolve(wTable.getText())); + ITransformMeta transformMeta = this.transformMeta.getTransform(); + if (StringUtils.isEmpty(input.getConnection())) { + input.setConnection(wConnection.getText()); + } + try { + targetFields = transformMeta.getRequiredFields(variables); + } catch (HopException e) { + new ErrorDialog( + shell, + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.DoMapping.UnableToFindTargetFields.Title"), + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.DoMapping.UnableToFindTargetFields.Message"), + e); + return; + } + + String[] inputNames = new 
String[sourceFields.size()]; + for (int i = 0; i < sourceFields.size(); i++) { + IValueMeta value = sourceFields.getValueMeta(i); + inputNames[i] = value.getName(); + } + + // Create the existing mapping list... + // + List mappings = new ArrayList<>(); + StringBuffer missingSourceFields = new StringBuffer(); + StringBuffer missingTargetFields = new StringBuffer(); + + int nrFields = wFields.nrNonEmpty(); + for (int i = 0; i < nrFields; i++) { + TableItem item = wFields.getNonEmpty(i); + String source = item.getText(2); + String target = item.getText(1); + + int sourceIndex = sourceFields.indexOfValue(source); + if (sourceIndex < 0) { + missingSourceFields.append(Const.CR + " " + source + " --> " + target); + } + int targetIndex = targetFields.indexOfValue(target); + if (targetIndex < 0) { + missingTargetFields.append(Const.CR + " " + source + " --> " + target); + } + if (sourceIndex < 0 || targetIndex < 0) { + continue; + } + + SourceToTargetMapping mapping = new SourceToTargetMapping(sourceIndex, targetIndex); + mappings.add(mapping); + } + + // show a confirm dialog if some missing field was found + // + if (missingSourceFields.length() > 0 || missingTargetFields.length() > 0) { + + String message = ""; + if (missingSourceFields.length() > 0) { + message += + BaseMessages.getString( + PKG, + "CrateDBBulkLoaderDialog.DoMapping.SomeSourceFieldsNotFound", + missingSourceFields.toString()) + + Const.CR; + } + if (missingTargetFields.length() > 0) { + message += + BaseMessages.getString( + PKG, + "CrateDBBulkLoaderDialog.DoMapping.SomeTargetFieldsNotFound", + missingTargetFields.toString()) + + Const.CR; + } + message += Const.CR; + message += + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.DoMapping.SomeFieldsNotFoundContinue") + + Const.CR; + int answer = + BaseDialog.openMessageBox( + shell, + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.DoMapping.SomeFieldsNotFoundTitle"), + message, + SWT.ICON_QUESTION | SWT.YES | SWT.NO); + boolean goOn = 
(answer & SWT.YES) != 0; + if (!goOn) { + return; + } + } + EnterMappingDialog d = + new EnterMappingDialog( + CrateDBBulkLoaderDialog.this.shell, + sourceFields.getFieldNames(), + targetFields.getFieldNames(), + mappings); + mappings = d.open(); + + // mappings == null if the user pressed cancel + // + if (mappings != null) { + // Clear and re-populate! + // + wFields.table.removeAll(); + wFields.table.setItemCount(mappings.size()); + for (int i = 0; i < mappings.size(); i++) { + SourceToTargetMapping mapping = (SourceToTargetMapping) mappings.get(i); + TableItem item = wFields.table.getItem(i); + item.setText(2, sourceFields.getValueMeta(mapping.getSourcePosition()).getName()); + item.setText(1, targetFields.getValueMeta(mapping.getTargetPosition()).getName()); + } + wFields.setRowNums(); + wFields.optWidth(true); + } + } + + private void setTableFieldCombo() { + Runnable fieldLoader = + () -> { + // clear + for (int i = 0; i < tableFieldColumns.size(); i++) { + ColumnInfo colInfo = (ColumnInfo) tableFieldColumns.get(i); + colInfo.setComboValues(new String[] {}); + } + if (!StringUtil.isEmpty(wTable.getText())) { + DatabaseMeta databaseMeta = pipelineMeta.findDatabase(wConnection.getText(), variables); + if (databaseMeta != null) { + try (Database db = new Database(loggingObject, variables, databaseMeta)) { + db.connect(); + + String schemaTable = + databaseMeta.getQuotedSchemaTableCombination( + variables, + variables.resolve(wSchema.getText()), + variables.resolve(wTable.getText())); + IRowMeta r = db.getTableFields(schemaTable); + if (null != r) { + String[] fieldNames = r.getFieldNames(); + if (null != fieldNames) { + for (int i = 0; i < tableFieldColumns.size(); i++) { + ColumnInfo colInfo = (ColumnInfo) tableFieldColumns.get(i); + colInfo.setComboValues(fieldNames); + } + } + } + } catch (Exception e) { + for (int i = 0; i < tableFieldColumns.size(); i++) { + ColumnInfo colInfo = (ColumnInfo) tableFieldColumns.get(i); + colInfo.setComboValues(new String[] 
{}); + } + // ignore any errors here. drop downs will not be + // filled, but no problem for the user + } + } + } + }; + shell.getDisplay().asyncExec(fieldLoader); + } + + protected void setComboBoxes() { + // Something was changed in the row. + // + final Map fields = new HashMap<>(); + + // Add the currentMeta fields... + fields.putAll(inputFields); + + Set keySet = fields.keySet(); + List entries = new ArrayList<>(keySet); + + String[] fieldNames = (String[]) entries.toArray(new String[entries.size()]); + + if (PropsUi.getInstance().isSortFieldByName()) { + Const.sortStrings(fieldNames); + } + ciFields[1].setComboValues(fieldNames); + } + + /** Copy information from the meta-data input to the dialog fields. */ + public void getData() { + if (!StringUtils.isEmpty(input.getConnection())) { + wConnection.setText(input.getConnection()); + } + if (!StringUtils.isEmpty(input.getSchemaName())) { + wSchema.setText(input.getSchemaName()); + } + if (!StringUtils.isEmpty(input.getTableName())) { + wTable.setText(input.getTableName()); + } + + wUseHTTPEndpoint.setSelection(input.isUseHttpEndpoint()); + + if (input.isUseHttpEndpoint()) { + wHttpEndpoint.setText(input.getHttpEndpoint()); + wBatchSize.setText(Const.NVL(input.getBatchSize(), "")); + wHttpLogin.setText(Const.NVL(input.getHttpLogin(), "")); + wHttpPassword.setText(Const.NVL(input.getHttpPassword(), "")); + } + + wUseSystemVars.setSelection(input.isUseSystemEnvVars()); + if (!input.isUseSystemEnvVars()) { + if (!StringUtil.isEmpty(input.getAwsAccessKeyId())) { + wAccessKeyId.setText(input.getAwsAccessKeyId()); + } + if (!StringUtils.isEmpty(input.getAwsSecretAccessKey())) { + wSecretAccessKey.setText(input.getAwsSecretAccessKey()); + } + } + + wStreamToS3Csv.setSelection(input.isStreamToS3Csv()); + if (!StringUtils.isEmpty(input.getReadFromFilename())) { + wReadFromFilename.setText(input.getReadFromFilename()); + } + + // wTruncate.setSelection(input.isTruncateTable()); + // 
wOnlyWhenHaveRows.setSelection(input.isOnlyWhenHaveRows()); + + wSpecifyFields.setSelection(input.specifyFields()); + + for (int i = 0; i < input.getFields().size(); i++) { + CrateDBBulkLoaderField vbf = input.getFields().get(i); + TableItem item = wFields.table.getItem(i); + if (vbf.getDatabaseField() != null) { + item.setText(1, vbf.getDatabaseField()); + } + if (vbf.getStreamField() != null) { + item.setText(2, vbf.getStreamField()); + } + } + + wTransformName.selectAll(); + } + + private void cancel() { + transformName = null; + input.setChanged(backupChanged); + dispose(); + } + + private void getInfo(CrateDBBulkLoaderMeta info) { + + if (!StringUtils.isEmpty(wConnection.getText())) { + info.setConnection(wConnection.getText()); + } + + if (!StringUtils.isEmpty(wSchema.getText())) { + info.setSchemaName(wSchema.getText()); + } + + if (!StringUtils.isEmpty(wTable.getText())) { + info.setTablename(wTable.getText()); + } + + info.setUseHttpEndpoint(wUseHTTPEndpoint.getSelection()); + if (wUseHTTPEndpoint.getSelection()) { + info.setHttpEndpoint(wHttpEndpoint.getText()); + info.setBatchSize(wBatchSize.getText()); + info.setHttpLogin(wHttpLogin.getText()); + info.setHttpPassword(wHttpPassword.getText()); + } + + if (wUseSystemVars.getSelection()) { + info.setUseSystemEnvVars(wUseSystemVars.getSelection()); + } else { + info.setUseSystemEnvVars(wUseSystemVars.getSelection()); + if (!StringUtils.isEmpty(wAccessKeyId.getText())) { + info.setAwsAccessKeyId(wAccessKeyId.getText()); + } + if (!StringUtil.isEmpty(wSecretAccessKey.getText())) { + info.setAwsSecretAccessKey(wSecretAccessKey.getText()); + } + } + // info.setTruncateTable(wTruncate.getSelection()); + // info.setOnlyWhenHaveRows(wOnlyWhenHaveRows.getSelection()); + info.setStreamToS3Csv(wStreamToS3Csv.getSelection()); + + if (!StringUtils.isEmpty(wReadFromFilename.getText())) { + info.setReadFromFilename(wReadFromFilename.getText()); + } + + info.setSpecifyFields(wSpecifyFields.getSelection()); + + int nrRows 
= wFields.nrNonEmpty(); + info.getFields().clear(); + + for (int i = 0; i < nrRows; i++) { + TableItem item = wFields.getNonEmpty(i); + CrateDBBulkLoaderField vbf = + new CrateDBBulkLoaderField( + Const.NVL(item.getText(1), ""), Const.NVL(item.getText(2), "")); + info.getFields().add(vbf); + } + } + + private void ok() { + if (StringUtil.isEmpty(wTransformName.getText())) { + return; + } + + transformName = wTransformName.getText(); // return value + + getInfo(input); + + if (Utils.isEmpty(input.getConnection())) { + MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); + mb.setMessage( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ConnectionError.DialogMessage")); + mb.setText(BaseMessages.getString(SYSTEM_DIALOG_ERROR_TITLE)); + mb.open(); + return; + } + + dispose(); + } + + private void getSchemaName() { + DatabaseMeta databaseMeta = pipelineMeta.findDatabase(wConnection.getText(), variables); + if (databaseMeta != null) { + try (Database database = new Database(loggingObject, variables, databaseMeta)) { + database.connect(); + String[] schemas = database.getSchemas(); + + if (null != schemas && schemas.length > 0) { + schemas = Const.sortStrings(schemas); + EnterSelectionDialog dialog = + new EnterSelectionDialog( + shell, + schemas, + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.AvailableSchemas.Title", wConnection.getText()), + BaseMessages.getString( + PKG, + "CrateDBBulkLoaderDialog.AvailableSchemas.Message", + wConnection.getText())); + String d = dialog.open(); + if (d != null) { + wSchema.setText(Const.NVL(d, "")); + setTableFieldCombo(); + } + + } else { + org.apache.hop.ui.core.dialog.MessageBox mb = + new org.apache.hop.ui.core.dialog.MessageBox(shell, SWT.OK | SWT.ICON_ERROR); + mb.setMessage(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.NoSchema.Error")); + mb.setText(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.GetSchemas.Error")); + mb.open(); + } + } catch (Exception e) { + new ErrorDialog( + shell, + 
BaseMessages.getString(PKG, SYSTEM_DIALOG_ERROR_TITLE), + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ErrorGettingSchemas"), + e); + } + } + } + + private void getTableName() { + + String connectionName = wConnection.getText(); + if (StringUtil.isEmpty(connectionName)) { + return; + } + DatabaseMeta databaseMeta = pipelineMeta.findDatabase(connectionName, variables); + + if (databaseMeta != null) { + log.logDebug( + toString(), + BaseMessages.getString( + PKG, "CrateDBBulkLoaderDialog.Log.LookingAtConnection", databaseMeta.toString())); + + DatabaseExplorerDialog std = + new DatabaseExplorerDialog( + shell, SWT.NONE, variables, databaseMeta, pipelineMeta.getDatabases()); + std.setSelectedSchemaAndTable(wSchema.getText(), wTable.getText()); + if (std.open()) { + wSchema.setText(Const.NVL(std.getSchemaName(), "")); + wTable.setText(Const.NVL(std.getTableName(), "")); + } + } else { + MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); + mb.setMessage( + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.ConnectionError2.DialogMessage")); + mb.setText(BaseMessages.getString(SYSTEM_DIALOG_ERROR_TITLE)); + mb.open(); + } + } + + /** Fill up the fields table with the incoming fields. */ + private void get() { + try { + IRowMeta r = pipelineMeta.getPrevTransformFields(variables, transformName); + if (r != null && !r.isEmpty()) { + BaseTransformDialog.getFieldsFromPrevious( + r, wFields, 1, new int[] {1, 2}, new int[] {}, -1, -1, null); + } + } catch (HopException ke) { + new ErrorDialog( + shell, + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.FailedToGetFields.DialogTitle"), + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.FailedToGetFields.DialogMessage"), + ke); //$NON-NLS-1$ //$NON-NLS-2$ + } + } + + // Generate code for create table... 
+ // Conversions done by Database + // + private void sql() { + try { + CrateDBBulkLoaderMeta info = new CrateDBBulkLoaderMeta(); + DatabaseMeta databaseMeta = pipelineMeta.findDatabase(wConnection.getText(), variables); + + getInfo(info); + IRowMeta prev = pipelineMeta.getPrevTransformFields(variables, transformName); + TransformMeta transformMeta = pipelineMeta.findTransform(transformName); + + if (info.specifyFields()) { + // Only use the fields that were specified. + IRowMeta prevNew = new RowMeta(); + + for (int i = 0; i < info.getFields().size(); i++) { + CrateDBBulkLoaderField vbf = info.getFields().get(i); + IValueMeta insValue = prev.searchValueMeta(vbf.getStreamField()); + if (insValue != null) { + IValueMeta insertValue = insValue.clone(); + insertValue.setName(vbf.getDatabaseField()); + prevNew.addValueMeta(insertValue); + } else { + throw new HopTransformException( + BaseMessages.getString( + PKG, + "CrateDBBulkLoaderDialog.FailedToFindField.Message", + vbf.getStreamField())); // $NON-NLS-1$ + } + } + prev = prevNew; + } + + SqlStatement sql = + info.getSqlStatements(variables, pipelineMeta, transformMeta, prev, metadataProvider); + if (!sql.hasError()) { + if (sql.hasSql()) { + SqlEditor sqledit = + new SqlEditor( + shell, SWT.NONE, variables, databaseMeta, DbCache.getInstance(), sql.getSql()); + sqledit.open(); + } else { + MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); + mb.setMessage(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.NoSQL.DialogMessage")); + mb.setText(BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.NoSQL.DialogTitle")); + mb.open(); + } + } else { + MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); + mb.setMessage(sql.getError()); + mb.setText(BaseMessages.getString(SYSTEM_DIALOG_ERROR_TITLE)); + mb.open(); + } + } catch (HopException ke) { + new ErrorDialog( + shell, + BaseMessages.getString(PKG, "CrateDBBulkLoaderDialog.BuildSQLError.DialogTitle"), + BaseMessages.getString(PKG, 
"CrateDBBulkLoaderDialog.BuildSQLError.DialogMessage"), + ke); + } + } + + @Override + public String toString() { + return this.getClass().getName(); + } + + public void toggleKeysSelection() { + wlAccessKeyId.setEnabled(!wUseSystemVars.getSelection()); + wAccessKeyId.setEnabled(!wUseSystemVars.getSelection()); + wlSecretAccessKey.setEnabled(!wUseSystemVars.getSelection()); + wSecretAccessKey.setEnabled(!wUseSystemVars.getSelection()); + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderField.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderField.java new file mode 100644 index 00000000000..b0e6b3e4804 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderField.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader; + +import java.util.Objects; +import org.apache.hop.metadata.api.HopMetadataProperty; + +public class CrateDBBulkLoaderField { + + public CrateDBBulkLoaderField() {} + + public CrateDBBulkLoaderField(String fieldDatabase, String fieldStream) { + this.databaseField = fieldDatabase; + this.streamField = fieldStream; + } + + @HopMetadataProperty( + key = "stream_name", + injectionKey = "STREAM_FIELDNAME", + injectionKeyDescription = "CrateDBBulkLoader.Inject.FIELDSTREAM") + private String streamField; + + @HopMetadataProperty( + key = "column_name", + injectionKey = "DATABASE_FIELDNAME", + injectionKeyDescription = "CrateDBBulkLoader.Inject.FIELDDATABASE") + private String databaseField; + + public String getStreamField() { + return streamField; + } + + public void setStreamField(String streamField) { + this.streamField = streamField; + } + + public String getDatabaseField() { + return databaseField; + } + + public void setDatabaseField(String databaseField) { + this.databaseField = databaseField; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CrateDBBulkLoaderField that = (CrateDBBulkLoaderField) o; + return Objects.equals(streamField, that.streamField) && Objects.equals(databaseField, that.databaseField); + } + + @Override + public int hashCode() { + return Objects.hash(streamField, databaseField); + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderMeta.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderMeta.java new file mode 100644 index 00000000000..5f412f96559 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoaderMeta.java @@ -0,0 +1,937 @@ +/* + * Licensed to the 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.cratedbbulkloader;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hop.core.CheckResult;
import org.apache.hop.core.Const;
import org.apache.hop.core.ICheckResult;
import org.apache.hop.core.IProvidesModelerMeta;
import org.apache.hop.core.SqlStatement;
import org.apache.hop.core.annotations.Transform;
import org.apache.hop.core.database.Database;
import org.apache.hop.core.database.DatabaseMeta;
import org.apache.hop.core.exception.HopDatabaseException;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.exception.HopTransformException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.row.RowMeta;
import org.apache.hop.core.util.StringUtil;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.metadata.api.HopMetadataProperty;
import org.apache.hop.metadata.api.IHopMetadataProvider;
import org.apache.hop.pipeline.DatabaseImpact;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.BaseTransformMeta;
import org.apache.hop.pipeline.transform.ITransformData;
import org.apache.hop.pipeline.transform.TransformMeta;

@Transform(
    id = "CrateDBBulkLoader",
    image = "CrateDBBulkLoader.svg",
    name = "i18n::BaseTransform.TypeLongDesc.CrateDBBulkLoaderMessage",
    description = "i18n::BaseTransform.TypeTooltipDesc.CrateDBBulkLoaderMessage",
    categoryDescription = "i18n:org.apache.hop.pipeline.transform:BaseTransform.Category.Bulk",
    documentationUrl = "/pipeline/transforms/cratedb-bulkloader.html",
    isIncludeJdbcDrivers = true,
    classLoaderGroup = "crate-db")
/**
 * Metadata for the CrateDB bulk loader transform: the target connection/schema/table, the
 * transport used to load rows (CrateDB HTTP bulk endpoint or COPY from a file / S3), CSV shaping
 * options, and the stream-field to table-column mapping.
 */
public class CrateDBBulkLoaderMeta
    extends BaseTransformMeta<CrateDBBulkLoader, CrateDBBulkLoaderData>
    implements IProvidesModelerMeta {
  // i18n message-bundle anchor class.
  private static final Class<?> PKG = CrateDBBulkLoaderMeta.class;

  public static final String DEFAULT_CSV_DELIMITER = ",";
  public static final String CSV_RECORD_DELIMITER = "\n";
  public static final String CSV_ESCAPE_CHAR = "\"";
  public static final String ENCLOSURE = "\"";

  /** Name of the database connection (metadata) to load into. */
  @HopMetadataProperty(
      key = "connection",
      injectionKey = "CONNECTIONNAME",
      injectionKeyDescription = "CrateDBBulkLoader.Injection.CONNECTIONNAME")
  private String connection;

  @HopMetadataProperty(
      key = "schema",
      injectionKey = "SCHEMANAME",
      injectionKeyDescription = "CrateDBBulkLoader.Injection.SCHEMANAME")
  private String schemaName;

  @HopMetadataProperty(
      key = "table",
      injectionKey = "TABLENAME",
      injectionKeyDescription = "CrateDBBulkLoader.Injection.TABLENAME")
  private String tablename;

  /** When true, AWS credentials are taken from the environment instead of the fields below. */
  @HopMetadataProperty(
      key = "use_system_env_vars",
      injectionKey = "USE_SYSTEM_ENV_VARS",
      injectionKeyDescription = "")
  private boolean useSystemEnvVars;

  /** When true, rows are sent to CrateDB's HTTP bulk endpoint instead of COPY. */
  @HopMetadataProperty(key = "use_http_endpoint")
  private boolean useHttpEndpoint;

  @HopMetadataProperty(key = "http_endpoint")
  private String httpEndpoint;

  /** Batch size for HTTP bulk inserts; stored as a string so it can contain variables. */
  @HopMetadataProperty(key = "batch_size")
  private String batchSize;

  @HopMetadataProperty(
      key = "aws_access_key_id",
      injectionKey = "AWS_ACCESS_KEY_ID",
      injectionKeyDescription = "")
  private String awsAccessKeyId;

  @HopMetadataProperty(
      key = "aws_secret_access_key",
      injectionKey = "AWS_SECRET_ACCESS_KEY",
      injectionKeyDescription = "")
  private String awsSecretAccessKey;

  @HopMetadataProperty(
      key = "truncate",
      injectionKey = "TRUNCATE_TABLE",
      injectionKeyDescription = "CrateDBBulkLoader.Injection.TruncateTable.Field")
  private boolean truncateTable;

  @HopMetadataProperty(
      key = "only_when_have_rows",
      injectionKey = "ONLY_WHEN_HAVE_ROWS",
      injectionKeyDescription = "CrateDBBulkLoader.Inject.OnlyWhenHaveRows.Field")
  private boolean onlyWhenHaveRows;

  @HopMetadataProperty(
      key = "stream_to_s3",
      injectionKey = "STREAM_TO_S3",
      injectionKeyDescription = "")
  private boolean streamToS3Csv = true;

  /** CSV: Trim whitespace */
  @HopMetadataProperty(key = "trim_whitespace", injectionKeyDescription = "")
  private boolean trimWhitespace;

  /** CSV: Convert column value to null if */
  @HopMetadataProperty(key = "null_if", injectionKeyDescription = "")
  private String nullIf;

  /**
   * CSV: Should the load fail if the column count in the row does not match the column count in the
   * table
   */
  @HopMetadataProperty(key = "error_column_mismatch", injectionKeyDescription = "")
  private boolean errorColumnMismatch;

  /** JSON: Strip nulls from JSON */
  @HopMetadataProperty(key = "strip_null", injectionKeyDescription = "")
  private boolean stripNull;

  /** Do we explicitly select the fields to update in the database */
  @HopMetadataProperty(key = "specify_fields", injectionKeyDescription = "")
  private boolean specifyFields;

  @HopMetadataProperty(
      key = "load_from_filename",
      injectionKey = "LOAD_FROM_FILENAME",
      injectionKeyDescription = "")
  private String readFromFilename;

  @HopMetadataProperty(key = "http_login")
  private String httpLogin;

  @HopMetadataProperty(key = "http_password", password = true)
  private String httpPassword;

  /** Fields containing the values in the input stream to insert. */
  @HopMetadataProperty(
      groupKey = "fields",
      key = "field",
      injectionGroupKey = "FIELDS",
      injectionGroupDescription = "CrateDBBulkLoader.Injection.FIELDS",
      injectionKey = "FIELDSTREAM",
      injectionKeyDescription = "CrateDBBulkLoader.Injection.FIELDSTREAM")
  private List<CrateDBBulkLoaderField> fields;

  /**
   * Fields in the table to insert.
   *
   * <p>NOTE(review): this maps the same group/key as {@link #fields} above and is never populated
   * by this class itself — it looks vestigial; confirm whether it can be removed.
   */
  @HopMetadataProperty(
      groupKey = "fields",
      key = "field",
      injectionGroupKey = "FIELDS",
      injectionGroupDescription = "CrateDBBulkLoader.Injection.FIELDS",
      injectionKey = "FIELDDATABASE",
      injectionKeyDescription = "CrateDBBulkLoader.Injection.FIELDDATABASE")
  private String[] fieldDatabase;

  public CrateDBBulkLoaderMeta() {
    super(); // allocate BaseTransformMeta

    fields = new ArrayList<>();
  }

  @Override
  public Object clone() {
    return super.clone();
  }

  public String getHttpLogin() {
    return httpLogin;
  }

  public void setHttpLogin(String httpLogin) {
    this.httpLogin = httpLogin;
  }

  public String getHttpPassword() {
    return httpPassword;
  }

  public void setHttpPassword(String httpPassword) {
    this.httpPassword = httpPassword;
  }

  /**
   * @return returns the database connection name
   */
  public String getConnection() {
    return connection;
  }

  /**
   * sets the database connection name
   *
   * @param connection the database connection name to set
   */
  public void setConnection(String connection) {
    this.connection = connection;
  }

  public boolean isUseHttpEndpoint() {
    return useHttpEndpoint;
  }

  public void setUseHttpEndpoint(boolean useHttpEndpoint) {
    this.useHttpEndpoint = useHttpEndpoint;
  }

  /**
   * @return the HTTP bulk endpoint URL, never null (empty string when unset)
   */
  public String getHttpEndpoint() {
    return httpEndpoint == null ? "" : httpEndpoint;
  }

  public void setHttpEndpoint(String httpEndpoint) {
    this.httpEndpoint = httpEndpoint;
  }

  /**
   * @return the batch size, never null ("0" when unset)
   */
  public String getBatchSize() {
    return batchSize == null ? "0" : batchSize;
  }

  public void setBatchSize(String batchSize) {
    this.batchSize = batchSize;
  }

  /**
   * @return Returns the database. Always null here; the connection is resolved by name via the
   *     metadata provider instead.
   */
  @Override
  public DatabaseMeta getDatabaseMeta() {
    return null;
  }

  /**
   * @return Returns the tablename.
   */
  public String getTableName() {
    return tablename;
  }

  /**
   * @deprecated use {@link #getTableName()}
   */
  @Deprecated
  public String getTablename() {
    return getTableName();
  }

  /**
   * @param tablename The tablename to set.
   */
  public void setTablename(String tablename) {
    this.tablename = tablename;
  }

  /**
   * @return Returns the truncate table flag.
   */
  public boolean isTruncateTable() {
    return truncateTable;
  }

  /**
   * @param truncateTable The truncate table flag to set.
   */
  public void setTruncateTable(boolean truncateTable) {
    this.truncateTable = truncateTable;
  }

  /**
   * @return Returns the onlyWhenHaveRows flag.
   */
  public boolean isOnlyWhenHaveRows() {
    return onlyWhenHaveRows;
  }

  /**
   * @param onlyWhenHaveRows The onlyWhenHaveRows to set.
   */
  public void setOnlyWhenHaveRows(boolean onlyWhenHaveRows) {
    this.onlyWhenHaveRows = onlyWhenHaveRows;
  }

  /**
   * @param specifyFields The specify fields flag to set.
   */
  public void setSpecifyFields(boolean specifyFields) {
    this.specifyFields = specifyFields;
  }

  public boolean isStreamToS3Csv() {
    return streamToS3Csv;
  }

  public void setStreamToS3Csv(boolean streamToS3Csv) {
    this.streamToS3Csv = streamToS3Csv;
  }

  /**
   * CSV:
   *
   * @return Should whitespace in the fields be trimmed
   */
  public boolean isTrimWhitespace() {
    return trimWhitespace;
  }

  /**
   * CSV: Set if the whitespace in the files should be trimmed
   *
   * @param trimWhitespace true/false
   */
  public void setTrimWhitespace(boolean trimWhitespace) {
    this.trimWhitespace = trimWhitespace;
  }

  /**
   * CSV:
   *
   * @return Comma delimited list of strings to convert to Null
   */
  public String getNullIf() {
    return nullIf;
  }

  /**
   * CSV: Set the string constants to convert to Null
   *
   * @param nullIf Comma delimited list of constants
   */
  public void setNullIf(String nullIf) {
    this.nullIf = nullIf;
  }

  /**
   * CSV:
   *
   * @return Should the load error if the number of columns in the table and in the CSV do not match
   */
  public boolean isErrorColumnMismatch() {
    return errorColumnMismatch;
  }

  /**
   * CSV: Set if the load should error if the number of columns in the table and in the CSV do not
   * match
   *
   * @param errorColumnMismatch true/false
   */
  public void setErrorColumnMismatch(boolean errorColumnMismatch) {
    this.errorColumnMismatch = errorColumnMismatch;
  }

  /**
   * JSON:
   *
   * @return Should null values be stripped out of the JSON
   */
  public boolean isStripNull() {
    return stripNull;
  }

  /**
   * JSON: Set if null values should be stripped out of the JSON
   *
   * @param stripNull true/false
   */
  public void setStripNull(boolean stripNull) {
    this.stripNull = stripNull;
  }

  public String getReadFromFilename() {
    return readFromFilename;
  }

  public void setReadFromFilename(String readFromFilename) {
    this.readFromFilename = readFromFilename;
  }

  public List<CrateDBBulkLoaderField> getFields() {
    return fields;
  }

  public void setFields(List<CrateDBBulkLoaderField> fields) {
    this.fields = fields;
  }

  /**
   * @return Returns the specify fields flag.
   */
  public boolean specifyFields() {
    return specifyFields;
  }

  public boolean isSpecifyFields() {
    return specifyFields;
  }

  public String getAwsAccessKeyId() {
    return awsAccessKeyId;
  }

  public void setAwsAccessKeyId(String awsAccessKeyId) {
    this.awsAccessKeyId = awsAccessKeyId;
  }

  public String getAwsSecretAccessKey() {
    return awsSecretAccessKey;
  }

  public void setAwsSecretAccessKey(String awsSecretAccessKey) {
    this.awsSecretAccessKey = awsSecretAccessKey;
  }

  public boolean isUseSystemEnvVars() {
    return useSystemEnvVars;
  }

  public void setUseSystemEnvVars(boolean useSystemEnvVars) {
    this.useSystemEnvVars = useSystemEnvVars;
  }

  @Override
  public void setDefault() {
    tablename = "";

    // To be compatible with pre-v3.2 (SB)
    specifyFields = false;
  }

  /** Adds a single remark; small helper keeping {@link #check} readable. */
  private void addRemark(
      List<ICheckResult> remarks, int type, String message, TransformMeta transformMeta) {
    remarks.add(new CheckResult(type, message, transformMeta));
  }

  @Override
  public void check(
      List<ICheckResult> remarks,
      PipelineMeta pipelineMeta,
      TransformMeta transformMeta,
      IRowMeta prev,
      String[] input,
      String[] output,
      IRowMeta info,
      IVariables variables,
      IHopMetadataProvider metadataProvider) {

    try {
      DatabaseMeta databaseMeta =
          metadataProvider.getSerializer(DatabaseMeta.class).load(variables.resolve(connection));

      if (databaseMeta != null) {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.ConnectionExists"),
            transformMeta);

        Database db = new Database(loggingObject, variables, databaseMeta);
        try {
          db.connect();

          addRemark(
              remarks,
              ICheckResult.TYPE_RESULT_OK,
              BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.ConnectionOk"),
              transformMeta);

          if (!StringUtil.isEmpty(tablename)) {
            checkTable(remarks, transformMeta, prev, db, databaseMeta, variables);
          } else {
            addRemark(
                remarks,
                ICheckResult.TYPE_RESULT_ERROR,
                BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.NoTableName"),
                transformMeta);
          }
        } catch (HopException e) {
          addRemark(
              remarks,
              ICheckResult.TYPE_RESULT_ERROR,
              BaseMessages.getString(
                  PKG, "CrateDBBulkLoaderMeta.CheckResult.UndefinedError", e.getMessage()),
              transformMeta);
        } finally {
          db.disconnect();
        }
      } else {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_ERROR,
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.NoConnection"),
            transformMeta);
      }
    } catch (HopException e) {
      addRemark(
          remarks,
          ICheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString(
              PKG, "CrateDBBulkLoaderMeta.CheckResult.UndefinedError", e.getMessage()),
          transformMeta);
    }

    // See if we have input streams leading to this transform!
    if (input.length > 0) {
      addRemark(
          remarks,
          ICheckResult.TYPE_RESULT_OK,
          BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.ExpectedInputOk"),
          transformMeta);
    } else {
      addRemark(
          remarks,
          ICheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.ExpectedInputError"),
          transformMeta);
    }
  }

  /** Verifies table existence/accessibility and the field mapping against the target table. */
  private void checkTable(
      List<ICheckResult> remarks,
      TransformMeta transformMeta,
      IRowMeta prev,
      Database db,
      DatabaseMeta databaseMeta,
      IVariables variables)
      throws HopException {
    String realSchemaName = db.resolve(schemaName);
    String realTableName = db.resolve(tablename);
    String schemaTable =
        databaseMeta.getQuotedSchemaTableCombination(variables, realSchemaName, realTableName);

    if (!db.checkTableExists(realSchemaName, realTableName)) {
      addRemark(
          remarks,
          ICheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.TableError", schemaTable),
          transformMeta);
      return;
    }
    addRemark(
        remarks,
        ICheckResult.TYPE_RESULT_OK,
        BaseMessages.getString(
            PKG, "CrateDBBulkLoaderMeta.CheckResult.TableAccessible", schemaTable),
        transformMeta);

    IRowMeta r = db.getTableFields(schemaTable);
    if (r == null) {
      addRemark(
          remarks,
          ICheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.TableNotAccessible"),
          transformMeta);
      return;
    }
    addRemark(
        remarks,
        ICheckResult.TYPE_RESULT_OK,
        BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.TableOk", schemaTable),
        transformMeta);

    if (prev == null || prev.size() == 0) {
      addRemark(
          remarks,
          ICheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.NoFields"),
          transformMeta);
      return;
    }
    addRemark(
        remarks,
        ICheckResult.TYPE_RESULT_OK,
        BaseMessages.getString(
            PKG, "CrateDBBulkLoaderMeta.CheckResult.FieldsReceived", "" + prev.size()),
        transformMeta);

    // The two directions are validated independently. The original code reused one error flag
    // across both, so a problem in the first direction masked a clean second direction.
    checkStreamFieldsInTable(remarks, transformMeta, prev, r);
    checkTableFieldsInStream(remarks, transformMeta, prev, r);
  }

  /** Direction 1: every field we intend to write must exist as a column in the table. */
  private void checkStreamFieldsInTable(
      List<ICheckResult> remarks, TransformMeta transformMeta, IRowMeta prev, IRowMeta r) {
    StringBuilder missing = new StringBuilder();
    if (!specifyFields()) {
      // All incoming stream fields are written: each must match a table column by name.
      for (int i = 0; i < prev.size(); i++) {
        IValueMeta pv = prev.getValueMeta(i);
        if (r.indexOfValue(pv.getName()) < 0) {
          missing
              .append("\t\t")
              .append(pv.getName())
              .append(" (")
              .append(pv.getTypeDesc())
              .append(")")
              .append(Const.CR);
        }
      }
      if (missing.length() > 0) {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_ERROR,
            BaseMessages.getString(
                PKG,
                "CrateDBBulkLoaderMeta.CheckResult.FieldsNotFoundInOutput",
                missing.toString()),
            transformMeta);
      } else {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.AllFieldsFoundInOutput"),
            transformMeta);
      }
    } else {
      // Explicit mapping: every selected target column must exist in the table. We validate the
      // "fields" list (the serialized mapping) rather than the vestigial fieldDatabase array the
      // original used, which is never populated by this class and could be null here.
      for (CrateDBBulkLoaderField field : fields) {
        if (r.indexOfValue(field.getDatabaseField()) < 0) {
          missing.append("\t\t").append(field.getDatabaseField()).append(Const.CR);
        }
      }
      if (missing.length() > 0) {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_ERROR,
            BaseMessages.getString(
                PKG,
                "CrateDBBulkLoaderMeta.CheckResult.FieldsSpecifiedNotInTable",
                missing.toString()),
            transformMeta);
      } else {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.AllFieldsFoundInOutput"),
            transformMeta);
      }
    }
  }

  /** Direction 2: table columns should be fed by the input stream (warning/error when not). */
  private void checkTableFieldsInStream(
      List<ICheckResult> remarks, TransformMeta transformMeta, IRowMeta prev, IRowMeta r) {
    StringBuilder missing = new StringBuilder();
    if (!specifyFields()) {
      // Walk the table fields. The original iterated getFieldDatabase().length while reading
      // values from r, which was NPE/index-out-of-bounds prone; iterate the table row meta.
      for (int i = 0; i < r.size(); i++) {
        IValueMeta rv = r.getValueMeta(i);
        if (prev.indexOfValue(rv.getName()) < 0) {
          missing
              .append("\t\t")
              .append(rv.getName())
              .append(" (")
              .append(rv.getTypeDesc())
              .append(")")
              .append(Const.CR);
        }
      }
      if (missing.length() > 0) {
        // Only a warning: missing table columns will simply receive NULL/default values.
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_WARNING,
            BaseMessages.getString(
                PKG, "CrateDBBulkLoaderMeta.CheckResult.FieldsNotFound", missing.toString()),
            transformMeta);
      } else {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.AllFieldsFound"),
            transformMeta);
      }
    } else {
      // Explicit mapping: every mapped stream field must actually be present in the input.
      for (CrateDBBulkLoaderField vbf : fields) {
        if (prev.indexOfValue(vbf.getStreamField()) < 0) {
          missing.append("\t\t").append(vbf.getStreamField()).append(Const.CR);
        }
      }
      if (missing.length() > 0) {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_ERROR,
            BaseMessages.getString(
                PKG,
                "CrateDBBulkLoaderMeta.CheckResult.FieldsSpecifiedNotFound",
                missing.toString()),
            transformMeta);
      } else {
        addRemark(
            remarks,
            ICheckResult.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.CheckResult.AllFieldsFound"),
            transformMeta);
      }
    }
  }

  public void analyseImpact(
      IVariables variables,
      List<DatabaseImpact> impact,
      PipelineMeta pipelineMeta,
      TransformMeta transformMeta,
      IRowMeta prev,
      String[] input,
      String[] output,
      IRowMeta info,
      IHopMetadataProvider metadataProvider)
      throws HopTransformException {

    try {
      DatabaseMeta databaseMeta =
          metadataProvider.getSerializer(DatabaseMeta.class).load(variables.resolve(connection));

      // Every incoming field is written to the target table.
      if (prev != null) {
        for (int i = 0; i < prev.size(); i++) {
          IValueMeta v = prev.getValueMeta(i);
          impact.add(
              new DatabaseImpact(
                  DatabaseImpact.TYPE_IMPACT_WRITE,
                  pipelineMeta.getName(),
                  transformMeta.getName(),
                  databaseMeta.getDatabaseName(),
                  tablename,
                  v.getName(),
                  v.getName(),
                  v.getOrigin(),
                  "",
                  "Type = " + v.toStringMeta()));
        }
      }
    } catch (HopException e) {
      // Preserve the original exception as cause (the original dropped it).
      throw new HopTransformException(
          "Unable to get databaseMeta for connection: " + Const.CR + variables.resolve(connection),
          e);
    }
  }

  public SqlStatement getSqlStatements(
      IVariables variables,
      PipelineMeta pipelineMeta,
      TransformMeta transformMeta,
      IRowMeta prev,
      IHopMetadataProvider metadataProvider) {

    DatabaseMeta databaseMeta = pipelineMeta.findDatabase(connection, variables);

    SqlStatement retval =
        new SqlStatement(transformMeta.getName(), databaseMeta, null); // default: nothing to do!

    if (databaseMeta != null) {
      if (prev != null && prev.size() > 0) {
        if (!StringUtil.isEmpty(tablename)) {
          Database db = new Database(loggingObject, variables, databaseMeta);
          try {
            db.connect();

            String schemaTable =
                databaseMeta.getQuotedSchemaTableCombination(variables, schemaName, tablename);
            String ddl = db.getDDL(schemaTable, prev);

            // Empty string means: nothing to do: set it to null...
            if (ddl == null || ddl.length() == 0) {
              ddl = null;
            }

            retval.setSql(ddl);
          } catch (HopDatabaseException dbe) {
            retval.setError(
                BaseMessages.getString(
                    PKG, "CrateDBBulkLoaderMeta.Error.ErrorConnecting", dbe.getMessage()));
          } finally {
            db.disconnect();
          }
        } else {
          retval.setError(BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Error.NoTable"));
        }
      } else {
        retval.setError(BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Error.NoInput"));
      }
    } else {
      retval.setError(BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Error.NoConnection"));
    }

    return retval;
  }

  /**
   * Loads the row meta of the target table.
   *
   * @throws HopException when the connection is undefined, the table is not specified/found or the
   *     fields cannot be read
   */
  public IRowMeta getRequiredFields(IVariables variables) throws HopException {
    String realTableName = variables.resolve(tablename);
    String realSchemaName = variables.resolve(schemaName);

    DatabaseMeta databaseMeta =
        getParentTransformMeta().getParentPipelineMeta().findDatabase(connection, variables);

    if (databaseMeta != null) {
      Database db = new Database(loggingObject, variables, databaseMeta);
      try {
        db.connect();

        if (!StringUtil.isEmpty(realTableName)) {
          String schemaTable =
              databaseMeta.getQuotedSchemaTableCombination(
                  variables, realSchemaName, realTableName);

          // Check if this table exists...
          if (db.checkTableExists(realSchemaName, realTableName)) {
            return db.getTableFields(schemaTable);
          } else {
            throw new HopException(
                BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Exception.TableNotFound"));
          }
        } else {
          throw new HopException(
              BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Exception.TableNotSpecified"));
        }
      } catch (Exception e) {
        throw new HopException(
            BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Exception.ErrorGettingFields"), e);
      } finally {
        db.disconnect();
      }
    } else {
      throw new HopException(
          BaseMessages.getString(PKG, "CrateDBBulkLoaderMeta.Exception.ConnectionNotDefined"));
    }
  }

  /**
   * @return Fields containing the fieldnames in the database insert.
   */
  public String[] getFieldDatabase() {
    return fieldDatabase;
  }

  /**
   * @param fieldDatabase The fields containing the names of the fields to insert.
   */
  public void setFieldDatabase(String[] fieldDatabase) {
    this.fieldDatabase = fieldDatabase;
  }

  /**
   * @return the schemaName
   */
  public String getSchemaName() {
    return schemaName;
  }

  /**
   * @param schemaName the schemaName to set
   */
  public void setSchemaName(String schemaName) {
    this.schemaName = schemaName;
  }

  @Override
  public boolean supportsErrorHandling() {
    return true;
  }

  @Override
  public String getMissingDatabaseConnectionInformationMessage() {
    // use default message
    return null;
  }

  @Override
  public RowMeta getRowMeta(IVariables variables, ITransformData transformData) {
    return (RowMeta) ((CrateDBBulkLoaderData) transformData).getInsertRowMeta();
  }

  @Override
  public List<String> getDatabaseFields() {
    if (!specifyFields()) {
      return Collections.emptyList();
    }
    List<String> items = new ArrayList<>();
    for (CrateDBBulkLoaderField vbf : fields) {
      items.add(vbf.getDatabaseField());
    }
    return items;
  }

  @Override
  public List<String> getStreamFields() {
    if (!specifyFields()) {
      return Collections.emptyList();
    }
    List<String> items = new ArrayList<>();
    for (CrateDBBulkLoaderField vbf : fields) {
      items.add(vbf.getStreamField());
    }
    return items;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.cratedbbulkloader.http;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.exceptions.CrateDBHopException;
import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.exceptions.UnauthorizedCrateDBAccessException;

/**
 * Minimal client for CrateDB's HTTP bulk endpoint: issues a parameterized bulk INSERT for a batch
 * of rows using HTTP Basic authentication.
 */
public class BulkImportClient {

  // Jackson's ObjectMapper is thread-safe once configured; share one instead of one per call.
  private static final ObjectMapper MAPPER = new ObjectMapper();

  private final String httpEndpoint;
  private final String authorizationHeader;
  // HttpClient is immutable and thread-safe; reuse it across requests instead of re-creating it.
  private final HttpClient client;

  /**
   * @param httpEndpoint full URL of the CrateDB HTTP endpoint (e.g. http://host:4200/_sql)
   * @param username Basic-auth user
   * @param password Basic-auth password
   */
  public BulkImportClient(String httpEndpoint, String username, String password) {
    this.httpEndpoint = httpEndpoint;
    this.authorizationHeader = header(username, password);
    this.client = HttpClient.newHttpClient();
  }

  /** Builds the HTTP Basic Authorization header value from the credentials. */
  private String header(String username, String password) {
    String auth = username + ":" + password;
    // Encode explicitly as UTF-8 instead of the platform default charset.
    String encodedAuth = Base64.getEncoder().encodeToString(auth.getBytes(StandardCharsets.UTF_8));
    return "Basic " + encodedAuth;
  }

  /**
   * Inserts a batch of rows into {@code schema.table} via CrateDB's bulk-args protocol.
   *
   * @param schema target schema name
   * @param table target table name
   * @param columns target column names, in the same order as each args array
   * @param args one Object[] of column values per row
   * @return parsed per-batch metrics
   * @throws HopException when the request cannot be sent
   * @throws UnauthorizedCrateDBAccessException on HTTP 401
   * @throws CrateDBHopException on any other non-200 status
   */
  public HttpBulkImportResponse batchInsert(
      String schema, String table, String[] columns, List<Object[]> args)
      throws HopException, CrateDBHopException, JsonProcessingException {
    BulkImportRequest body = new BulkImportRequest(schema + "." + table, List.of(columns), args);
    HttpRequest request =
        HttpRequest.newBuilder()
            .uri(URI.create(httpEndpoint))
            .header("Content-Type", "application/json")
            .header("Accept", "application/json")
            .header("Authorization", authorizationHeader)
            .POST(
                HttpRequest.BodyPublishers.ofString(
                    MAPPER.writeValueAsString(body), StandardCharsets.UTF_8))
            .build();

    final HttpResponse<String> httpResponse;
    try {
      httpResponse = client.send(request, HttpResponse.BodyHandlers.ofString());
    } catch (IOException e) {
      throw new HopException("Couldn't process the request", e);
    } catch (InterruptedException e) {
      // Restore the interrupt status so callers/thread pools can observe the interruption.
      Thread.currentThread().interrupt();
      throw new HopException("Couldn't process the request", e);
    }
    switch (httpResponse.statusCode()) {
      case 200:
        return HttpBulkImportResponse.fromHttpResponse(httpResponse);
      case 401: // For crateDB there's no difference between 401 and 403, it always returns 401, and
        // then the internal error code helps to distinguish between the two.
        throw new UnauthorizedCrateDBAccessException(httpResponse.body());
      default:
        throw new CrateDBHopException(httpResponse.statusCode(), httpResponse.body());
    }
  }

  /** JSON body for CrateDB's bulk-args endpoint: a parameterized statement plus row arguments. */
  private static class BulkImportRequest {
    private final String stmt;
    private final List<Object[]> bulkArgs;

    // NOTE(review): table/column names are interpolated into the SQL text (values go through
    // placeholders). They come from transform metadata, but confirm they are quoted upstream.
    BulkImportRequest(String table, List<String> columns, List<Object[]> bulkArgs) {
      final String argsPlaceholders =
          columns.stream().map(c -> "?").collect(Collectors.joining(", "));
      this.stmt =
          "INSERT INTO "
              + table
              + " ("
              + String.join(", ", columns)
              + ") VALUES ("
              + argsPlaceholders
              + ")";
      this.bulkArgs = bulkArgs;
    }

    public String getStmt() {
      return stmt;
    }

    @JsonProperty("bulk_args")
    public List<Object[]> getBulkArgs() {
      return bulkArgs;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.cratedbbulkloader.http;

import java.util.ArrayList;
import java.util.List;

/**
 * Raw mapping of the JSON body returned by CrateDB's HTTP bulk endpoint; deserialized by Jackson
 * in {@code HttpBulkImportResponse.fromHttpResponse}.
 */
public class CrateDBHttpResponse {

  // One entry per bulk argument. NOTE(review): the element type is raw here but
  // HttpBulkImportResponse streams these as RowMetrics — the generic parameter
  // (List<RowMetrics>) appears to have been lost; confirm and restore it.
  List results;
  // Statement duration as reported by CrateDB — presumably milliseconds; TODO confirm units.
  Float duration;

  // NOTE(review): fields are package-private with no setters; verify Jackson can actually
  // populate them (e.g. via getter-as-setter for the collection) against a real response.
  public CrateDBHttpResponse() {
    results = new ArrayList<>();
  }

  public Float getDuration() {
    return duration;
  }

  public List getResults() {
    return results;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.cratedbbulkloader.http;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.net.http.HttpResponse;
import org.apache.hop.core.exception.HopException;

/**
 * Immutable summary of one CrateDB bulk-import HTTP call: the HTTP status code plus how many rows
 * were written and how many were rejected, derived from the per-argument row counts in the body.
 */
public class HttpBulkImportResponse {

  private final long outputRows;

  private final int statusCode;

  private final long rejectedRows;

  private HttpBulkImportResponse(int statusCode, long outputRows, long rejectedRows) {
    this.statusCode = statusCode;
    this.outputRows = outputRows;
    this.rejectedRows = rejectedRows;
  }

  /**
   * Parses the CrateDB response body into row metrics.
   *
   * <p>CrateDB reports one result entry per bulk argument: a positive rowCount is the number of
   * rows written for that argument, while a failed argument carries a negative rowCount.
   *
   * @param response the HTTP response whose body is CrateDB's JSON result
   * @throws HopException when the body cannot be parsed
   */
  public static HttpBulkImportResponse fromHttpResponse(HttpResponse<String> response)
      throws HopException {
    // Be lenient about extra fields so endpoint additions don't break parsing.
    ObjectMapper mapper =
        new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    try {
      CrateDBHttpResponse crateDBHttpResponse =
          mapper.readValue(response.body(), CrateDBHttpResponse.class);
      // Sequential streams: the result list is small (one entry per batch argument); the
      // original parallelStream() only added fork-join overhead.
      long outputRows =
          crateDBHttpResponse.results.stream()
              .filter(r -> r.getRowCount() > 0)
              .mapToInt(RowMetrics::getRowCount)
              .sum();
      // Each negative rowCount marks one rejected bulk argument, so we count entries.
      // (The original piped through a redundant mapToInt before count().)
      long rejectedRows =
          crateDBHttpResponse.results.stream().filter(r -> r.getRowCount() < 0).count();
      return new HttpBulkImportResponse(response.statusCode(), outputRows, rejectedRows);
    } catch (JsonProcessingException e) {
      throw new HopException("Unable to parse body of response", e);
    }
  }

  public long outputRows() {
    return outputRows;
  }

  public int statusCode() {
    return statusCode;
  }

  public long rejectedRows() {
    return rejectedRows;
  }
}
index 00000000000..684fb293f6b --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpImportMetrics.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Immutable value holder for the metrics of one HTTP bulk-import call: rows transferred, bytes
 * sent and elapsed duration.
 */
public class HttpImportMetrics {
  // Final fields (were mutable): this type is a plain read-only carrier with only getters.
  private final int rows;
  private final int bytes;
  private final int duration;

  /**
   * @param rows number of rows covered by the call
   * @param bytes payload size in bytes
   * @param duration elapsed time (units defined by the caller; not interpreted here)
   */
  public HttpImportMetrics(int rows, int bytes, int duration) {
    this.rows = rows;
    this.bytes = bytes;
    this.duration = duration;
  }

  public int getRows() {
    return rows;
  }

  public int getBytes() {
    return bytes;
  }

  public int getDuration() {
    return duration;
  }
}
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader.http; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class RowMetrics { + + @JsonProperty("rowcount") + private int rowCount; + + public RowMetrics() { + // Empty constructor needed for Jackson + } + + public int getRowCount() { + return rowCount; + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/exceptions/CrateDBHopException.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/exceptions/CrateDBHopException.java new file mode 100644 index 00000000000..55c5bc1e561 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/exceptions/CrateDBHopException.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Checked exception raised when a CrateDB bulk-import HTTP call fails; the message carries the
 * HTTP status code and the raw response body for diagnosis.
 */
public class CrateDBHopException extends Exception {

  /**
   * Builds an exception from the HTTP status code and response body alone.
   *
   * <p>Bug fix: the original constructor had an empty body and silently discarded both
   * arguments, yielding an exception with no message at all.
   *
   * @param statusCode HTTP status code returned by CrateDB
   * @param body raw response body
   */
  public CrateDBHopException(int statusCode, String body) {
    this("CrateDB request failed", statusCode, body);
  }

  /**
   * @param message context describing the failed operation
   * @param statusCode HTTP status code returned by CrateDB
   * @param body raw response body
   */
  public CrateDBHopException(String message, int statusCode, String body) {
    super(String.format("%s: %d %s", message, statusCode, body));
  }
}
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader.http.exceptions; + +public class UnauthorizedCrateDBAccessException extends CrateDBHopException { + public UnauthorizedCrateDBAccessException(String body) { + super("Unauthorized CrateDB access", 401, body); + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/main/resources/CrateDBBulkLoader.svg b/plugins/transforms/cratedbbulkloader/src/main/resources/CrateDBBulkLoader.svg new file mode 100644 index 00000000000..3be557b9798 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/resources/CrateDBBulkLoader.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + diff --git a/plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_en_US.properties b/plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_en_US.properties new file mode 100644 index 00000000000..5b9ec205316 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_en_US.properties @@ -0,0 +1,125 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +BaseTransform.TypeLongDesc.CrateDBBulkLoaderMessage=CrateDB bulk loader +BaseTransform.TypeTooltipDesc.CrateDBBulkLoaderMessage=Bulk load data into a CrateDB database table + +CrateDBBulkLoaderDialog.MainTab.TabItem.Label=General +CrateDBBulkLoaderDialog.AWSTab.TabItem.Label=AWS Authentication +CrateDBBulkLoaderDialog.HttpTab.TabItem.Label=HTTP Authentication +CrateDBBulkLoaderDialog.FieldsTab.TabItem.Label=Fields + +CrateDBBulkLoaderDialog.AvailableSchemas.Title=Available schemas +CrateDBBulkLoaderDialog.AvailableSchemas.Message=Please select a schema name +CrateDBBulkLoaderDialog.NoSchema.Error=There is no schema available. +CrateDBBulkLoaderDialog.GetSchemas.Error=ERROR +CrateDBBulkLoaderDialog.ErrorGettingSchemas=Error getting schemas + +CrateDBBulkLoaderDialog.StreamCsvToS3.Label=Stream to S3 +CrateDBBulkLoaderDialog.StreamCsvToS3.Tooltip=Writes the current pipeline stream to a CSV file in an S3 bucket before copying into CrateDB. +CrateDBBulkLoaderDialog.ReadFromFile.Label=Read from file name/path +CrateDBBulkLoaderDialog.ReadFromFile.Tooltip=Name of the file from which data will be read. If the ''Stream to S3'' flag is checked,\n the file will be created by the transform itself otherwise we expect to find the file\n already available on that S3 path. 
+CrateDBBulkLoader.Connection.Connected=Connected to database {0} +CrateDBBulkLoaderDialog.AuthenticationType.Label=Authentication type +CrateDBBulkLoaderDialog.Authenticate.UseSystemVars.Label=Use AWS system variables +CrateDBBulkLoaderDialog.Authenticate.UseSystemVars.Tooltip=Specify whether you want to use the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment (operating system) variables, or specify values for this transform only + +CrateDBBulkLoaderDialog.HttpPassword.Label=HTTP Password +CrateDBBulkLoaderDialog.HttpPassword.Tooltip=HTTP password used to authenticate the call +CrateDBBulkLoaderDialog.HttpLogin.Label=HTTP Login +CrateDBBulkLoaderDialog.HttpLogin.Tooltip=HTTP login used to authenticate the call + + +CrateDBBulkLoader.Exception.FailedToFindField=Could not find field {0} in stream +CrateDBBulkLoader.Exception.FieldRequired=Field [{0}] is required and couldn''t be found\! +CrateDBBulkLoaderDialog.BuildSQLError.DialogMessage=Unable to build the SQL statement because of an error +CrateDBBulkLoaderDialog.BuildSQLError.DialogTitle=Couldn''t build SQL +CrateDBBulkLoaderDialog.ColumnInfo.StreamField=Stream field +CrateDBBulkLoaderDialog.ColumnInfo.TableField=Table field +CrateDBBulkLoaderDialog.ConnectionError.DialogMessage=Please select a valid connection\! +CrateDBBulkLoaderDialog.ConnectionError2.DialogMessage=Please select a valid database connection first\! +CrateDBBulkLoaderDialog.DialogTitle=CrateDB bulk loader +CrateDBBulkLoaderDialog.DoMapping.Button=Enter field mapping +CrateDBBulkLoaderDialog.DoMapping.SomeFieldsNotFoundContinue=Certain fields could not be found in the existing mapping, do you want to continue? +CrateDBBulkLoaderDialog.DoMapping.SomeFieldsNotFoundTitle=Certain referenced fields were not found\! 
+CrateDBBulkLoaderDialog.DoMapping.SomeSourceFieldsNotFound=These source fields were not found\\\: {0} +CrateDBBulkLoaderDialog.DoMapping.SomeTargetFieldsNotFound=These target fields were not found\\\: {0} +CrateDBBulkLoaderDialog.DoMapping.UnableToFindSourceFields.Message=It was not possible to retrieve the source fields for this transform because of an error\: +CrateDBBulkLoaderDialog.DoMapping.UnableToFindSourceFields.Title=Error getting source fields +CrateDBBulkLoaderDialog.DoMapping.UnableToFindTargetFields.Message=It was not possible to retrieve the target fields for this transform because of an error\: +CrateDBBulkLoaderDialog.DoMapping.UnableToFindTargetFields.Title=Error getting target fields +CrateDBBulkLoaderDialog.FailedToFindField.Message=Could not find field {0} in stream +CrateDBBulkLoaderDialog.FailedToGetFields.DialogMessage=Unable to get fields from previous transforms because of an error +CrateDBBulkLoaderDialog.FailedToGetFields.DialogTitle=Get fields failed +CrateDBBulkLoaderDialog.GetFields.Button=\ &Get fields +CrateDBBulkLoaderDialog.InsertFields.Label=Fields to insert\: +CrateDBBulkLoaderDialog.Log.LookingAtConnection=Looking at connection\: {0} + +CrateDBBulkLoaderDialog.NoSQL.DialogMessage=No SQL needs to be executed to make this transform function properly. 
+CrateDBBulkLoaderDialog.NoSQL.DialogTitle=OK +CrateDBBulkLoaderDialog.SpecifyFields.Label=Specify database fields +CrateDBBulkLoaderDialog.UseHTTPEndpoint.Label=Use HTTP Endpoint +CrateDBBulkLoaderDialog.HTTPEndpoint.Label=HTTP Endpoint +CrateDBBulkLoaderDialog.HTTPEndpointBatchSize.Label=Batch size +CrateDBBulkLoaderDialog.TargetSchema.Label=Target schema +CrateDBBulkLoaderDialog.IAMRole.Label=IAM Role +CrateDBBulkLoaderDialog.TargetTable.Label=Target table +CrateDBBulkLoaderMeta.CheckResult.AllFieldsFound=All fields in the table are found in the input stream, coming from previous transforms +CrateDBBulkLoaderMeta.CheckResult.AllFieldsFoundInOutput=All fields, coming from previous transforms, are found in the output table +CrateDBBulkLoaderMeta.CheckResult.ConnectionExists=Connection exists +CrateDBBulkLoaderMeta.CheckResult.ConnectionOk=Connection to database OK +CrateDBBulkLoaderMeta.CheckResult.ExpectedInputError=No input received from other transforms\! +CrateDBBulkLoaderMeta.CheckResult.ExpectedInputOk=Transform is receiving info from other transforms. +CrateDBBulkLoaderMeta.CheckResult.FieldsNotFound=Fields in table, not found in input stream\:\n\n{0} +CrateDBBulkLoaderMeta.CheckResult.FieldsNotFoundInOutput=Fields in input stream, not found in output table\:\n\n{0} +CrateDBBulkLoaderMeta.CheckResult.FieldsReceived=Transform is connected to previous one, receiving {0} fields +CrateDBBulkLoaderMeta.CheckResult.FieldsSpecifiedNotFound=Specified fields not found in input stream\:\n\n{0} +CrateDBBulkLoaderMeta.CheckResult.FieldsSpecifiedNotInTable=Specified table fields not found in output table\:\n\n{0} +CrateDBBulkLoaderMeta.CheckResult.NoConnection=Please select or create a connection to use +CrateDBBulkLoaderMeta.CheckResult.NoFields=Couldn''t find fields from previous transforms, check the hops...\! +CrateDBBulkLoaderMeta.CheckResult.NoTableName=No table name was entered in this transform. 
+CrateDBBulkLoaderMeta.CheckResult.TableAccessible=Table [{0}] exists and is accessible +CrateDBBulkLoaderMeta.CheckResult.TableError=Table [{0}] doesn''t exist or can''t be read on this database connection. +CrateDBBulkLoaderMeta.CheckResult.TableNotAccessible=Couldn''t read the table info, please check the table-name & permissions. +CrateDBBulkLoaderMeta.CheckResult.TableOk=Table [{0}] is readable and we got the fields from it. +CrateDBBulkLoaderMeta.CheckResult.UndefinedError=An error occurred\: {0} +CrateDBBulkLoaderMeta.Error.ErrorConnecting=I was unable to connect to the database to verify the status of the table\: {0} +CrateDBBulkLoaderMeta.Error.NoConnection=There is no connection defined in this transform. +CrateDBBulkLoaderMeta.Error.NoInput=Not receiving any fields from previous transforms. Check the previous transforms for errors & the connecting hops. +CrateDBBulkLoaderMeta.Error.NoTable=No table is defined on this connection. +CrateDBBulkLoaderMeta.Exception.ConnectionNotDefined=Unable to determine the required fields because the database connection wasn''t defined. +CrateDBBulkLoaderMeta.Exception.ErrorGettingFields=Unable to determine the required fields. +CrateDBBulkLoaderMeta.Exception.TableNotFound=Unable to determine the required fields because the specified database table couldn''t be found. +CrateDBBulkLoaderMeta.Exception.TableNotSpecified=Unable to determine the required fields because the database table name wasn''t specified. +CrateDBBulkLoader.Injection.CONNECTIONNAME=The name of the database connection to get table names from. +CrateDBBulkLoader.Injection.FIELDS=Fields +CrateDBBulkLoader.Injection.SCHEMANAME=The name of the database schema to use. +CrateDBBulkLoader.Injection.TABLENAME=The name of the table to insert records into. +CrateDBBulkLoader.Injection.MAIN_OPTIONS=Main Options +CrateDBBulkLoader.Injection.DIRECT=Set this option to insert data into the Read Optimized Store with a COPY DIRECT statement. 
+CrateDBBulkLoader.Injection.ABORTONERROR=Set this option to abort and rollback data loading upon an error. +CrateDBBulkLoader.Injection.EXCEPTIONSFILENAME=The optional filename to write messages about rejected records. +CrateDBBulkLoader.Injection.REJECTEDDATAFILENAME=The optional filename to write the rejected rows of data. +CrateDBBulkLoader.Injection.STREAMNAME=The optional name of the stream which appears in the vt_load_stream table. +CrateDBBulkLoader.Injection.DATABASE_FIELDS=Fields +CrateDBBulkLoader.Injection.FIELDSTREAM=The source field names containing the values to insert. +CrateDBBulkLoader.Injection.FIELDDATABASE=The target field names to insert into the CrateDB table. +CrateDBBulkLoaderDialog.TruncateTable.Label=Truncate table +CrateDBBulkLoaderDialog.OnlyWhenHaveRows.Label=Truncate on first row +CrateDBBulkLoader.Injection.TruncateTable.Field=Truncate table +CrateDBBulkLoader.Inject.OnlyWhenHaveRows.Field=Truncate on first row diff --git a/plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_it_IT.properties b/plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_it_IT.properties new file mode 100644 index 00000000000..1692cdb4726 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/main/resources/org/apache/hop/pipeline/transforms/cratedbbulkloader/messages/messages_it_IT.properties @@ -0,0 +1,19 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# + diff --git a/plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/BulkImportClientIT.java b/plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/BulkImportClientIT.java new file mode 100644 index 00000000000..82d79065e9d --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/BulkImportClientIT.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader.http; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; + +import com.fasterxml.jackson.core.JsonProcessingException; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.List; +import org.apache.hop.core.exception.HopException; +import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.exceptions.CrateDBHopException; +import org.apache.hop.pipeline.transforms.cratedbbulkloader.http.exceptions.UnauthorizedCrateDBAccessException; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.testcontainers.cratedb.CrateDBContainer; +import org.testcontainers.utility.MountableFile; + +public class BulkImportClientIT { + + private static String CRATE_ENDPOINT; + + private static Connection connection; + + @ClassRule + public static CrateDBContainer crateDBContainer = + new CrateDBContainer("crate") + .withCopyFileToContainer( + MountableFile.forClasspathResource("crate.yml"), "/crate/config/crate.yml") + .withExposedPorts(4200, 5432); + + @BeforeClass + public static void setupAll() throws SQLException { + CRATE_ENDPOINT = + "http://" + + crateDBContainer.getHost() + + ":" + + crateDBContainer.getMappedPort(4200) + + "/_sql"; + connection = crateDBContainer.createConnection(""); + + connection + .createStatement() + .execute("CREATE TABLE crate.foo (id INT PRIMARY KEY, name VARCHAR(10), description TEXT)"); + + connection.createStatement().execute("CREATE USER alice WITH (password='password')"); + connection.createStatement().execute("GRANT ALL PRIVILEGES TO alice"); + + connection.createStatement().execute("CREATE USER bob WITH (password='password')"); + connection.createStatement().execute("GRANT DQL ON SCHEMA crate TO bob"); + } + + @Test + public void whenDataSizeGreaterThanMaxSize_shouldReturnRejectedRows() + throws HopException, JsonProcessingException, 
CrateDBHopException { + BulkImportClient client = new BulkImportClient(CRATE_ENDPOINT, "alice", "password"); + + var response = + client.batchInsert( + "crate", + "foo", + new String[] {"id", "name", "description"}, + List.of( + new Object[] {1, "Very Long Name", "This is Alice"}, + new Object[] {2, "Bob", "This is Bob"})); + + assertEquals(200, response.statusCode()); + assertEquals(0, response.outputRows()); + assertEquals(2, response.rejectedRows()); + } + + @Test + public void whenRequestIsValid_shouldReturn200AndResult() + throws HopException, JsonProcessingException, SQLException, CrateDBHopException { + BulkImportClient client = new BulkImportClient(CRATE_ENDPOINT, "alice", "password"); + HttpBulkImportResponse response = + client.batchInsert( + "crate", + "foo", + new String[] {"id", "name", "description"}, + List.of( + new Object[] {1, "Alice", "This is Alice"}, + new Object[] {2, "Bob", "This is Bob"})); + + connection.createStatement().execute("REFRESH TABLE crate.foo"); + ResultSet rs = connection.createStatement().executeQuery("SELECT * FROM crate.foo"); + + int size = 0; + while (rs.next()) { + size++; + } + assertEquals(200, response.statusCode()); + assertEquals(3, rs.getMetaData().getColumnCount()); + assertEquals(2, size); + } + + @Test + public void whenWrongPassword_shouldThrowUnauthorizedException() + throws HopException, JsonProcessingException, SQLException, CrateDBHopException { + BulkImportClient client = new BulkImportClient(CRATE_ENDPOINT, "alice", "wrongpassword"); + + assertThrows( + UnauthorizedCrateDBAccessException.class, + () -> + client.batchInsert( + "crate", + "foo", + new String[] {"id", "name", "description"}, + List.of( + new Object[] {1, "Alice", "This is Alice"}, + new Object[] {2, "Bob", "This is Bob"}))); + } + + @Test + public void whenWrongUser_shouldThrowUnauthorizedException() { + BulkImportClient client = new BulkImportClient(CRATE_ENDPOINT, "charlie", "apassword"); + + assertThrows( + 
UnauthorizedCrateDBAccessException.class, + () -> + client.batchInsert( + "crate", + "foo", + new String[] {"id", "name", "description"}, + List.of( + new Object[] {1, "Alice", "This is Alice"}, + new Object[] {2, "Bob", "This is Bob"}))); + } + + @Test + public void whenUserNotAuthorized_shouldThrowUnauthorizedInsteadOfForbidden() { + BulkImportClient client = new BulkImportClient(CRATE_ENDPOINT, "bob", "password"); + + assertThrows( + UnauthorizedCrateDBAccessException.class, + () -> + client.batchInsert( + "crate", + "foo", + new String[] {"id", "name", "description"}, + List.of( + new Object[] {1, "Alice", "This is Alice"}, + new Object[] {2, "Bob", "This is Bob"}))); + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpBulkImportResponseTest.java b/plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpBulkImportResponseTest.java new file mode 100644 index 00000000000..d9d7afe97a8 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/test/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/http/HttpBulkImportResponseTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hop.pipeline.transforms.cratedbbulkloader.http; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.when; + +import java.net.http.HttpResponse; +import org.apache.hop.core.exception.HopException; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; + +public class HttpBulkImportResponseTest { + + private static final String MOCK_ENDPOINT = "http://localhost:4200/_sql"; + private static final String MOCK_USER = "alice"; + private static final String MOCK_PASSWORD = "password"; + + private HttpResponse mockResponse; + private CrateDBHttpResponse crateDBHttpResponse; + private BulkImportClient client; + + @Before + public void setup() { + mockResponse = Mockito.mock(HttpResponse.class); + crateDBHttpResponse = new CrateDBHttpResponse(); + client = new BulkImportClient(MOCK_ENDPOINT, MOCK_USER, MOCK_PASSWORD); + } + + @Test + public void shouldCalculateOutputRows() throws HopException { + String responseBody = + "{\"cols\":[],\"duration\":20.493383,\"results\":[{\"rowcount\": 1},{\"rowcount\": 1},{\"rowcount\": 1},{\"rowcount\": 1},{\"rowcount\": 1}]}"; + when(mockResponse.body()).thenReturn(responseBody); + when(mockResponse.statusCode()).thenReturn(200); + + HttpBulkImportResponse httpBulkImportResponse = + HttpBulkImportResponse.fromHttpResponse(mockResponse); + + assertEquals(200, httpBulkImportResponse.statusCode()); + assertEquals(5, httpBulkImportResponse.outputRows()); + } + + @Test + public void shouldCountRejectedRows() throws HopException { + String responseBody = + "{\"cols\":[],\"duration\":20.493383,\"results\":[{\"rowcount\": 1},{\"rowcount\": 1},{\"rowcount\": 1},{\"rowcount\": 1},{\"rowcount\":-2}]}"; + // as per documentation: rowcount == 1 means imported row, while rowcount == -2 means rejected + // row + 
when(mockResponse.body()).thenReturn(responseBody); + when(mockResponse.statusCode()).thenReturn(200); + + HttpBulkImportResponse httpBulkImportResponse = + HttpBulkImportResponse.fromHttpResponse(mockResponse); + + assertEquals(200, httpBulkImportResponse.statusCode()); + assertEquals(4, httpBulkImportResponse.outputRows()); + assertEquals(1, httpBulkImportResponse.rejectedRows()); + } + + @Test + public void shouldReturnZeroOutputRowsWhenEmpty() throws HopException { + String emptyResultsResponseBody = "{\"cols\":[],\"duration\":20.493383,\"results\":[]}"; + when(mockResponse.statusCode()).thenReturn(200); + when(mockResponse.body()).thenReturn(emptyResultsResponseBody); + + HttpBulkImportResponse httpBulkImportResponse = + HttpBulkImportResponse.fromHttpResponse(mockResponse); + + assertEquals(0, httpBulkImportResponse.outputRows()); + } + + @Test + public void shouldThrowRuntimeExceptionWhenInvalidJson() { + when(mockResponse.body()).thenReturn("invalid json"); + + assertThrows(HopException.class, () -> HttpBulkImportResponse.fromHttpResponse(mockResponse)); + } +} diff --git a/plugins/transforms/cratedbbulkloader/src/test/resources/crate.yml b/plugins/transforms/cratedbbulkloader/src/test/resources/crate.yml new file mode 100644 index 00000000000..ceef64391e8 --- /dev/null +++ b/plugins/transforms/cratedbbulkloader/src/test/resources/crate.yml @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +auth: + host_based: + enabled: true + config: + 0: + user: crate + # address: _local_ + method: trust + 99: + method: password + +network.host: _local_,_site_ + +# Paths +path: + logs: /data/log + data: /data/data +blobs: + path: /data/blobs diff --git a/plugins/transforms/pom.xml b/plugins/transforms/pom.xml index 0b2f7bdbec1..76cfdbe1d32 100644 --- a/plugins/transforms/pom.xml +++ b/plugins/transforms/pom.xml @@ -78,6 +78,7 @@ combinationlookup concatfields constant + cratedbbulkloader creditcardvalidator cubeinput cubeoutput