Compare commits


446 Commits

Author SHA1 Message Date
zshen-talend
7b8c60f92a fix(TDQ-20754): revert code to keep project name lowercase (#7742) 2022-07-08 16:17:45 +08:00
Dmytro Grygorenko
0442b1a092 chore(TDI-48185): backport leftovers for "nimbus-jose" update. (#7718) 2022-07-04 11:48:27 +03:00
Dmytro Grygorenko
2cbde0756f chore(TDI-48134): backport "nimbus-jose" lib update. (#7714) 2022-07-04 11:12:58 +03:00
Abdulhamid Rashankar
4543d6a129 APPINT-34489 : profile added for prometheus dependencies (#7700) 2022-06-29 13:56:08 +02:00
zshen-talend
a61882573f fix(TDQ-19623): fix junit about BuildJobHandlerTest.testBuildJobWithTDM (#7692) (#7697) 2022-06-29 13:04:31 +08:00
kjwang
748e0d218f Kjwang/fix tup 35887 npe 73 (#7691)
"build job" working in 7.2.1 fails in 8.0.1: Failed to generate code :
NPE org.talend.designer.dbmap.language.oracle.OracleGenerationManager.addQuoteForSpecialChar
https://jira.talendforge.org/browse/TUP-35887
2022-06-29 11:38:02 +08:00
bhe-talendbj
0752bfd4fa fix(TUP-35942): update relevant job only after renaming joblet (#7678) 2022-06-29 11:20:29 +08:00
Jill Yan
503e187f96 APPINT-34694 build routlet module (#7674)
Co-authored-by: jillyan <yan955599@gmail.com>
2022-06-29 10:48:36 +08:00
zshen-talend
09bdc7e42d fix(TDQ-19623): make sure the project name is never forced to lowercase (#7659) 2022-06-17 10:13:47 +08:00
zshen-talend
885fe944f5 fix(TDQ-19623): make sure the build job action works for reference projects (#7653) 2022-06-15 18:17:34 +08:00
apoltavtsev
2087cdfe68 fix(APPINT-34416) Migrate to CXF 3.4.7 (#7626) 2022-06-07 15:42:14 +02:00
Oleksandr Zhelezniak
81345dd1b0 fix(TDI-47895): oracle input float length (#7545)
* Force a length of 38 for NUMBER (FLOAT) in Oracle when getPrecision returns a value bigger than 38.
* getPrecision for an Oracle FLOAT returns 126.
* That is the expected binary precision for the Oracle FLOAT type; its decimal precision is 38 (see the sketch below).
2022-06-07 09:43:50 +03:00
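A minimal sketch of the clamping described above, assuming a plain JDBC metadata read (the class and method here are illustrative, not Talend's actual retriever):

    import java.sql.ResultSetMetaData;
    import java.sql.SQLException;

    // Oracle reports the binary precision (126) for FLOAT through
    // ResultSetMetaData.getPrecision, while the maximum decimal precision
    // Oracle supports is 38, so the schema length is capped there.
    final class OracleFloatPrecision {
        static final int ORACLE_MAX_DECIMAL_PRECISION = 38;

        static int schemaLength(ResultSetMetaData meta, int column) throws SQLException {
            int precision = meta.getPrecision(column); // 126 for NUMBER (FLOAT)
            return Math.min(precision, ORACLE_MAX_DECIMAL_PRECISION);
        }
    }
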
sbieliaievl
d2982b93e7 bugfix(APPINT-33830) - migration for demos (#7600)
* Added rename timeout parameters for tRESTClient migration

* Added rename timeout parameters for tRESTClient migration

* Comment changes for RenameTimeoutParameterstRESTClient

* Code refactoring in RenameTimeoutParameterstRESTClient
2022-06-06 09:54:00 +03:00
wang wei
b2ce9e2ee2 fix(TDI-48037): CVE: org.postgresql:postgresql:42.2.14 (#7574) 2022-06-02 16:37:38 +08:00
Jill Yan
25d0a3c978 jill/fix-m73/APPINT-34618 add mssql driver to import-package for child job (#7606)
* APPINT-34618 add mssql driver to import-package for child job

* APPINT-34618 remove mssql-jdbc in build lib

Co-authored-by: jillyan <yan955599@gmail.com>
2022-06-02 14:36:39 +08:00
sbliu
1d4e07ebd0 chore(TUP-35245) upgrade cxf to 3.4.7 (#7475) 2022-06-02 09:30:45 +08:00
wang wei
2a591f79ce fix(TDI-47803): Compile error after applying Patch_20220107_R2022-01_v1-7.3.1 (#7474) 2022-06-01 11:29:36 +08:00
sbliu
30d42547e5 chore(TUP-35684) upgrade xmlbeans. (#7590) 2022-05-31 16:12:28 +08:00
bhe-talendbj
fe468cbf15 chore(TUP-35685): upgrade database driver (#7575) 2022-05-31 11:39:04 +08:00
vyu-talend
ce2b399fe4 chore(TDI-47796):upgrade cxf to 3.4.7 (#7570)
* fix(TDI-47796):backport talend-ws from master to 7.3 to align

* chore(TDI-47796):bump cxf version to 3.4.7

* chore(TDI-47796):upgrade talend-ws version for twebservice comps
2022-05-30 15:51:59 +08:00
Chao MENG
55aba706f6 Cmeng/fix/tup 35608 db version 7 (#7572)
* fix(TUP-35608): Stat and Logs screen - Database connection dropdown
incorrect
https://jira.talendforge.org/browse/TUP-35608

* fix(TUP-35608): Stat and Logs screen - Database connection dropdown
incorrect
https://jira.talendforge.org/browse/TUP-35608
2022-05-26 19:45:11 +08:00
Zhiwei Xue
4ee53f979e fix(TUP-31233):Routine pom is not updated after installing the required (#7547)
* fix(TUP-31233):Routine pom is not updated after installing the required
jar of a user routine

* fix(TUP-31233):Routine pom is not updated after installing the required
jar of a user routine

* fix(TUP-31233):Routine pom is not updated after installing the required
jar of a user routine
2022-05-25 18:04:51 +08:00
Dmytro Sylaiev
51142f7983 fix(TDI-47452): Fix codegen for not valid xpath expression (#7175) 2022-05-16 12:50:26 +03:00
jiezhang-tlnd
e2697a8226 fix(TUP-33047)Help links in Studio (#7472) 2022-05-09 10:38:41 +08:00
sponomarova
86eb6ab2fe fix(TBD-13798): CVE: Replace log4j1.x by reload4j for org.talend.libraries.hadoop.mapr.5.0.0 (#7450) 2022-05-05 16:56:39 +03:00
Emmanuel GALLOIS
1690847460 fix(TCOMP-2045): master backport (#7495) 2022-05-05 09:33:02 +02:00
wang wei
461258adc9 fix(TDI-47819): tJDBCInput component results in error with Dynamic schema in latest Talend versions. (#7426) 2022-05-05 13:44:11 +08:00
Emmanuel GALLOIS
3e4d33ab73 feat(TDI-47764): bump component-runtime to 1.38.6 (#7384)
* feat(TDI-47764): Bump component-runtime to 1.38.6

* feat(TDI-47764): Bump component-runtime to 1.38.6

* remove snapshot

Co-authored-by: jzhao-talend <jzhao@talend.com>
2022-05-05 10:19:29 +08:00
Oleksandr Zhelezniak
962e6b754b chore(TDI-47688): bump jackson (#7385)
* jackson: 2.11.3 -> 2.13.2
* jackson-databind: 2.11.3 -> 2.13.2.2
2022-04-29 15:24:30 +03:00
Emmanuel GALLOIS
302b9a4921 feat(TCOMP-2045): Pass meta information about columns (#7100) 2022-04-29 12:56:31 +02:00
pyzhou
02f5d99a23 Pyzhou/tdi 43829 zip4j2 tfile archive 7.3 (#7478)
* feat(TDI-43829): Add support for zip4j 2.x libraries to tFileArchive/unArchive components

# Conflicts:
#	main/plugins/org.talend.designer.components.libs/libs_src/talendzip/pom.xml

# Conflicts:
#	main/plugins/org.talend.designer.components.localprovider/components/tFileUnarchive/tFileUnarchive_java.xml

* fix(TDI-43829): move IntegrityUtil.java

# Conflicts:
#	main/plugins/org.talend.designer.components.libs/libs_src/checkArchive/pom.xml
2022-04-28 16:45:02 +08:00
hcyi
a34a1fb6dc fix(TUP-35078):tJDBCSCDELT does not work correctly on Snowflake. (#7397)
* fix(TUP-35078):tJDBCSCDELT does not work correctly on Snowflake.

* fix(TUP-35078):tJDBCSCDELT does not work correctly on Snowflake.

* fix(TUP-35078):add junits
2022-04-25 17:18:57 +08:00
Dmytro Sylaiev
5c2049fb1b fix(TDI-47802): Fix short strings as Clobs (#7425)
* Add warning for long strings as varchar
2022-04-20 20:43:13 +03:00
vyu-talend
ddafd9607d fix(TDI-46584):modify limit label. (#7458) 2022-04-20 18:11:00 +08:00
Chao MENG
12339ad3ab chore: fix swtbot issue (#7448) 2022-04-19 11:12:53 +08:00
Jane Ding
76c8733bad fix(TUP-35238):Drag and drop Redshift metadata to list components lost (#7418)
some components
https://jira.talendforge.org/browse/TUP-35238
2022-04-15 15:47:19 +08:00
pyzhou
50a9456491 feat(TDI-47687):tScp reuse session (#7417)
* feat(TDI-47687):tScp reuse session

* feat(TDI-47687): tSCPFileList

* feat(TDI-47687): tSCPFileList compile error
2022-04-12 11:44:53 +08:00
wang wei
bbb82ade0d fix(TDI-46584): tServiceNowInput Limit disable option in Advanced settings not working (#6717)
* fix(TDI-46584): tServiceNowInput Limit disable option in Advanced
settings not working

* fix something

* fix more

* fix(TDI-46584):fix the job won't stop issue.

* fix(TDI-46584):fix always reading-all-records issue.

Co-authored-by: vyu <vyu@talend.com>
2022-04-11 14:25:29 +08:00
Chao MENG
b5e8c7d605 fix(TUP-35034): CI - stop the build if custom component is missing
https://jira.talendforge.org/browse/TUP-35034
2022-04-07 20:13:35 +08:00
Chao MENG
97bc429216 fix(TUP-35034): CI - stop the build if custom component is missing (#7402)
https://jira.talendforge.org/browse/TUP-35034
2022-04-07 16:34:48 +08:00
clesaec
0d0d0bc9db TDI-47287 : change ftpput (#7352) 2022-04-04 07:37:58 +02:00
Chao MENG
ecd65bd27d fix(TUP-35034): CI - stop the build if custom component is missing
https://jira.talendforge.org/browse/TUP-35034
2022-04-01 19:07:51 +08:00
wwang-talend
95614a456c fix(TDI-47536): CVE: logback 2022-03-31 08:31:33 +08:00
wang wei
45fd8866a1 fix(TDI-47633): CVE: Replace log4j1.x by reload4j or upgrade to log4j2 (#7332) 2022-03-30 18:25:40 +08:00
jiezhang-tlnd
662cb3ce5d fix(TUP-34954)CVE: Replace log4j1.x by reload4j (#7331)
* fix(TUP-34954)CVE: Replace log4j1.x by reload4j

* update junits

* exclude log4j from slf4j-log4j12
2022-03-30 17:36:25 +08:00
Chao MENG
15d8e5966f Cmeng/fix/tup 35034 err on component missing m73 (#7328)
* fix(TUP-34378): Job could build out successfully if contains component which cannot be loaded (#7286)

* fix(TUP-34378): Job could build out successfully if contains component
which cannot be loaded
https://jira.talendforge.org/browse/TUP-34378

* fix(TUP-35034): CI - stop the build if custom component is missing
https://jira.talendforge.org/browse/TUP-35034

* fix(TUP-35034): CI - stop the build if custom component is missing
https://jira.talendforge.org/browse/TUP-35034

* fix(TUP-35034): CI - stop the build if custom component is missing
https://jira.talendforge.org/browse/TUP-35034

* fix(TUP-35034): CI - stop the build if custom component is missing
https://jira.talendforge.org/browse/TUP-35034
2022-03-30 16:10:45 +08:00
bhe-talendbj
092cd497a7 feat(TUP-33809): check modulelist (#7209) (#7285)
* feat(TUP-33809): check modulelist

* feat(TUP-33809): get all sub jobs
2022-03-30 11:40:39 +08:00
Dmytro Grygorenko
b8adbf0321 fix(TDI-47670): fix Xpath "count" function for unformatted XML file. (#7355) 2022-03-29 16:33:43 +03:00
wang wei
2510f3ad4f fix(TDI-47589): "Write Excel 2007 format" leads to tFileOutputExcel poor performance (#7261) 2022-03-28 14:43:23 +08:00
Xilai Dai
00c50f158a fix(APPINT-34443) add missing import packages for tRESTClient (#7334) 2022-03-28 13:35:23 +08:00
Oleksandr Zhelezniak
f2d9ef95dc fix(TDI-47651): bump Redshift driver to 1.2.55.1083 (#7315) 2022-03-24 11:02:47 +02:00
jzhao
1a627ac736 fix(TDI-47373):update IBM MQ client lib to 9.2.4.0 (#7281) 2022-03-16 15:12:14 +08:00
pyzhou
cd0646d89c Pyzhou/tdi 47528 CVE mina sshd 7.3 (#7279)
* fix(TDI-47528):CVE mina-sshd

* upgrade talend-scp-helper version

* compile error

* compile error

* compile error

* timeout
2022-03-11 11:28:40 +08:00
Chao MENG
544eecb22d fix(TUP-30599): tMap drag and drop does not work seamlessly in Linux GUI (#7267)
as it does in Windows
https://jira.talendforge.org/browse/TUP-30599
2022-03-04 16:13:11 +08:00
wang wei
d2717e757e fix(TDI-47230): close thread pool for spring boot case (#7272) 2022-03-03 17:08:47 +08:00
pyzhou
e2ef1e4028 Revert "Revert "fix(TDI-47385): tMap error when ternary operator is used (#7188)""
This reverts commit b7bea1900e.
2022-02-28 21:18:02 +08:00
Laurent BOURGEOIS
074579381d chore(TBD-13325):CVE on protobuf-java-2.5.0 (#7179) 2022-02-28 10:29:59 +01:00
pyzhou
b7bea1900e Revert "fix(TDI-47385): tMap error when ternary operator is used (#7188)"
This reverts commit cbe2bb000d.
2022-02-28 16:09:46 +08:00
Dmytro Ochkas
3859b1e957 fix(TDI-47385): tMap error when ternary operator is used (#7188)
Add parentheses so that users don't have to add them manually (see the sketch below)
2022-02-28 15:59:52 +08:00
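A compact illustration of the precedence problem this commit fixes (the expressions are made up, and the wrapper is a sketch of what the generator effectively does, not the actual tMap codegen):

    public final class TernaryWrapDemo {
        // Without parentheses, generated concatenation such as
        //   "id:" + flag ? "Y" : "N"
        // parses as ("id:" + flag) ? "Y" : "N" and fails to compile.
        static String wrap(String mappedExpression) {
            return "(" + mappedExpression + ")";
        }

        public static void main(String[] args) {
            boolean flag = true;
            String out = "id:" + (flag ? "Y" : "N"); // compiles once wrapped
            System.out.println(out);                          // id:Y
            System.out.println(wrap("flag ? \"Y\" : \"N\"")); // (flag ? "Y" : "N")
        }
    }
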
Liu Xinquan
74d2a8410e fix(TBD-13419) Class not found happen in spark job calling a standard… (#7163) (#7237) 2022-02-28 14:53:03 +08:00
sbliu
1f7a4875ef fix(TUP-34769) add junit (#7253) 2022-02-28 14:25:29 +08:00
wwang-talend
637aa45e61 fix(TDI-47230): use fixedthreadpool for TDM 2022-02-28 10:45:00 +08:00
hcyi
75be28c5b1 fix(TUP-34593):Cannot connect to Salesforce with password starting with double-quotes (#7244)
* fix(TUP-34593):Cannot connect to Salesforce with password starting with
double-quotes

* fix(TUP-34593):Cannot connect to Salesforce with password starting with
double-quotes

* fix(TUP-34593):add junits
2022-02-25 17:52:24 +08:00
jiezhang-tlnd
00cc76bb3c fix(TUP-34609)Cannot retrieve Module Name in tSalesforceInput with (#7246)
* fix(TUP-34609)Cannot retrieve Module Name in tSalesforceInput with
password starting with double-quotes

* change condition
2022-02-25 17:48:05 +08:00
wang wei
35d710ca54 fix(TDI-47230): avoid creating a thread for every task for
tWriteXMLField/tWriteJSONField in a loop (see the sketch below)
2022-02-24 14:55:43 +08:00
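A sketch of the pooling pattern the fix describes, assuming plain java.util.concurrent usage (the surrounding loop is illustrative): one fixed pool is created before the row loop, reused for every write task, and shut down once.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public final class PooledFieldWriter {
        public static void main(String[] args) {
            // Created once, instead of new Thread(...).start() per row.
            ExecutorService pool = Executors.newFixedThreadPool(4);
            for (int row = 0; row < 1000; row++) {
                final int r = row;
                pool.submit(() -> {
                    // serialize row r to XML/JSON here
                });
            }
            pool.shutdown(); // closed once at the end of the subjob, not per task
        }
    }
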
sbliu
c78507196c fix(TUP-34769) Error fields for Rejects get dropped from tSalesforceOutput after Sync Columns. (#7232)
fix bug TUP-33497
2022-02-23 17:56:24 +08:00
Dmytro Grygorenko
7a60d7fdbb fix(TDI-47524): replace non-dynamic column for PostgresqlOutput with dynamic schema. (#7224) 2022-02-21 10:22:28 +02:00
kjwang
e05ae031de TUP-34156 UI is broken for TCK Table structures in 8.0.1 + nightly (#7205)
TUP-34156 UI is broken for TCK Table structures in 8.0.1 + nightly
https://jira.talendforge.org/browse/TUP-34156
2022-02-17 11:47:41 +08:00
Dmytro Ochkas
3028e357b1 fix(TDI-47435): tPostgreSqlOutput 'Debug query mode' and 'Upsert' (#7167)
* Modified some DB templates so they initialize an unresolved variable when dataAction is 'Upsert'
2022-02-16 19:14:13 +02:00
Dmytro Grygorenko
7dab8ce963 fix(TDI-47437): check for replaced column for Postgres Upsert action. (#7206) 2022-02-16 16:24:22 +02:00
Dmytro Ochkas
35b20bb39e fix(TDI-47436): tPostgreSqlOutput upsert and dynamic column (#7172)
* The ON CONFLICT clause now contains not only dynamic columns but non-dynamic ones as well (see the sketch below)
2022-02-16 16:05:20 +02:00
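A hedged sketch of the statement shape after this fix, built with a plain string builder (Talend generates the SQL from javajet templates; the helper below is invented): the DO UPDATE SET list now covers the statically declared columns alongside the dynamic ones.

    import java.util.List;
    import java.util.stream.Collectors;

    final class PostgresUpsertSql {
        // keys: conflict target columns; allColumns: dynamic + non-dynamic columns
        static String build(String table, List<String> keys, List<String> allColumns) {
            String cols = String.join(", ", allColumns);
            String params = allColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
            String updates = allColumns.stream()
                    .filter(c -> !keys.contains(c))
                    .map(c -> c + " = EXCLUDED." + c)
                    .collect(Collectors.joining(", "));
            return "INSERT INTO " + table + " (" + cols + ") VALUES (" + params + ")"
                    + " ON CONFLICT (" + String.join(", ", keys) + ") DO UPDATE SET " + updates;
        }
    }
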
pyzhou
25d58e1fd2 fix(TDI-47396):tSCPGet doesn't always close session (#7134) 2022-02-14 17:53:39 +08:00
pyzhou
ea023892f0 fix(TDI-47336):Align snowflake driver (#7155) 2022-01-27 13:12:29 +08:00
wang wei
d6fd1cbe18 fix(TDI-47395): tPostgresqlOutput cannot select Insert as Action on Data when Log4j is disabled (#7136) 2022-01-26 16:49:52 +08:00
bhe-talendbj
ce1fd92c74 fix(TUP-34535): disable log4j for dataproc (#7154)
* fix(TUP-34535): disable log4j for dataproc

* fix(TUP-34535): add log4j2 check

* fix(TUP-34535): disable log4j2 only
2022-01-24 18:00:52 +08:00
Wei Hua
bc4b7a489a APPINT-33765 [7.3.1 patch] ClassNotFoundException in runtime when DB connection (SQLServer) in joblet without using alias (#7089) 2022-01-21 10:30:28 +08:00
Jill Yan
e51292ea35 APPINT-31044 (#7097)
Co-authored-by: jillyan <yan955599@gmail.com>
2022-01-19 17:04:29 +08:00
jzhao
c2b7b58302 fix(TDI-46871):tFTPXXX component host can't use expression without "()" (#6742) 2022-01-14 15:07:15 +08:00
kjwang
893b214fcf TUP-34282 CVE: log4j-core(1.2-api)-[2-2.16.0) (Fix junit) (#7096)
https://jira.talendforge.org/browse/TUP-34282
2022-01-10 10:54:05 +08:00
kjwang
c0d3e099dd TUP-34282 CVE: log4j-core(1.2-api)-[2-2.16.0) (#7082)
* TUP-34282 CVE: log4j-core(1.2-api)-[2-2.16.0)
https://jira.talendforge.org/browse/TUP-34282
2022-01-07 10:02:18 +08:00
Oleksandr Zhelezniak
3970eb25af fix(TDI-46410): update namespace in xml element (#7018) 2022-01-04 14:40:17 +02:00
wang wei
79361417eb fix(TDI-47212): CVE: jackson-mapper(core)-asl-1.9.15-TALEND (#7073) 2021-12-31 17:07:03 +08:00
pyzhou
1240c228c1 fix(TDI-47234):tXSLT regression (#7066)
* fix(TDI-47234):tXSLT regression

* correct migration result

* Revert "correct migration result"

This reverts commit 677ff8525e.
2021-12-31 11:01:49 +08:00
Zhiwei Xue
47953ea772 workitem(TUP-34111):slow studio build time for remote execution (#7040) 2021-12-30 16:05:14 +08:00
jiezhang-tlnd
8bf40999cb fix(TUP-33863)Update of Metadata Hadoop Cluster does not change spark (#6945)
* fix(TUP-33863)Update of Metadata Hadoop Cluster does not change spark
version when propagated
https://jira.talendforge.org/browse/TUP-33863

* fix(TUP-33863)Update of Metadata Hadoop Cluster does not change spark
version when propagated

* add common judgment

* add common fix

* Update of Metadata Hadoop Cluster does not change spark version when
propagated
2021-12-30 11:18:09 +08:00
Wei Hua
d8824c7d80 APPINT-34077 RuntimeException: java.lang.NoClassDefFoundError: org/apache/cxf/databinding/DataBinding (#7061) 2021-12-29 14:49:10 +08:00
Emmanuel GALLOIS
c89e5a35db fix(TDI-47213): correct migration issue w/ context + function call (#6999)
* fix(TDI-47213): correct migration issue w/ context + function call

* fix(TCOMP-2053): correct migration issue w/ context + function call
2021-12-29 11:25:23 +08:00
ypiel
22b2899392 chore: bump tck/1.38.5 (#7052) 2021-12-29 11:07:48 +08:00
Jane Ding
5c5e4a561f fix(TUP-33830):[7.3] ref project compilation error (#7054)
* fix(TUP-33830):[7.3] ref project compilation error
https://jira.talendforge.org/browse/TUP-33830

* fix(TUP-33830):[7.3] ref project compilation error
https://jira.talendforge.org/browse/TUP-33830
2021-12-29 10:23:57 +08:00
Zhiwei Xue
a972cbe1ec fix(TUP-33966):Studio/commandline fails to generate code for the job, (#6978)
throws "Invalid thread access"
2021-12-29 09:45:53 +08:00
sbliu
79599d9db9 chore(TUP-33956) remove xercesImpl from libraries needed. (#7029) 2021-12-27 15:26:21 +08:00
Dmytro Grygorenko
e0c0d31522 fix(TDI-47228): tPostgresqlOutput Upsert existing connection (#7048) 2021-12-20 17:07:23 +02:00
Emmanuel GALLOIS
bc6944ddf6 feat(TDI-47278): bump component-runtime to 1.38.4 (#7047) 2021-12-20 14:24:26 +01:00
bhe-talendbj
93afe3609d fix(TBD-13373): User did not initialize spark context when I run a simple spark job with HDI4 (#7034) 2021-12-16 19:49:34 +08:00
bhe-talendbj
6aca900a7e fix(TUP-34105): upgrade log4j2 to 2.16.0 (#7026) (#7030) 2021-12-15 17:29:59 +08:00
bhe-talendbj
bfee355f0a fix(TUP-34105): upgrade log4j2 (#7007) 2021-12-15 10:22:01 +08:00
Dmytro Grygorenko
a5e1cc47e7 fix(TDI-47179): enable batch checkbox for Postgres Upsert. (#6962) 2021-12-08 11:33:09 +02:00
bhe-talendbj
1d1adfc74c feat(TUP-33718): fix distribution version for modules (#6983) 2021-12-07 17:52:13 +08:00
clesaec
86556e52fe TDI-47170 : go root directory for sftp (duplicate code) (#6959) 2021-12-03 09:26:53 +01:00
bhe-talendbj
2e3af5d1d1 feat(TUP-33718): load dynamic distribution jars (#6965) 2021-12-03 11:18:00 +08:00
kjwang
eacf6b2edc TUP-33443 The method tFileInputDelimited_1_onSubJobError(Exception, (#6938)
TUP-33443 The method tFileInputDelimited_1_onSubJobError(Exception,String, Map<String,Object>) is undefined for the type xxxxx
https://jira.talendforge.org/browse/TUP-33443
2021-11-29 10:06:53 +08:00
Emmanuel GALLOIS
f8db539d1d fix(TCOMP-1963): adapt correct Metadata from schema (#6708) 2021-11-26 12:20:19 +01:00
Emmanuel GALLOIS
9041181162 feat(TDI-46838): bump component-runtime to 1.38 (#6716) 2021-11-25 23:44:59 +01:00
hzhao-talendbj
3379f247d4 feat(TUP-29207) add one field 'withAdminPermissions' to webservice stub class, update the wsdl file. (#6917)
Co-authored-by: sbliu <sbliu@talend.com>
2021-11-25 14:53:48 +08:00
Jill Yan
0f587babf5 Revert "Revert "APPINT-33788 (#6743)" (#6762)" (#6937)
This reverts commit df42fc827b.
2021-11-25 10:22:45 +08:00
wang wei
21135a3514 fix(TDI-46850): Upgrade ActiveMQ Jars to 5.15.15 (#6725) 2021-11-24 17:49:48 +08:00
wang wei
483a0f4308 fix(TDI-47078): [7.3]Multiple SQL Statements in SQL Template => Duplicate local variable (#6909) 2021-11-24 15:11:25 +08:00
clesaec
9580f89ac6 TDI-46905 : treat absolute path (#6821) 2021-11-23 08:38:25 +01:00
bhe-talendbj
816212547d fix(TUP-32838): Custom value in a table column with suggestable values is not allowed (#6819)
* fix(TUP-32838): editable properties table

* fix(TUP-32838): support editable properties table

* fix(TUP-32838): support editable properties table

* fix(TUP-32838): support editable properties table

* fix(TUP-32838): rename property name

* fix(TUP-32838): editable for all of suggestable parameters

* fix(TUP-32838): resolve comments

* fix(TUP-32838): avoid invoking component
2021-11-23 14:50:47 +08:00
Tetyana Meronyk
b67a5e2bce fix(TDI-46986): tFTPGet component unable to get file from FileZilla Server, with a particular Filemask in Studio, after September patch, Patch_20210924_R2021-09_v2-7.3.1 (#6920) 2021-11-22 18:51:06 +02:00
Tetyana Meronyk
b2f1487ae9 fix(TDI-46791): tBigQueryInput - Component returning extra row of NULL values (#6751) 2021-11-22 17:27:08 +02:00
Dmytro Grygorenko
8c01f53dd0 feat(TDI-44717): tPostgresqlOutput Upsert feature (#6878)
* feat(TDI-44717): initial PoC.

* feat(TDI-44717): PoC simplified, now works on dynamic schema (but still draft).

* feat(TDI-44717): some corrections.

* feat(TDI-44717): finalized PoC.

* feat(TDI-44717): more cleanup.

* feat(TDI-44717): some corrections.

* feat(TDI-44717): final implementation.

* feat(TDI-44717): changes after code review.
2021-11-22 14:20:13 +02:00
Dmytro Grygorenko
f057c13965 fix(TDI-46912): tELTOutput cannot locate & drop existing table in Snowflake. (#6916) 2021-11-22 14:09:30 +02:00
Dmytro Grygorenko
d0bd3a8de2 fix(TDI-46746): CVE - update "protobuf-java" to 3.4.0 2021-11-22 09:38:53 +02:00
Jill Yan
529bb5a138 APPINT-34090 (#6903)
Co-authored-by: jillyan <yan955599@gmail.com>
2021-11-19 17:34:20 +08:00
pyzhou
bc920a8e4a fix(TDI-46857):Upgrade bcprov-jdk15on-1.69 2021-11-18 10:10:08 +08:00
vyu-talend
a4f1afa7d9 feat(TDI-45016):add query timeout. (#6539)
* feat(TDI-45016):add query timeout.

* feat(TDI-45016):optimize code.

* feat(TDI-45016):fix bugs.

* feat(TDI-45016):fix some issues.
2021-11-17 17:06:05 +08:00
clesaec
240510aa13 TDI-47072 : fix context (#6895) 2021-11-17 08:56:51 +01:00
Jane Ding
4ea4723304 fix(TUP-26679):tMap editor was frozen (#4601) (#6894)
https://jira.talendforge.org/browse/TUP-26679
2021-11-17 10:22:45 +08:00
wang wei
05e1edbbc1 fix(TDI-46962): tAmazonAuroraOutput compilation errors (#6872) 2021-11-16 10:11:21 +08:00
Olha V
1eebeae0c2 chore(TDI-46752): CVE: Oauth 2.0 SDK with OpenID (#6811) (#6891)
* chore(TDI-46752): CVE: Oauth 2.0 SDK with OpenID

* chore(TDI-46752): CVE: Oauth 2.0 SDK with OpenID

* bump talend-mscrm in tMicrosoftCrm

* chore(TDI-46752): CVE: Oauth 2.0 SDK with OpenID

* add content-type dependency
2021-11-12 16:04:02 +02:00
Tetyana Meronyk
122af47d85 fix(TDI-46834): Additional columns in tDBOutput don't generate code properly (#6825) (#6868)
* fix(TDI-46834): Additional columns in tDBOutput don't generate code properly

(cherry picked from commit 82243d59ac)
2021-11-03 15:32:51 +02:00
Oleksandr Zhelezniak
6f373d1522 fix(TDI-46930): wrap filename (#6846)
* wrap the filename in parentheses to explicitly handle Java operator precedence
2021-11-02 10:35:21 +02:00
bhe-talendbj
b6161bdc13 fix: NPE caused by missing route (#6850) 2021-11-01 10:44:20 +08:00
hcyi
df7fd9386e fix(TUP-33449):tPreJob trigger fails with GSS exception when "Set path (#6839)
to custom hadoop jar" is checked in 7.3.1
2021-10-28 15:50:00 +08:00
Dmytro Sylaiev
bf6cbaf984 fix(TDI-46790): Fix fetching big files GSGet (#6841) 2021-10-27 14:55:24 +03:00
hzhao-talendbj
c8b5f40e43 fix(TUP-33485): [tELTMap] Delimited identifiers do not work for (#6828)
* fix(TUP-33485): [tELTMap] Delimited identifiers do not work for

* fix(TUP-33485): [tELTMap] Delimited identifiers do not work for

* fix(TUP-33485): [tELTMap] Delimited identifiers do not work for
2021-10-27 16:38:34 +08:00
bgunics-talend
4e39ebf917 fix(TDI-45710):tXSLT support for streaming input and output (#6251)
* Added streaming support on inputs

* Added InputStream for the 2 input files

* Added support for input / output columns

* Removed the label as it was more confusing than helpful.
Changed the XML / XSLT label as it supports Stream.

Output, however, does NOT support streaming, so changed the Override option to File only.

* Upgraded the 15-year-old saxon9.jar
My tests passed.

* fix issue where the xml column was not used

* According to the new writing standard, only capitalize the first character of the first word

Co-authored-by: pyzhou <pyzhou@talend.com>
2021-10-27 10:49:59 +08:00
apoltavtsev
106244a15f fix(APPINT-33909) tREST as OSGI published by CI failed to run in Runtime with ClientHandlerException 2021-10-19 06:05:04 +02:00
Jill Yan
df42fc827b Revert "APPINT-33788 (#6743)" (#6762)
This reverts commit 9d12d4b08a.
2021-10-18 14:15:36 +08:00
Jill Yan
9d12d4b08a APPINT-33788 (#6743) 2021-10-15 16:59:54 +08:00
Andreas Mattes
a6c5e1f537 TPRUN-1919 Add support for log assignment to TESB task. (#6662) 2021-10-14 15:59:41 +02:00
jzhao
06462122bb feat(TDI-45864):Support File components with ORC file format (DI) (#6722)
* feat(TDI-45864):Support File components with ORC file format (DI)

* feat(TDI-45864):Date type read/write issue

* feat(TDI-45864):Double/Float from DB to ORC issue

* feat(TDI-45864):sync orc-core version
2021-10-14 16:23:24 +08:00
jzhao-talend
2c3eafb7c2 chore(TDI-46682):twebservice remove useless urlpath 2021-10-14 12:44:38 +08:00
jzhao
eef575ea44 fix(TDI-46682):Can't send e-mails via smtp.office365.com; need to upgrade mail-1.4.7.jar to the latest version (#6721)
* fix(TDI-46682):Can't send e-mails via smtp.office365.com; need to upgrade
mail-1.4.7.jar to the latest version

* fix(TDI-46682):update for tPOP/tFileInputMail/tVtigerCRMXX/tWebserviceXX
2021-10-14 12:24:31 +08:00
chmyga
7be4c6a799 fix(TDI-46777): tBigQueryOutput 24 hour format (#6689) 2021-10-13 17:45:21 +03:00
wang wei
f852743538 fix(TDI-46774): tS3Put doesn't copy the whole file over if using cache (#6714) 2021-10-12 10:38:19 +08:00
vadym-drokov
f590b9214d APPINT-33842: NPE in tDB*** when selecting "data source alias" (#6705) 2021-10-08 11:51:04 +02:00
Dmytro Sylaiev
3544beccec fix(TDI-46759): Handle booleans as digits for OB (#6688) 2021-10-08 12:29:34 +03:00
Dmytro Grygorenko
195c2997d2 fix(TDI-46616): CVE - update "commons-io" to 2.8.0 (#6631) 2021-10-07 10:30:14 +03:00
Dmytro Grygorenko
74f1c89f0b fix(TDI-46084): align versions of "jersey-core" and "jersey-client". (#6647)
* fix(TDI-46084): align versions of "jersey-core" and "jersey-client".

* fix(TDI-46084): add "jsr311-api" library.
2021-10-07 10:09:24 +03:00
apoltavtsev
9134549c94 fix(APPINT-33694) Route Splitter is not working with "XPATH" (#6702) 2021-10-05 15:08:27 +02:00
Emmanuel GALLOIS
9e472b477a feat(TDI-46769): bump to component-runtime 1.37.0 (#6673) 2021-10-04 10:52:15 +02:00
bhe-talendbj
41516246d1 fix(TUP-31095): fix CWE-95 (#6695) (#6697) 2021-09-30 11:41:56 +08:00
sbliu
f9fc607e59 fix(TUP-32947) fix tuj compile error, fix unit test. (#6691)
fix unit test: set parameter with correct type.
2021-09-30 09:38:52 +08:00
sbliu
59ace742af fix(TUP-32947): Handle concatenating context variables, build correct update sql with real table name. (#6674) 2021-09-28 16:39:22 +08:00
Dmytro Grygorenko
1e35baefb1 fix(TDI-46614): CVE - update "commons-compress" to 1.21 (#6584)
* fix(TDI-46614): CVE - update "commons-compress" to 1.21

* fix(TDI-46614): version format corrected.

* fix(TDI-46614): keep version for "checkArchive" lib.
2021-09-27 17:13:34 +03:00
Jane Ding
bbe4460cd4 fix(TUP-33067):tAmazonAuroraInput and tAmazonAuroraOutput list multiple (#6678)
different versions of driver modules
https://jira.talendforge.org/browse/TUP-33067
2021-09-27 21:06:48 +08:00
zyuan-talend
35e6bf01f9 fix(TUP-32758):show in the connection dropdown and keep the built-in one while property type + db version are compatible. (#6656) 2021-09-26 15:04:31 +08:00
Jane Ding
4191ba8730 fix(TBD-13063):got wrong module for tAmazonAuroraInput (#6668)
https://jira.talendforge.org/browse/TBD-13063
2021-09-22 15:52:48 +08:00
AlixMetivier
22cad3b97a feat(TBD-12334): add run submit mode to DBR (#6629) 2021-09-17 10:03:19 +02:00
jzhao
87fbabdccd fix(TDI-46527):Dataloss with Parquet components for timestamp field with nano seconds(#6627) 2021-09-17 13:37:14 +08:00
clesaec
b62c16ff6a TDI-46721 - shorter generic code (#6640)
* TDI-46721 - shorter generic code
2021-09-16 12:54:51 +02:00
wang wei
32408cd9a7 fix(TDI-46252): do not add that parameter, as some cases do not work, and the user can set it themselves since there is no better way to control it (#6648) 2021-09-16 10:24:22 +08:00
Wei Hua
94a80b55d4 APPINT-33503 '@' not generated with tRESTResponse after migration. (#6649)
- Add migration task to set default value
2021-09-15 20:47:28 +08:00
Jill Yan
c7a9cc1145 APPINT-33092 omit ds alias if not set (#6635)
* APPINT-33092 omit ds alias if not set

* APPINT-33092 fix conflict

* APPINT-33092 revert

* APPINT-33092 revert

* APPINT-33902 fix NPE

Co-authored-by: jillyan <yan955599@gmail.com>
2021-09-15 18:33:51 +08:00
jzhao
85ed098bcb fix(TDI-46511):[7.3.1] tMomInput: "Sleeping time" parameter is not used in code generated for Websphere MQ (#6602) 2021-09-15 11:27:55 +08:00
wang wei
25637d3857 fix(TDI-45468): support streaming append (#6623) 2021-09-15 11:05:16 +08:00
Dmytro Grygorenko
b3774b643b fix(TDI-46612): add ERROR_MESSAGE to the DBRow components. (#6626) 2021-09-14 16:40:51 +03:00
jzhao
51d6fb4cac fix(TDI-46315):add migration task for changing the value of one dbversion item (#6606)
* fix(TDI-46315):add migration task for changing the value of one
dbversion item

* fix(TDI-46315):sap component and sap metadata have different module
name/mvnurl.

Co-authored-by: Hongchun Yi <hcyi@talend.com>
2021-09-14 14:50:39 +08:00
wang wei
37ae765116 fix(TDI-46252): fix tAmazonAuroraRow (#6634) 2021-09-14 09:56:38 +08:00
Dmytro Sylaiev
1b891d23f8 fix(TDI-46610): Add missing logout before disconnect FTPS FTPGet (#6576) 2021-09-13 16:35:27 +03:00
vadym-drokov
26c3b77921 APPINT-33638: Datasource ignored when DB components in Joblet (#6612)
* APPINT-33638: Datasource ignored when DB components in Joblet

* Compilation error is corrected

Co-authored-by: apoltavtsev <apoltavtsev@gmail.com>
2021-09-13 11:25:11 +02:00
wang wei
6af4903291 fix(TDI-46348): studio fix for delta format (#6574) 2021-09-13 11:42:12 +08:00
pyzhou
f3a1279436 feat(TDI-46591):tFTPGet create infinite folder (#6582) 2021-09-12 07:38:45 +08:00
Emmanuel GALLOIS
667e43c56e feat(TDI-46568): bump to component-runtime 1.36.1 (#6625)
* feat(TDI-46568): bump component-runtime to 1.36.0
* feat(TDI-46568): bump to component-runtime 1.36.1
2021-09-10 12:48:13 +02:00
Wei Hua
6120adbd1e APPINT-33503 '@' not generated with tRESTResponse after migration. (#6616)
* APPINT-33503 '@' not generated with tRESTResponse after migration.

* set endpoint value
2021-09-10 18:07:15 +08:00
Denis Sergent
63dae01a82 dsergent_fix_APPINT_33649 (#6609)
To avoid class loading issues at runtime (in Talend Runtime), we exclude mssql jar library from OSGi builds.
2021-09-10 10:29:44 +02:00
Emmanuel GALLOIS
52b46db595 feat(TDI-46568): bump component-runtime to 1.36.0 (#6593) 2021-09-10 09:19:56 +02:00
wang wei
5cac10311a fix(TDI-46555): Unrecognized type id_byte[] while doing dynamic schema mapping (#6528) 2021-09-10 14:53:11 +08:00
hzhao-talendbj
fc995fd934 chore(TUP-32664): CVE: commons-compress-[1.19,1.20] (#6605)
* chore(TUP-32664): CVE: commons-compress-[1.19,1.20]

* chore(TUP-32664): CVE: commons-compress-[1.19,1.20]
2021-09-09 10:56:06 +08:00
Olha V
0c1aa7d269 chore(TDI-46132): Update adal4j in tMicrosoftCrm (#6435) 2021-09-08 14:21:52 +03:00
sbliu
58cb31cd0d fix(TUP-32548) Fix Table widget columns for Advanced panel. (#6598) 2021-09-07 15:34:17 +08:00
hzhao-talendbj
2de786b6db fix(TUP-32744): Job latest revision version doesn't match the correct (#6581)
* fix(TUP-32744): Job latest revision version doesn't match the correct
latest SVN revision

* fix(TUP-32744): Job latest revision version doesn't match the correct

* fix(TUP-32744): Job latest revision version doesn't match the correct
2021-09-06 16:18:49 +08:00
hzhao-talendbj
4aa47a90a9 fix(TUP-32790): StackOverflowError occurs by tELTMap (#6592)
* fix(TUP-32790): StackOverflowError occurs by tELTMap when table name is
directly input in Japanese with escaped double quote

* fix(TUP-32790): StackOverflowError occurs by tELTMap
2021-09-06 11:46:27 +08:00
zyuan-talend
018e3e3e06 fix(TUP-32677):set default job version for tRunJob. (#6590) 2021-09-03 10:35:18 +08:00
Jane Ding
69168c56b8 fix(TUP-32671):AS400 Special character (pound sign £) in table column (#6567)
* fix(TUP-32671):AS400 Special character (pound sign £) in table column
names causing errors
https://jira.talendforge.org/browse/TUP-32671

* fix(TUP-32671):AS400 Special character (pound sign £) in table column
names causing errors
https://jira.talendforge.org/browse/TUP-32671

* fix(TUP-32671):AS400 Special character (pound sign £) in table column
names causing errors
https://jira.talendforge.org/browse/TUP-32671
2021-09-02 09:30:31 +08:00
sbliu
cfb02c57c3 Revert "fix(TUP-32548) Fix Table widget columns for Advanced panel. (#6537)" (#6585)
This reverts commit 732e383f8e.
2021-09-01 18:38:23 +08:00
jiezhang-tlnd
1b20a2d08c fix(TUP-30342)Studio misplaces the component dragged from Metadata (#6562) 2021-08-30 10:03:36 +08:00
Tetyana Meronyk
d8af56e14f feat(TDI-42314): tAmazonAuroraRow component (#6577) 2021-08-27 14:25:38 +03:00
jzhao
23070c60a1 fix(TDI-46587):tParquetOutput - DI Job - Dynamic Issue (#6570)
* fix(TDI-46587):tParquetOutput - DI Job - Dynamic Issue

* negative decimal value write issue
2021-08-27 18:43:20 +08:00
pyzhou
7278437430 Pyzhou/tdi 46333 t ftp list name as400 7.3 (#6554)
* fix(TDI-46333): change ListFile to ListName for AS400

# Conflicts:
#	main/plugins/org.talend.designer.components.localprovider/components/tFTPGet/tFTPGet_begin.javajet

* fix compile error in tFTPDelete

* fix compile error in tFTPGet
2021-08-27 11:10:28 +08:00
zyuan-talend
0d1d63b882 fix(TUP-32567): decode hex value in job script. (#6563) 2021-08-26 17:56:29 +08:00
sbliu
732e383f8e fix(TUP-32548) Fix Table widget columns for Advanced panel. (#6537) 2021-08-26 09:41:56 +08:00
sbliu
c7e01ebe67 fix(TUP-31910) Duplicating component with Existing connection resets the connection setting. (#6545) 2021-08-26 09:40:53 +08:00
Dmytro Grygorenko
4b365b194b chore(TDI-46270): improve logging for tFileList component. (#6561) 2021-08-25 16:28:10 +03:00
bhe-talendbj
7f7c963cdc fix(TUP-32682): set category for tck InputSchemaParameter (#6565) (#6568) 2021-08-25 17:42:34 +08:00
pyzhou
586bcb9d23 fix(TDI-46031):tRedshift set default log writer as null (#6560) 2021-08-24 15:20:27 +08:00
wang wei
bac3605a26 fix(TDI-46185): MySQL Connection issues due to TLS 1.2 after java upgrade to 1.8.0_292 (#6306) 2021-08-24 09:07:51 +08:00
vyu-talend
c37faee0d1 Vyu/tdi 46059 missing jars in tservicexxx (#6532)
* fix(TDI-46059):change the mvn url and upload jars

* fix(TDI-46059):change mvn path to official.
2021-08-19 16:49:56 +08:00
sbliu
bca1ab75b5 Revert "fix(TUP-31910) Duplicating component with Existing connection resets the connection setting. (#6487)" (#6541)
This reverts commit 250580cddd.
2021-08-18 10:13:19 +08:00
msjian
e03c026b74 fix(TDQ-19520): fix NPE (#6486) 2021-08-17 07:57:35 +00:00
Dmytro Sylaiev
342a7350be fix(TDI-46496): Hide password property for sftp not pass auth (#6488) 2021-08-16 17:27:17 +03:00
Jane Ding
4cc1dd3de9 fix(TUP-32278)Fix the migration task FixProjectResourceLink executing on (#6525)
every item
https://jira.talendforge.org/browse/TUP-32278
2021-08-12 11:19:06 +08:00
zyuan-talend
ffb98f3f6f fix(TUP-30465): fix the file explorer open on Linux. (#6524) 2021-08-11 17:31:11 +08:00
qiongli
62c2e341c5 fix(TDQ-19637):Check for 'tdqReportRun' in main/sub-job when generating code (#6514)
Co-authored-by: qiongli <qiongli@192.168.1.102>
2021-08-11 15:48:52 +08:00
hcyi
d22a213e38 fix(TUP-32438):Parameter (Component List) is empty but is required. (#6503)
* fix(TUP-32438):Parameter (Component List) is empty but is required.

* fix(TUP-32438):Parameter (Component List) is empty but is required.
2021-08-11 15:01:32 +08:00
hzhao-talendbj
5a3fd2ef23 chore(TUP-32327): remove dup dependency for maven plugins in job (#6501)
chore(TUP-32327): remove dup dependency for maven plugins in job template
2021-08-11 14:35:24 +08:00
wang wei
f6448c1316 fix(TDI-46475): Deprecate the list of components in 7.3 (#6497) 2021-08-10 15:38:14 +08:00
Jane Ding
b863480a14 fix(TUP-32383):Migration executed at every logon (#6515)
https://jira.talendforge.org/browse/TUP-32383
2021-08-10 11:37:54 +08:00
Emmanuel GALLOIS
19a9126382 chore(studio731): bump component-runtime to 1.35.1 (#6509)
* feat(TDI-46542): fix module inclusion in dependencies.txt when build is java9+
2021-08-09 09:22:49 +02:00
Carlos Badillo
8e9540b70b Revert "fix(TBD-12184):Password field missing for tImpalaRow (#6461)"
This reverts commit e27ab22bd0.
2021-08-09 08:25:35 +02:00
Carlos Badillo
8c0a3390a6 Revert "Cbadillo/fix/tbd 12184 (#6481)"
This reverts commit 36fd68a527.
2021-08-09 08:25:04 +02:00
jzhao
9eac6bc883 feat(TDI-46423):tSQSOutput - Get Amazon SQS Response - "Message ID" (#6477)
* feat(TDI-46423):tSQSOutput - Get Amazon SQS Response - "Message ID"

* feat(TDI-46423):fix migration

* fix(TDI-46423):keep same field name "MessageId" between input and output
2021-08-06 18:25:08 +08:00
zyuan-talend
37a0af7c4e feat(TUP-30465):remove older versions of job. (#6455) 2021-08-06 17:31:13 +08:00
Max
167f9aa41e fix(TBD-12466): httpclient upgraded to 4.5.13 (#6420)
Co-authored-by: Svitlana Ponomarova <sponomarova@talend.com>
2021-08-05 19:36:48 +03:00
sbliu
250580cddd fix(TUP-31910) Duplicating component with Existing connection resets the connection setting. (#6487) 2021-08-05 11:36:09 +08:00
hzhao-talendbj
1f8d842706 chore(TUP-32326): CVE: maven-compat-3.0[3.2.3-3.6.3] (#6470)
* chore(TUP-32326): CVE: maven-compat-3.0[3.2.3-3.6.3]

* TUP-32326 revert esb change
2021-08-04 16:52:30 +08:00
Jane Ding
cc3061d762 fix(TUP-32310):Migration task that sets a checkbox to "true" for existing (#6489)
components will change the components from deactivated to active.
https://jira.talendforge.org/browse/TUP-32310
2021-08-04 14:42:38 +08:00
kjwang
da2d50fbe2 TUP-32307 Studio Integration plugin: Support of "Table" values in TCK (#6456)
* TUP-32307 Studio Integration plugin: Support of "Table" values in TCK
components.
https://jira.talendforge.org/browse/TUP-32307
2021-08-04 14:16:30 +08:00
Jane Ding
9b9a1dfaed fix(TUP-32333):Not able to access a repository resource file in TMC (#6480)
https://jira.talendforge.org/browse/TUP-32333
2021-08-03 10:02:33 +08:00
sbliu
cfb02bd135 fix(TUP-32120) tELTMap: left join works as inner join (#6459)
tELTMap: correct the update SQL when an explicit join is set for a table, so the explicit join is parsed as it is instead of defaulting to an inner join (see the sketch below)
2021-07-30 18:14:12 +08:00
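An illustrative before/after of the generated update SQL (table, column, and alias names are invented; the exact statement Talend emits depends on the database):

    final class EltMapJoinSql {
        // before the fix: the explicit LEFT OUTER JOIN degraded to the default inner join
        static final String BEFORE =
            "UPDATE t SET name = s.name FROM target t JOIN source s ON t.id = s.id";
        // after the fix: the join type set on the table in tELTMap is emitted as-is
        static final String AFTER =
            "UPDATE t SET name = s.name FROM target t LEFT OUTER JOIN source s ON t.id = s.id";
    }
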
cbadillo1603
36fd68a527 Cbadillo/fix/tbd 12184 (#6481)
* fix(TBD-12184):Password field missing for tImpalaRow

* fix(TBD-12184):Password field missing for tImpalaRow
2021-07-30 10:49:04 +02:00
cbadillo1603
e27ab22bd0 fix(TBD-12184):Password field missing for tImpalaRow (#6461) 2021-07-30 10:39:56 +02:00
sbliu
0776ee5b9f chore(TUP-31799) upgrade plexus-archiver. (#6469) 2021-07-29 19:17:02 +08:00
jzhao
8b1bc0e1ac fix(TDI-46279):tFileInputParquet can't read INT96 type from hive table (#6443)
* provide method to get time millis from nanotime (see the sketch below)
2021-07-29 15:28:20 +08:00
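A sketch of the conversion the bullet mentions, assuming the common Parquet/Hive INT96 layout (8 little-endian bytes of nanoseconds-of-day followed by a 4-byte Julian day; the Unix epoch is Julian day 2440588):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class Int96Timestamp {
        private static final long UNIX_EPOCH_JULIAN_DAY = 2440588L;
        private static final long MILLIS_PER_DAY = 86_400_000L;

        static long toEpochMillis(byte[] int96) {
            ByteBuffer buf = ByteBuffer.wrap(int96).order(ByteOrder.LITTLE_ENDIAN);
            long nanosOfDay = buf.getLong();             // first 8 bytes
            long julianDay = buf.getInt() & 0xFFFFFFFFL; // last 4 bytes
            return (julianDay - UNIX_EPOCH_JULIAN_DAY) * MILLIS_PER_DAY
                    + nanosOfDay / 1_000_000L;
        }
    }
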
jiezhang-tlnd
9715a9e018 fix(TUP-31506)Arrow is not in the right direction when there is more (#6457)
than one input component
2021-07-27 16:13:46 +08:00
clesaec
62e441c8d7 TCOMP-1920 : active if fix (#6451)
* TCOMP-1920 : active if fix

* TCOMP-1920 : add assertions

Co-authored-by: jiezhang-tlnd <jie.zhang@talend.com>
2021-07-27 15:53:46 +08:00
Zhiwei Xue
665fd4c320 fix(TUP-32001):Tacokit slow to start (#6369) (#6430)
* fix(TUP-32001):Tacokit slow to start (#6369)

* fix(TUP-32001): fix TCK server readiness checker

Co-authored-by: bhe-talendbj <bhe@talend.com>
2021-07-26 11:01:04 +08:00
Dmytro Sylaiev
fdabd40f24 fix(TDI-46355): Add logout for ftp and ftps before disconnect (#6383)
* Slightly reduce code duplication
2021-07-23 11:37:22 +03:00
jiezhang-tlnd
617dbff52b bugfix(TUP-26206):'Refresh Preview' cannot work for Json with context (#4480) (#6439)
bugfix(TUP-26206):'Refresh Preview' cannot work for Json with context mode

Co-authored-by: hwang-talend <hwang@talend.com>
2021-07-21 17:57:14 +08:00
Dmytro Sylaiev
1740c3626a Dsylaiev/tdi 46367 sftp http proxy enchancement (#6407)
* fix(TDI-46367): Add support for local proxy config for tHTTPRequest

* fix(TDI-46367): Add nonProxy support for ftp,sftp,ftps
2021-07-21 10:27:16 +03:00
sbliu
5b0e0b449c fix(TUP-32255) fix Studio create webservices metadata fail. (#6449) 2021-07-20 21:03:41 +08:00
hcyi
103b26f50b Hcyi/tup 31123 7.3 (#6389)
* feat(TUP-31123):In studio, do not store login credentials on disk for
TAC and reprompt instead.

* feat(TUP-31123):improve for In studio, do not store login credentials on
disk for TAC and reprompt instead.

* feat(TUP-31123):update the message

* feat(TUP-31123):update the messages
2021-07-15 15:57:45 +08:00
wang wei
d7b050ded7 fix(TDI-46396): align net.minidev:accessors-smart and org.ow2.asm:asm:jar for studio (#6414) 2021-07-15 09:51:01 +08:00
sponomarova
d642a8efda fix(TBD-12218): zookeeper cleanup (#6426) 2021-07-14 15:49:01 +03:00
wang wei
85f43e22db fix(TDI-45795): Upgrade components to use the latest version of cxf / remove axis 1 2021-07-14 17:28:25 +08:00
mbasiuk-talend
e6e3581be6 feat(TDI-45914): bigquery stored procedures (#6363)
* feat(TDI-45914): implement feature for Input component

* feat(TDI-45914): implement feature for SQLRow component

* feat(TDI-45914): implement feature for BulkExec component

* feat(TDI-45914): remove die on error, put error message

* feat(TDI-45914): update bulkexec with gson, reuse version

* feat(TDI-45914): fix SQLRow as standalone component statistic return

* feat(TDI-45914): improve logging code pieces

* feat(TDI-45914): remove extra whitespaces

* feat(TDI-45914): fix gson object missing cid

* feat(TDI-45914): fix gson object missing cid for BulkExec

* feat(TDI-45914): add return values for tBigqueryOutput

* feat(TDI-45914): fix BulkExec and Output statistics, remove virtual cid part
2021-07-14 06:36:32 +03:00
Emmanuel GALLOIS
079173a85e chore(studio731): bump component-runtime to 1.34.1 (#6422) 2021-07-13 09:39:58 +02:00
pyzhou
cf4e374d71 fix(TDI-46340):tMongoDB convert bson to string migration (#6387) 2021-07-13 15:36:29 +08:00
wang wei
13f68ebe73 fix(TDI-46393): runtime log logs the password field for tcompv0 as we don't filter HIDDEN_TEXT type fields (#6410) 2021-07-13 15:31:18 +08:00
bhe-talendbj
667a4a2649 chore(TUP-31163): upgrade daikon to 0.31.12 (#6330)
* chore(TUP-31163): upgrade daikon to 0.31.12

* chore(TUP-31163): upgrade daikon to 0.31.12
2021-07-13 15:27:59 +08:00
jzhao
46a1f26e66 fix(TDI-46081):Oracle BLOB data not ingesting in table when using Dynamic schema (#6421) 2021-07-13 14:49:10 +08:00
chmyga
e16aa8da65 fix(TDI-46165): tBigQueryInput deletes the token properties file for … (#6329)
* fix(TDI-46165): tBigQueryInput deletes the token properties file for OAuth2.0

* Handle auth problems in a different catch block

* fix(TDI-46165): check for 401 error code
2021-07-13 09:36:09 +03:00
ypiel
781a5addc6 feat(TDI-46398): bump tMSCrmInOut to talend-mscrm:3.8-20210707 (#6417) 2021-07-12 14:37:09 +03:00
chmyga
fe44ae77b4 fix(TDI-45907): single insert Bigdecimal (#6384)
* fix(TDI-45907): single insert Bigdecimal

* Set scale for bigdecimals

* fix(TDI-45907): fix indents

* fix(TDI-45907): handle the case where precision is not set (see the sketch below)

Co-authored-by: Dmytro Chmyga <dmytro.chmyga@globallogic.com>
2021-07-12 11:59:53 +03:00
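A minimal sketch of the scale handling those bullets describe, assuming the schema exposes precision and scale, with a non-positive precision standing in for "not set" (the helper is illustrative):

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    final class DecimalBinding {
        static BigDecimal forColumn(BigDecimal value, int precision, int scale) {
            if (value == null || precision <= 0) {
                return value; // precision not set: leave the value untouched
            }
            return value.setScale(scale, RoundingMode.HALF_UP);
        }
    }
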
sbliu
515028bad5 fix(TUP-31869) Studio, TCK components: Table element issues in "Advanced settings" tab. (#6404) 2021-07-09 10:48:31 +08:00
wang wei
8323cb0c5b fix(TDI-45136): improve tRunJob to support Dynamic context and EXTRA_CLASSPATH (#6321) 2021-07-09 10:42:48 +08:00
vyu-talend
cf137543ff Vyu/tdi 46167 modify containsspace for column (#6392)
* feat(TDI-46167):improve contaionsSpaces for column.

* feat(TDI-46167):reverse the changes to JDBC and ODBC.

* feat(TDI-46167):keep the minimal changes.
2021-07-09 10:12:29 +08:00
vyu-talend
3ca0f5ac72 fix(TDI-46125):add strict reply parsing to FTP components. (#6360) 2021-07-08 16:16:43 +08:00
wang wei
a5320f0b67 fix(TDI-46220): upgrade json-smart to 2.4.7 (#6300) 2021-07-08 14:39:44 +08:00
wang wei
7bebaa9fbf fix(TDI-46321): tFileOutputExcel outputs 1 less number in NB_LINE global variable (#6365) 2021-07-08 10:18:52 +08:00
Wei Hua
c51a8712dd APPINT-32340 [tESBConsumer 7.3.1] NullPointerException if context variable is not provided (#6397) 2021-07-08 09:29:40 +08:00
Jill Yan
57ba9552b5 check if JSONObject.NULL before converting (#6399)
* check if JSONObject.NULL before converting (see the sketch below)

* use equals to compare Objects

* correct the logic

Co-authored-by: jillyan <yan955599@gmail.com>
2021-07-07 16:12:36 +08:00
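A sketch of the check described above, assuming the org.json library that JSONObject comes from (the convert method is illustrative): JSONObject.NULL is a sentinel object rather than Java null, so equals() is the safe comparison before converting.

    import org.json.JSONObject;

    final class JsonNullCheck {
        static Object convert(Object value) {
            // JSONObject.NULL.equals(null) is also true in org.json
            if (value == null || JSONObject.NULL.equals(value)) {
                return null; // map JSON null to Java null instead of converting it
            }
            return value.toString(); // illustrative conversion
        }
    }
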
jzhao
31ca62fba6 fix(TDI-43795): Hide Single Insert Query action and mark it deprecated (#4588) (#6403)
Co-authored-by: OleksiiNimych <59691550+OleksiiNimych@users.noreply.github.com>
2021-07-07 15:21:09 +08:00
bhe-talendbj
1990b363f3 fix(TUP-31736): add warning message (#6378)
* fix(TUP-31736): add warning message

* fix(TUP-31736): fix warning message
2021-07-07 11:21:48 +08:00
sbliu
b0175e0c6e fix(TUP-31871) Implicit contextload does not work with microsoftsql server and windows authentication + useNTLMv2=true;domain={MYDOMAIN}. (#6351)
use the driver the user selected, not always the default driver
2021-07-07 09:54:36 +08:00
ypiel
3a8ba3d0d0 feat(TDI-46022): mscrm return representation / custom headers (#6394) 2021-07-06 11:44:12 +02:00
hcyi
301d689b33 fix(TUP-30538):Snowflake test connection should be optional. (#6348)
* fix(TUP-30538):Snowflake test connection should be optional.

* fix(TUP-30538):Snowflake test connection should be optional.

* fix(TUP-30538):Snowflake test connection should be optional.
2021-07-05 16:16:22 +08:00
pyzhou
d6293c745a feat(TDI-46112):Adding profile credential provider (#6379)
* feat(TDI-46112):Adding profile credential provider

* Add feature to Buckets components

* typo

* fix bug of finding profile file

* improve code
2021-07-05 15:33:58 +08:00
wang wei
6a6f966c51 fix(TDI-46251): [7.3.1] MsSQL row named "column" results in code (#6309) 2021-07-05 11:32:59 +08:00
Jane Ding
f5474437f2 feat(TUP-23337):Option to disable Job's screenshot creation. (#6317)
* feat(TUP-23337):Option to disable Job's screenshot creation.
https://jira.talendforge.org/browse/TUP-23337

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-23337):Option to disable Job's screenshot creation.
https://jira.talendforge.org/browse/TUP-23337

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-23337):Option to disable Job's screenshot creation.
https://jira.talendforge.org/browse/TUP-23337

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-23337):Option to disable Job's screenshot creation.
https://jira.talendforge.org/browse/TUP-23337
2021-07-05 08:57:02 +08:00
wang wei
408ea73812 fix(TDI-46329): tS3List NullPointerException in R2021-05 (#6347) 2021-07-02 18:33:28 +08:00
bhe-talendbj
045525ac75 fix(TUP-31858): Implicit Context Load - can't find the jdbc driver class (#6343)
* fix(TUP-31858): Implicit Context Load - can't find the jdbc driver class

* fix(TUP-31858): fix data type

* chore(TUP-31858): refactor

* fix(TUP-31858): add missing JDBC driver properties
2021-06-30 09:47:42 +08:00
jiezhang-tlnd
e96de986bb fix(TUP-31904)java.lang.NullPointerException at (#6359)
org.talend.repository.items.importexport.handlers.cache.RepositoryObjectCache.initialize
2021-06-29 10:48:47 +08:00
Laurent BOURGEOIS
5555f862df feat(TBD-10185):Support Datasets API in tMap - Spark Batch - Inner|Left Join / Match model: Unique match (#6313) 2021-06-25 09:51:18 +02:00
hcyi
8770aa2225 fix(TUP-31218):Metadata for Salesforce: UI display issue when enabling Mutual (#6356)
authentication
2021-06-25 14:09:44 +08:00
hcyi
b97d359382 fix(TUP-31608):Output link from one tELTMSSqlMap to another tELTMSSqlMap (#6332)
is not saved after we remove it once.
2021-06-24 18:21:09 +08:00
Olha V
4e6a70a0ef chore(TDI-46067): Bump commons-lang3 (#6227)
* chore(TDI-46067): Bump commons-lang3

* bump commons-lang3 to 3.10 in studio-se to align between all
  components

* chore(TDI-46067): Bump commons-lang3

* clean up unused UrlPath

* chore(TDI-46067): Bump commons-lang3

* updated talend-mscrm lib version after rebase
2021-06-24 10:45:53 +03:00
jzhao-talend
c84b02c186 fix(TDI-46297):FCB 'EnableRegionalDisco' is disabled error when using
tMicrosoftCrmInput
2021-06-21 11:55:58 +08:00
wang wei
f2e2d9dd43 fix(TDI-46218): tRedshiftOutput data loss when insert size matches the input size (#6303) 2021-06-17 14:24:33 +08:00
pyzhou
4f362b8cd4 fix(TDI-44917):error message compile error (#6336) 2021-06-16 11:00:13 +08:00
Oleksandr Zhelezniak
838c12ba2d fix(TDI-46248): fix compile error when dieOnError is checked (#6308) 2021-06-14 11:00:56 +03:00
Dmytro Grygorenko
a20f8c75f0 fix(TDI-46065): Redshift - add "Credential Provider" option to XML layout. (#6207)
* fix(TDI-46065): add "Credential Provider" option to XML layout.

* fix(TDI-46065): wording correction.

* fix(TDI-46065): some more XML layout rearrangements.

* fix(TDI-46065): reverted row number changes.

* fix(TDI-46065): fix for existing S3 connection.

* fix(TDI-46065): added fix for tRedshiftBulkExec component.

* fix(TDI-46065): some corrections, migration task.

* fix(TDI-46065): fix imports.

* fix(TDI-46065): corrections to migration task.

* fix(TDI-46065): process checkbox in migration task.

* fix(TDI-46065): fix for tRedshiftOutputBulkExec component (Web identity token)

* fix(TDI-46065): web_* credential transition from OutputBulkExec to OutputBulk component.

* fix(TDI-46065): set credential provider inaccessible; modify migration task.

* fix(TDI-46065): one more change to migration task.

* fix(TDI-46065): reverting recent changes, restoring migration task.

* fix(TDI-46065): hide "Web identity token" option for Redshift components.
2021-06-11 11:13:33 +03:00
jiezhang-tlnd
e6a05e0738 Jzhang/73/tup 31122 (#6311)
* feat(TUP-31122)new Data Center in AWS Australia
https://jira.talendforge.org/browse/TUP-31122

* update login ui

Co-authored-by: jie.zhang <jie.zhang@LT-DDC8R73.talend.com>
2021-06-10 17:59:12 +08:00
Emmanuel GALLOIS
2e5d89b14a feat(TDI-46215): bump component-runtime to 1.33.1 (#6293) 2021-06-10 09:04:33 +02:00
jzhao
a0bf8ea8b7 feat(TDI-45575):Support Parquet File format for regular DI (#6320)
* add talend-parquet lib
2021-06-10 14:43:04 +08:00
pyzhou
cb2d011370 fix(TDI-44917):Fix Compile Error (#6318)
* fix(TDI-44917):Fix Compile Error

* fix(TDI-44917):Fix Compile Error
2021-06-10 11:36:09 +08:00
vyu-talend
fe9f23eee5 Vyu/tdi 45797 s3put enable object lock feature (#6285)
* feat(TDI-45797):add object lock feature to s3put.

* feat(TDI-45797):optimize code.

* feat(TDI-45797):make some changes.

* feat(TDI-45797):fix code generation error.

* feat(TDI-45797):fix context value issue.

* feat(TDI-45797):fix the common file issue in ts3copy.

* feat(TDI-45797):fix the common file path issue in s3copy.

Co-authored-by: Balázs Gunics <bgunics@talend.com>
2021-06-09 11:17:59 +08:00
pyzhou
e7903640b2 fix(TDI-46062):add checkbox Preserve last modified time tFileCopy (#6261)
* fix(TDI-46062):add checkbox Preserve last modified time tFileCopy

fix(TDI-46062): add Junit test

* MigrationTask

* Hide preserve last modified time when check copy directory
2021-06-09 11:14:14 +08:00
ypiel
b6676e4fbd feat(TDI-45155): ms crm support odata expand 7.3 backport (#6312)
* feat(TDI-45155): ms crm support odata expand 7.3 backport

* feat(TDI-45155): Bump lib in MicrosoftCrmOutput_java.xml

* feat(TDI-45155): formatting issue
2021-06-08 16:16:35 +02:00
Dmytro Grygorenko
5ec26c5514 fix(TDI-46109): update XStream to 1.4.17 (#6283) 2021-06-08 09:20:02 +03:00
pyzhou
aee262c30d fix(TDI-46152):tmap die on error issue (#6291)
* fix(TDI-46152):tmap die on error issue

* fix NPE
2021-06-08 10:29:44 +08:00
bkatiukhov
4b68070278 APPINT-32851 Fix cSplitter with JSonPath is not working as expected (#6232)
Co-authored-by: bohdan.katiukhov <bohdan.katiukhov@KBP1-LHP-A00125.synapse.com>
2021-06-07 13:34:47 +03:00
Jill Yan
c7cc06102f Fix/jill/APPINT-32940 (maintenance/7.3.1) (#6286)
* APPINT-32940

* APPINT-32940

correct the logic and add if (unselectList.size() > 0), or it will add all dependencies into the manifest (see the sketch below)

* APPINT-32940 format

* APPINT-32940 format

* APPINT-32940 compare maven uri

* APPINT-32940

format

* APPINT-32940 refactor

* APPINT-32940  refactor

* APPINT-32940 correct check logic

Co-authored-by: jillyan <yan955599@gmail.com>
2021-06-07 18:01:07 +08:00
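A hedged reconstruction of the guard quoted in those bullets (the collection name follows the commit message; the surrounding method is invented): without the size() check, the exclusion logic misfired and every dependency ended up in the manifest.

    import java.util.ArrayList;
    import java.util.List;

    final class ManifestDependencyFilter {
        static List<String> exported(List<String> dependencies, List<String> unselectList) {
            List<String> result = new ArrayList<>(dependencies);
            if (unselectList.size() > 0) { // the corrected condition
                result.removeAll(unselectList);
            }
            return result;
        }
    }
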
wang wei
cfe68fa443 fix(TDI-45979): commons-compress-1.18 to 1.19 (#6274) 2021-06-07 10:22:57 +08:00
pyzhou
a961d53357 Pyzhou/tdi 44917 check components error massage 7.3 (#5448)
* fix(TDI-44917):format

* fix(TDI-44917):add error message

* fix tdie and tCreate table

* fix some compile error

* fix tmap

* fix error

* fix toracleOutput

* fix error

* fix compile error tFileInputMSPositional

* add googleStorageConnection.javajet

* add omission (#5909)

* fix bug

* fix compile error of tNetezzaOutput
2021-06-04 09:27:54 +08:00
mbasiuk-talend
f9004ebd4c chore(TDI-46053): upgrade snakeyaml to 1.26 (#6254) 2021-06-03 11:20:44 +03:00
sbliu
e43b872877 feat(TUP-30834) replace axis1 with axis2. (#6152)
remove axis1 export code for export job, ref TUP-19079.
remove dependency on axis1 for repository metadata, remove dependency on salesforce.
for WSDL2JAVAController, still use axis1 to translate the wsdl.
2021-06-02 18:28:38 +08:00
chmyga
387871fb59 fix(TDI-46100): connection support (#6253)
* fix(TDI-46100): connection support

* Add reuse connection support to standalone connectors

* fix(TDI-46100): connection support

* Add comment explaining reflection

Co-authored-by: Dmytro Chmyga <dmytro.chmyga@globallogic.com>
2021-05-28 16:21:21 +03:00
hcyi
159cdc9c9d fix(TUP-31553):Hadoop Metadata Wizard: when using the custom distro, the dialog (#6258)
box doesn't pop up to import dependencies.
2021-05-28 14:37:43 +08:00
bhe-talendbj
09bcc66d09 chore(TUP-31617): remove commons-compress-1.18 (#6277)
* chore(TUP-31617): remove commons-compress-1.18

* chore(TUP-31617): update build.properties
2021-05-28 11:42:42 +08:00
hzhao-talendbj
3bd63f5795 feat(TUP-26184): add the same features to filter fields, same as in the (#6213)
* feat(TUP-26184): add the same features to filter fields, same as in the
tMap

* TUP-26184: some temp code changes

* TUP-26184 add special checkbox for eltmap table column

* TUP-26184 remove useless code

* TUP-26184 remove useless code

* add line back

* TUP-26184 remove some useless code

* TUP-26184 remove useless code

* TUP-26184 remove useless code

* TUP-26184 fix NPE issue

* TUP-26184 remove useless code

* TUP-26184 fix link can't auto change position issue

* TUP-26184 fix link display issue

* TUP-26184 add Enable/disable column name filter

* TUP-26184 fix some NPE errors when filter is on and select below columns

* TUP-26184 change filter icon position to sync with tmap
2021-05-28 09:59:39 +08:00
Jane Ding
32d256d666 fix(TUP-31316):Error connecting to Azure SQL database with Azure Active (#6270)
Directory method
https://jira.talendforge.org/browse/TUP-31316
2021-05-26 17:30:09 +08:00
zyuan-talend
b96ee6514b feat(TUP-30343):have the "Export Dependencies" option checked by default. (#6273) 2021-05-26 11:38:25 +08:00
jiezhang-tlnd
fa08aef33c fix(TUP-31228)Netsuite tck guess schema when using existing connection and (#6241)
* fix(TUP-31228)Netsuite tck guess schema when using existing connection and
Token-based login type

* fix(TUP-31228)Netsuite tck guess schema when using existing connection and
2021-05-26 10:05:04 +08:00
Jane Ding
f2325c166d fix(TUP-30849):Improve build Job performance (#6014)
* fix(TUP-30849):Improve build Job performance
https://jira.talendforge.org/browse/TUP-30849

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-31117):Improve performances related to recursive jobs (#6243)

https://jira.talendforge.org/browse/TUP-31117
2021-05-25 18:10:53 +08:00
jiezhang-tlnd
36f23162bb fix(TUP-31164)Guess schema button on the informix tDBinput component (#6221)
* fix(TUP-31164)Guess schema button on the informix tDBinput component
returns zero length datatype

* Guess schema button on the informix tDBinput component returns zero
length datatype
2021-05-25 11:28:41 +08:00
Oleksandr Zhelezniak
7118b02042 feat(TDI-45963): after variables (#5933)
* handle specific metakey
* feature uses metadata endpoint in the framework
* extract metadata in javajet
2021-05-24 12:20:24 +03:00
apoltavtsev
d2ae45d2df bugfix(APPINT-33067) Backport fix for NPE 2021-05-21 10:53:43 +02:00
bkatiukhov
dfa91dd61e APPINT-32843 Fix error while deploying a route with tAzureStorageQueue (#6156)
* APPINT-32843 Fix error while deploying a route with tAzureStorageQueue

* Update osgi-exclude.properties

Co-authored-by: Bohdan Katiukhov <bohdan.katiukhov@synapse.com>
2021-05-21 10:14:34 +03:00
jiezhang-tlnd
f75b7895db fix(TUP-31213)tNetSuiteV2019Input failed to guess schema because preview (#6237)
subjob can't work
https://jira.talendforge.org/browse/TUP-31213
2021-05-21 11:16:53 +08:00
apoltavtsev
e05955934f chore(APPINT-32953) fix the NPE when using DI license 2021-05-20 08:51:01 +02:00
SunChaoqun
38acaab6e1 TESB-31720:[7.4.1]Build a job as OSGi will get a jar with only META-INF (#5756) (#6248) 2021-05-19 20:05:39 +02:00
hzhao-talendbj
19300112e8 chore(TUP-29079): remove some urlpath (#6224) 2021-05-19 15:31:27 +08:00
pyzhou
25ace64c68 Pyzhou/tdi 41535 refactor scp components (#6188)
* fix(TDI-41535):change footer

* fix(TDI-41535):connection and close

* fix(TDI-41535):Filelist

* fix(TDI-41535):tSCPDelete

* fix(TDI-41535):tSCPFileExist

* fix(TDI-41535):tSCPGet & tSCPPut

* fix(TDI-41535):tSCPPut remove duplicate

* fix(TDI-41535):tSCPClose bug

* fix(TDI-41535):tSCPTruncate

* fix(TDI-41535):fix public key compile error

* fix(TDI-41535):tSCPFileList count 0 line

* fix(TDI-41535):tSCPGet overwrite warning

* fix(TDI-41535):tSCPGet error message

* fix(TDI-41535):tSCPGet escape space

* fix(TDI-41535):tSCPGet tSCPPut wildCard

* fix(TDI-41535):tSCPGet nb_line

* fix(TDI-41535):tSCPPut error

* fix(TDI-41535):tSCPPut truncate throw Exception

* fix(TDI-41535):jar for scp components

* fix(TDI-41535):add distributionManagement
2021-05-19 14:14:26 +08:00
apoltavtsev
a188cd0e07 fix(APPINT-32953) NPE is corrected 2021-05-19 04:51:05 +02:00
apoltavtsev
e3473f4aa5 fix(APPINT-32953) Routelets are built before Route 2021-05-18 09:02:45 +02:00
Dmytro Sylaiev
7ac39ecd46 fix(TDI-46060): Fix compile error for tFTPFileList (#6219) 2021-05-14 18:19:00 +03:00
sponomarova
bfe5e903c6 fix(TBD-12358): cfx lib change (#6228) 2021-05-14 13:58:54 +03:00
apoltavtsev
e18a8f48a0 fix(APPINT-32995) Root poms installed in CI mode 2021-05-14 10:54:49 +02:00
vdrokov
f7937c3710 Vdrokov fix appint 32987 maintenance/7.3 (#6202)
* APPINT-32905: Issue with Rest service flow

* APPINT-32987: Fix dublicate variable
2021-05-13 12:09:04 +03:00
hcyi
85e8040773 feat(TUP-26747):improve Outline for joblet and subjob. (#6211)
* feat(TUP-26747):improve Outline for joblet and subjob.

* feat(TUP-26747):improve outline for joblet and subjob.

* feat(TUP-26747):improve outline for joblet and subjob.

* feat(TUP-26747):improve outline for joblet.

* feat(TUP-26747):switch to ComponentSettingsView.
2021-05-13 16:48:24 +08:00
qiongli
70908ad2df chore(TDQ-19297): Upgrade 'cxf' to 3.3.10 (#6199) 2021-05-13 16:21:09 +08:00
Max
a7f1809476 fix(TBD-12358): CVE: Update CXF to 3.3.10 (#6158) 2021-05-13 09:13:52 +03:00
zyuan-talend
2d04f97a64 feat(TUP-25494):Correct dialog title. (#6217) 2021-05-13 10:30:41 +08:00
bkatiukhov
3d5992f017 APPINT-32254 Fix bean-validation ignored when deployed in Runtime (#6174)
* APPINT-32254 Fix bean-validation ignored when deployed in Runtime

* Add specific version for validation constraints

Co-authored-by: Bohdan Katiukhov <bohdan.katiukhov@synapse.com>
Co-authored-by: bohdan.katiukhov <bohdan.katiukhov@KBP1-LHP-A00125.mshome.net>
2021-05-12 19:02:36 +03:00
Max
6b11676a66 fix(TBD-12142): CVE: jersey-core-1.4 and jersey-1.9 (#6168) 2021-05-12 13:38:59 +03:00
zyuan-talend
a102775762 fix(TUP-30430): Clone the connection's UNIQUE_NAME for some links (#6194)
instead of generating a new one in joblet node container.
2021-05-12 17:44:27 +08:00
vyu-talend
2721082b75 feat(TDI-45122):add charset to filefetch's parameters (#6149)
* Added encoding option for the upload parameters. Default behavior is unchanged.
Used OPENED_LIST so context parameters can be used.

* feat(TDI-45122):add charset to filefetch's parameters

* feat(TDI-45122):fix issues.

* feat(TDI-45122):fix issue.

Co-authored-by: Balázs Gunics <bgunics@talend.com>
2021-05-12 16:08:09 +08:00
Liu Xinquan
0c9629ef55 fix(APPINT-32593) java.lang.ClassNotFoundException: org.apache.cxf.message.Message (#6200) 2021-05-12 13:55:30 +08:00
ovladyka
14da1383e1 fix(TDI-45910):Jakarta-ORO imported (#6166)
* fix(TDI-45910):Jakarta-ORO imported

* fix(TDI-45910):Jakarta-ORO updated the mvn GAV and required
2021-05-11 18:41:52 +03:00
mbasiuk-talend
01d97c8f63 chore(TDI-45882): bump cxf 3 3 10 (#6167)
* chore(TDI-45882): bump cxf to 3.3.10

* chore(TDI-45882): remove unused cxf version variable
2021-05-11 16:17:21 +03:00
Oleksandr Zhelezniak
59a1b91e4a chore(TDI-45929): bump talend-mscrm (#6101)
* bump in org.talend.libraries.crm
2021-05-11 15:41:26 +03:00
Oleksandr Zhelezniak
e01d4de5c3 fix(TDI-45968): fix commons-codec module name (#6136) 2021-05-11 15:34:58 +03:00
chmyga
d343213ecb fix(TDI-45859): tFileCopy with different fs (#6083)
* fix(TDI-45859): tFileCopy with different fs

* Add force copy parameter

* Copy and delete file instead of moving

* fix(TDI-45859): tFileCopy with different fs

* Add test

* Fix PR comment

* fix(TDI-45859): tFileCopy with different fs

* Fix UI issue

Co-authored-by: Dmytro Chmyga <dmytro.chmyga@globallogic.com>
2021-05-11 14:32:45 +03:00
Oleksandr Zhelezniak
1bd7157e10 fix(TDI-46005): cve bump httpclient to 4.5.13 (#6164)
* bump httpclient to 4.5.13
* fix groupId for talend-bonita-client in javajet
* update httpcore
2021-05-11 13:11:44 +03:00
Dmytro Sylaiev
b68c9ef23f fix(TDI-45824): Bump jersey for tRest (#6137) 2021-05-11 13:03:33 +03:00
Oleksandr Zhelezniak
bf8ae50d77 fix(TDI-45973): cve bump httpclient to 4.5.13 (#6148) 2021-05-11 12:49:09 +03:00
Dmytro Grygorenko
0757392e0a fix(TDI-46004): tEXist components and tXMLRPCInput dependency import. (#6178)
* fix(TDI-46004): fix for dependency import from Nexus.

* fix(TDI-46004): fixed GAV to match the ones from Maven Central.

* fix(TDI-46004): adding all tEXist components.
2021-05-11 10:33:24 +03:00
wang wei
bdb2545a42 fix(TDI-45491): Contextualized configuration properties for S3 component (#6143) 2021-05-11 14:26:35 +08:00
hcyi
3e46ca4dee feat(TUP-26747):Clickable hyperlink from Outline listed component to the job canvas. (#6121)
* feat(TUP-26747):
Clickable hyperlink from Outline listed component to the job canvas.

* feat(TUP-26747):Clickable hyperlink from Outline listed component to the
job canvas.

* feat(TUP-26747):Clickable hyperlink from Outline listed component to the
job canvas.

* feat(TUP-26747):Clickable hyperlink from Outline listed component to the
job canvas.

* feat(TUP-26747):Clickable hyperlink from Outline listed component to the
job canvas.

* feat(TUP-26747):Clickable hyperlink from Outline listed component to the
job canvas.

* feat(TUP-26747):moved the link with editor button to the first one.

* feat(TUP-26747):link with editor if selected component variable.
2021-05-11 09:51:17 +08:00
Denis Sergent
650b50420b Revert "APPINT-32905: Issue with Rest service flow (#6171)" (#6192)
This reverts commit d5386d1114.
2021-05-10 11:36:29 +02:00
kjwang
76137c4b3c Fix:TUP-31429 Fail to add reference project (#6183)
Fix:TUP-31429 Fail to add reference project
https://jira.talendforge.org/browse/TUP-31429
2021-05-10 17:23:41 +08:00
kjwang
371908919b Fix TUP-31096 Could not find or load main class error on the jobs (#6176)
Fix TUP-31096 Could not find or load main class error on the jobs created on the Feature Branch which has #
https://jira.talendforge.org/browse/TUP-31096
2021-05-10 17:22:56 +08:00
Jane Ding
8a20a15f9f fix(TUP-31237):Invalid username or password when creating a Snowflake (#6132)
Metadata Connection with a Snowflake password that has a slash character
https://jira.talendforge.org/browse/TUP-31237
2021-05-10 16:04:03 +08:00
wang wei
40e5c5f7fd fix(TDI-45663): tS3List: Adds Missing File Details (#6146) 2021-05-10 13:39:02 +08:00
wang wei
bcb2d60a99 fix(TDI-45580): Contextualize multipart threshold parameter for S3 components (#6141) 2021-05-10 13:29:50 +08:00
chmyga
3d9d6734c2 feat(TDI-45836): Standalone connectors support (#6018)
* feat(TDI-45836): Standalone connectors support

* Integrate TCK Standalone connectors to studio

* feat(TDI-45836): Standalone connectors support

* remove NB_LINE after var for Standalone connectors

Co-authored-by: Dmytro Chmyga <dmytro.chmyga@globallogic.com>
2021-05-10 11:16:17 +08:00
wang wei
a54823f72d fix(TDI-45821): CVE: json-smart-2.2.1.jar (#6161) 2021-05-10 11:02:24 +08:00
wang wei
2a4167eb4f fix(TDI-45913): Enforce that only Strings, Maps and HashMaps can be loaded from the crcMap file(#6098) 2021-05-10 10:47:42 +08:00
wang wei
a3a53e8447 fix(TDI-45912): Enforce that System path separator character is indeed a character for tRunjob(#6092) 2021-05-10 10:39:09 +08:00
bhe-talendbj
545bc72afa fix(TUP-31346): add default branch for remote uninitialized git repo (#6182)
* fix(TUP-31346): always show selected item

* fix(TUP-31346): select branch
2021-05-08 16:58:54 +08:00
Max
dbc2f213c2 fix/TBD-12115: CVE: derby-10.11.1.1.jar (#6159) 2021-05-06 12:51:23 +03:00
kjwang
2a5cb99f75 Feat:TUP-30377 Move the "Allow specific characters (UTF8,...)" (#6139)
Feat:TUP-30377 Move the "Allow specific characters (UTF8,...) preference setting to project setting.
https://jira.talendforge.org/browse/TUP-30377
2021-05-06 14:46:29 +08:00
ovladyka
73a00f14bb Fix(TUP-30413:Comma missing for tELTMap with multiple inputs, when aliases are used) (#5844) 2021-05-06 10:04:04 +08:00
vdrokov
d5386d1114 APPINT-32905: Issue with Rest service flow (#6171) 2021-05-05 12:31:19 +03:00
Dmytro Sylaiev
2b90106385 fix(TDI-45642): Fix codegen error for Greenplum (#6102)
* fix(TDI-45642): Fix codegen error for Greenplum

* chore(TDI-45642): Add missing empty line
2021-05-05 11:54:00 +03:00
kjwang
cfc6477b33 Revert "TUP-31096 Could not find or load main class error on the jobs created on the Feature Branch which has # (#6082)" (#6169)
This reverts commit 5f5c92a766.
2021-04-30 18:36:19 +08:00
Dmytro Grygorenko
df55122199 fix(TDI-45879): save initial value of NB_LINE to globalMap. (#6076) 2021-04-28 17:19:24 +03:00
ovladyka
98a1bed1e1 fix(TDI-45900): Encoding doesn't work for byte[] type when tFileOutputDelimited use CSV option (#6140)
Updated two javajet files tFileOutputDelimited_begin and tFileOutputDelimited_main
2021-04-28 10:43:13 +03:00
zyuan-talend
194ac012c4 feat(TUP-25494): provide branch search and memory from launcher. (#6138)
* feat(TUP-25494): provide branch search and memory from launcher.
2021-04-28 14:42:22 +08:00
kjwang
5f5c92a766 TUP-31096 Could not find or load main class error on the jobs created on the Feature Branch which has # (#6082)
* TUP-31096 Could not find or load main class error on the jobs created on
the Feature Branch which has #
https://jira.talendforge.org/browse/TUP-31096
2021-04-27 17:57:17 +08:00
OleksiiNimych
d46b547fc9 fix(TDI-45551): SingleStore fix blob type processing (#6084) 2021-04-26 12:32:43 +03:00
Oleksandr Zhelezniak
b05d599f3f fix(TDI-43931) pass full date to independent child job (#6094)
* Convert long as date from context args
* Update file root to be up to date
* Implement fix for tRunJob
* Mention string parse exception in log warn

(cherry picked from commit 87d3fd7c7d)

Co-authored-by: Dmytro Sylaiev <dmytro.sylaiev@globallogic.com>
2021-04-26 11:00:03 +03:00
ypiel
816d395f2d chore: bump to tck:1.32.0 2021-04-22 12:13:35 +02:00
Zhiwei Xue
bfe02643b3 fix(TUP-31246):Inner routine node disappeared after refresh repository (#6130)
view
2021-04-21 17:52:04 +08:00
kjwang
f99d97538f TUP-21090 Support connection component for tacokit (Fix command line (#6124)
* TUP-21090 Support connection component for tacokit (Fix command line
load image error)
https://jira.talendforge.org/browse/TUP-21090
2021-04-20 11:29:36 +08:00
Jane Ding
8d2ff69e40 fix(TUP-30992):CVE: junit-4.11,4.12,4.13 (#6123)
https://jira.talendforge.org/browse/TUP-30992
2021-04-19 19:29:22 +08:00
Zhiwei Xue
f23c9b02ee fix(TUP-31027):[performance] studio will hang when import a special job (#6114) 2021-04-19 15:35:56 +08:00
hcyi
d794cc9a7b fix(TUP-30793):to fix the password problem of RabbitMQ. (#6116) 2021-04-19 15:24:43 +08:00
Jane Ding
ac5cc1ee1d fix(TUP-30992):CVE: junit-4.11,4.13 (#6110)
https://jira.talendforge.org/browse/TUP-30992

Signed-off-by: jding-tlnd <jding@talend.com>
2021-04-16 19:19:19 +08:00
wang wei
70a75cf790 fix(TDI-45577): Job using snowflake dynamic schema fails with special char (#6095) 2021-04-16 18:11:34 +08:00
Jane Ding
db870ecc30 fix(TUP-30992):CVE: junit-4.11,4.13 (#6106)
https://jira.talendforge.org/browse/TUP-30992
fix(TUP-29033):Fail to run testcase in studio and CI
https://jira.talendforge.org/browse/TUP-29033

Signed-off-by: jding-tlnd <jding@talend.com>
2021-04-16 16:30:14 +08:00
Oleksandr Zhelezniak
78f9b554eb feat(TDI-45323): new auth provider web token (#6060)
* replace checkbox with drop-down credential provider list
* migration task
2021-04-15 12:57:13 +03:00
apoltavtsev
7146bdf26c bugfix(APPINT-32288) Propagate "bundleVersion" option 2021-04-15 10:28:05 +02:00
jiezhang-tlnd
4655c0a059 fix(TUP-30992)CVE: junit-4.11,4.13 (#6090) 2021-04-15 16:13:14 +08:00
Max
49658a28d3 fix(TBD-12112): commons-beanutils-core-1.8.0.jar to 1.9.4 (#6012)
* fix(TBD-12112): commons-beanutils-core-1.8.0.jar to 1.9.4

* fix(TBD-12112): dead code elimination

* fix(TBD-12112): additional code cleanup
2021-04-15 10:53:11 +03:00
SunChaoqun
cda46bb231 APPINT-32688:R2021-03 issue with tDB*** using datasource (#6085) 2021-04-15 10:25:13 +08:00
Zhiwei Xue
9df3a48b78 fix(TUP-30791): remove setup code dependencies action for testcase (#6089) 2021-04-14 16:54:13 +08:00
hcyi
9cce21a3bd fix(TUP-30438):Issue when updating snowflake table using tELTOutput. (#5883)
* fix(TUP-30438):Issue when updating snowflake table using tELTOutput.

* fix(TUP-30438):add junts.

* fix(TUP-30438):Issue when updating snowflake table using tELTOutput.
2021-04-14 16:14:48 +08:00
kjwang
174ea89be9 TUP-31145 TCK:Guess schema use an exist connection will overwrite the parameters of component tNetSuiteV2019Input (#6081)
* TUP-31145 TCK:Guess schema use an exist connection will overwrite the
parameters of component tNetSuiteV2019Input
https://jira.talendforge.org/browse/TUP-31145

* TUP-31145 TCK:Guess schema use an exist connection will overwrite the
parameters of component tNetSuiteV2019Input (Fix guess schema issue
cause by TDI-45246)
https://jira.talendforge.org/browse/TUP-31145
2021-04-14 15:26:20 +08:00
kjwang
b4f2124a60 kjwang/Feat TUP-21090 Support connection component for tacokit (#5977)
* kjwang/Feat TUP-21090 Support connection component for tacokit
https://jira.talendforge.org/browse/TUP-21090
2021-04-13 18:06:53 +08:00
Richard Lecomte
5ac16bb7cc TDI-45014 : SFTP auth with password and public key (#6030)
* TDI-45014 : SFTP auth with password and public key

* TDI-45014 : SFTP auth with password and public key

* TDI-45014 : SFTP auth with password and public key

* TDI-45014 : SFTP auth with password and public key

Added parenthesis

* TDI-45014 : SFTP auth with password and public key

Smarter getPassword method
2021-04-13 11:56:15 +02:00
Emmanuel GALLOIS
bc445f065c feat(TDI-45842): bump component-runtime to 1.31.2 (#6029)
* feat(TDI-45842): bump component-runtime to 1.31.1-SNAPSHOT

* feat(TDI-45842): bump component-runtime to 1.31.2

Co-authored-by: jzhao-talend <jzhao@talend.com>
Co-authored-by: mbasiuk <mbasiuk@talend.com>
2021-04-13 12:40:03 +03:00
zyuan-talend
891e6a9d5e fix(TUP-29284): only show proposals for enabled categories(proposal (#6070)
kinds).
2021-04-13 17:20:38 +08:00
hzhao-talendbj
c6e4e79411 fix(TUP-30625): update maven project after convert jobscript to job (#6069) 2021-04-13 14:49:16 +08:00
sbliu
47ffb3d242 feat(TUP-30358) Enhance Data Collector - Route usage details 2021-04-13 10:57:20 +08:00
jiezhang-tlnd
ae30bc1fb3 fix(TUP-30954)CVE_xstream-1.4.15.jar (#6035) 2021-04-13 10:54:41 +08:00
Emmanuel GALLOIS
6b7fce2f78 feat(TDI-45246): do not put in configuration hidden parameters 2021-04-12 12:02:03 +02:00
hcyi
6049577e03 feat(TUP-30291):Add Suggestable support for Table options (List<Row>) in Studio. (#6013)
* feat(TUP-30291):Add Suggestable support for Table options (List<Row>) in
Studio.

* feat(TUP-30291):Add Suggestable support for Table options (List<Row>) in
Studio.

* feat(TUP-30291):improve for the implementation.

* feat(TUP-30291):fix TUP-31031 Add condition, NOT select field, directly
select operator, then throw errors

* feat(TUP-30291):fix TUP-31031 Add condition, NOT select field, directly
select operator, then throw errors

* feat(TUP-30291):fix TUP-31032 [random] Studio can't save field value.

* feat(TUP-30291):fix TUP-31032 [random] Studio can't save field value.
2021-04-12 15:24:26 +08:00
jiezhang-tlnd
186fcafb29 feat(TUP-30381)Support greenplum driver for Greenplum Database in studio (#5995)
* feat(TUP-30381)Support greenplum driver for Greenplum Database in studio
metadata
https://jira.talendforge.org/browse/TUP-30381

Conflicts:
	main/plugins/org.talend.repository/plugin.xml

* remove hard code

* add dbversion for greenplum

* Add REPOSITORY_VALUE for Greenplum components

* set right dbversionString
2021-04-12 10:02:40 +08:00
wang wei
7f3d3b7a59 fix(TDI-45650): [7.3.1] tDeltaLakeOutput- not handling the updates when we are using Dynamic schema(#5908) 2021-04-12 09:35:14 +08:00
Zhiwei Xue
9a11a94043 fix(TUP-30783):Support inner routine with the same name in different (#6065)
custom jar.
2021-04-09 15:50:36 +08:00
Zhiwei Xue
a47de9821f feat(TUP-29952):Change "Assign Routine to" action to "Copy Routine to". (#6059) 2021-04-09 15:49:34 +08:00
Dmytro Sylaiev
f6114ef000 fix(TDI-45642): Count key columns (#6039)
* fix(TDI-45642): Count key columns

* fix(TDI-45642): Fix another regression related to dynamic schema
2021-04-08 19:29:23 +03:00
Zhiwei Xue
9d93ff1652 fix(TUP-30786):Avoid to click finish button more than once when do (#6053)
create custom routine jar/bean jar.
2021-04-08 18:32:57 +08:00
clesaec
51a97c8b24 TDI-45786 - add charset on dynamic (#6027)
* TDI-45786 - add charset on dynamic
2021-04-08 11:38:21 +02:00
Dmytro Sylaiev
29ec16e725 fix(TDI-42478): tFTPConnection : SSL/TLS Client Authentication does not work : no suitable certificate found - continuing without client authentication (#5971)
Co-authored-by: s.bovsunovskyi <s.bovsunovskyi@globallogic.com>
2021-04-08 10:56:55 +03:00
hcyi
6e5e7d1e0a fix(TUP-30731):tELTPostgresqloutput context not work when checked "use (#6021)
update statement without subqueries"
2021-04-08 14:57:12 +08:00
hcyi
5dda69da6a fix(TUP-30793):TCK Datastore on studio Metadata. (#6049) 2021-04-08 11:20:44 +08:00
hzhao-talendbj
e534bed3e0 chore(TUP-27039): Update Commons Compress to 1.19 backport to 7.3 (#5996) 2021-04-08 10:51:44 +08:00
wang wei
56bc8ee766 fix(TDI-45815): CVE: xstream-1.4.15.jar (#6040) 2021-04-08 09:34:57 +08:00
Zhiwei Xue
71413a41dc fix(TUP-30780):Only check used custom jars when run/build Job (#6041) 2021-04-06 15:52:30 +08:00
Dmytro Sylaiev
6240c4331e fix(TDI-45776): Bump slf4j-jdk14 version to make it downloadable (#6017)
* fix(TDI-45776): Bump slf4j-jdk14 version to make it downloadable

* fix(TDI-45776): Apply also for tBonitaInstantiateProcess
2021-04-06 10:48:53 +03:00
Zhiwei Xue
92fac62ac0 fix(TUP-30977):test run map using custom routines and beans doesn't work (#6023)
after switch branch
2021-04-06 12:05:56 +08:00
Laurent BOURGEOIS
8bdca657d4 fix(TBD-11968):CVE commons-collections 3.2.1 (#5866) 2021-04-02 15:43:30 +02:00
bhe-talendbj
ea33bcd37e feat(TUP-30047): Need support of tRunJob with Dynamic Job option enabled on test cases / CI/CD (#6007)
* fix(TUP-30047): Correct classpath for tRunJob in CI mode

* feat(TUP-30047): support running dynamic jobs

* feat(TUP-30047): change location of classpath.jar if running ci test
2021-04-02 16:14:27 +08:00
AlixMetivier
61b2b21833 feat(TBD-11317): allow tS3Configuration to work with assume role in joblet (#5980) 2021-04-01 14:11:27 +02:00
pyzhou
399ae80700 fix(TDI-45834):tFileCopy change module name (#6019) 2021-03-31 16:49:31 +08:00
jiezhang-tlnd
10fd426856 chore(TUP-29381): add dependency for assembly (#5574) (#5839)
* chore(TUP-29381): add dependency for assembly

* add dependency

* add to template

Co-authored-by: hzhao-talendbj <49395568+hzhao-talendbj@users.noreply.github.com>
2021-03-30 15:57:13 +08:00
bkatiukhov
c113df2c41 TESB-32307 tESBConsumer - wrong header content-type (#5976)
Co-authored-by: bohdan.katiukhov <bohdan.katiukhov@KBP1-LHP-A00125.synapse.com>
2021-03-29 10:18:38 +02:00
pyzhou
4eb679c6e9 fix(TDI-45727):CVE jackson-mapper-asl (#5946) 2021-03-26 18:49:01 +08:00
Dmytro Sylaiev
1cf44a07ec fix(TDI-45642): Throw a warning exception when every column is a key… (#5855)
* fix(TDI-45642): Throw a warning exception when every column is a key for update

* Refactor, deduplicate code

* fix(TDI-45642): Fix codegen error for mssql

* fix(TDI-45642): Throw an error for update on duplicate mysql

* fix(TDI-45642): Warn message instead of exception for insert or update
2021-03-25 11:23:49 +02:00
Zhiwei Xue
0bdf41d228 fix(TUP-30813):Add Junits for dependency management feature (#6003) 2021-03-25 09:39:19 +08:00
Laurent BOURGEOIS
c64fec7601 fix(TBD-12776): Fix testAddLog4jToModuleList unit test (#5998) 2021-03-24 11:05:56 +01:00
Oleksandr Zhelezniak
8ab6492011 feat(TDI-45732): extend not dieoneerror area (#5991)
* extend try-block that includes "Input tables (lookups)"
2021-03-24 10:28:51 +02:00
Zhiwei Xue
780ce47ad7 fix(TUP-30779):Custom jar resource unload issue after git pull&merge. (#5982)
* fix(TUP-30779):Custom jar resource unload issue after git pull&merge.

* fix(TUP-30845): fix refreshing NPE
2021-03-24 16:21:11 +08:00
clesaec
9d04099b86 TDI-45772 : unarchive correction (#5981)
sbliu
e3775bacfe fix(TUP-25417) JDK11: running a job that calls a child job using twebservice meets an error.
use a relative path for building the parent job (whose child job contains esb cxf components) on jdk11
2021-03-23 14:55:04 +08:00
Jane Ding
0e1a65b82f fix(TUP-30615):Schema Update Detection popping up every time upon opening (#5973)
the job
https://jira.talendforge.org/browse/TUP-30615

Signed-off-by: jding-tlnd <jding@talend.com>
2021-03-23 11:30:23 +08:00
Oleksandr Zhelezniak
75c51b6dec feat(TDI-45746): fix date context variable (#5943)
* fix the logic of passing date variable to sub jobs
2021-03-19 10:58:02 +02:00
Dmytro Sylaiev
f809f597b4 fix(TDI-45741): Fix checkbox visibility (#5924) 2021-03-19 09:53:22 +02:00
sbliu
97bad0d5ca chore(TUP-30522) add test case for version contains 'SNAPSHOT' 2021-03-18 14:16:56 +08:00
apoltavtsev
a74a54214e fix(TESB-32507) Correct manifest generation for org.talend.esb.authorization.xacml.rt.pep 2021-03-17 20:21:23 +01:00
Jane Ding
bbc2e81686 fix(TUP-30758):tSingleStoreOutputBulkExec can't work (#5964)
https://jira.talendforge.org/browse/TUP-30758

Signed-off-by: jding-tlnd <jding@talend.com>
2021-03-17 09:43:47 +08:00
Max
6bf37640b9 fix(TBD-12011): tDBclose connection deltalake on error - moved to correct package (#5905) 2021-03-16 11:50:49 +02:00
clesaec
b5d8c8d0f3 Clesaec/tdi 45301 t s3 acl (#5829)
* TDI-45301 - ACL canned options added
2021-03-16 10:41:20 +01:00
Oleksandr Zhelezniak
95afb4904e feat(TCOMP-1877): clean cached libs studio-integration (#5961)
* clean cached libs during clean phase for studio-integration plugin
* helps to avoid using an out-of-date version of jars in the plugin
2021-03-16 10:45:16 +02:00
Zhiwei Xue
322a55e751 feat(TUP-29014): bugfix 2021-03-15 (#5951) 2021-03-15 15:33:36 +08:00
SunChaoqun
028578141e TESB-32453:DemoServiceConsumer/DemoRESTConsumer fail to deploy to (#5952)
runtime with message"[statistics] disconnected"
2021-03-15 15:04:23 +08:00
Dmytro Grygorenko
d7c09e2d71 feat(TDI-45590): migration task for CosmosDB (#5861) 2021-03-14 08:25:45 +02:00
AlixMetivier
24ae727858 feat(TBD-11882): update tCollectAndCheck for BD (#5919) 2021-03-12 16:49:57 +01:00
hzhao-talendbj
1e39f1e09c TUP-30589 fix junit failed (missing jar beanutils-1.9.2 & axis-1.4) (#5939) 2021-03-12 10:30:56 +08:00
SunChaoqun
3c58d86789 TESB-31465:[7.3.1] Studio ESB build performance issue (#5931)
* TESB-31465:[7.3.1] Studio ESB build performance issue

* TESB-32391
[7.3.1] Incorrect OSGi manifest for Routes

* TESB-32391
[7.3.1] Incorrect OSGi manifest for Routes

* TESB-32391:[7.3.1] Incorrect OSGi manifest for Routes

* TESB-32391:[7.3.1] Incorrect OSGi manifest for Routes

* TESB-32391:[7.3.1] Incorrect OSGi manifest for Routes
2021-03-11 23:01:59 +08:00
SunChaoqun
d51d53c3b5 TESB-30792:Upgrade maven plugins (#5932) 2021-03-11 23:01:39 +08:00
Chao MENG
2ca61108c6 fix(TUP-30651): Login page won't be refreshed after delete project (#5940)
https://jira.talendforge.org/browse/TUP-30651
2021-03-11 16:44:43 +08:00
Zhiwei Xue
8c2ea5dd99 feat(TUP-29014):disable rename of custom jar (#5942) 2021-03-11 16:24:51 +08:00
sbliu
ba7c5e45c2 fix(TUP-30257) TMAP - Java & Traces Debug preview not working 2021-03-11 15:03:00 +08:00
zyuan-talend
ba7830ad5c fix(TUP-30589): Mockito cannot mock this class. (#5937) 2021-03-11 11:29:23 +08:00
sbliu
82bc2123f1 fix(TUP-30109) Log4J preferences does not save
backport TUP-26197: no need to force log4j2 when importing an old project that is inactive and uses log4j1.
resolved a problem where, after restoring default values, the preference changes could not be saved on the log4j preference page.
2021-03-11 10:54:27 +08:00
pyzhou
5f70c22c91 fix(TDI-45727): replace default IP for proxy (#5917)
* fix(TDI-45727): replace default IP for proxy

* add migration/RemoveDefaultProxyIPTask.java

* add debug

* remove debug
2021-03-10 16:34:54 +08:00
kjwang
9f48439f53 TUP-30648 Migration: tRun job cannot run if Install 731 R02 monthly (#5929)
TUP-30648 Migration: tRun job cannot run if Install 731 R02 monthly patch plus R03 temp patch
https://jira.talendforge.org/browse/TUP-30648
2021-03-10 15:44:50 +08:00
bhe-talendbj
cf25104e30 bugfix(TUP-30378): Talend 7.3.1 tDBInput or tMSSqlInput component will not open query editor (#5886)
* fix(TUP-30378): add exception log

* fix(TUP-30378): add log

* fix(TUP-30378): run open sqlbuilder in background
2021-03-10 10:55:46 +08:00
Jane Ding
ea33684b50 feat(TUP-30169):adapt the tjdbc bulk components in TDI-45487 for (#5862)
* feat(TUP-30169):adapt the tjdbc bulk components in TDI-45487 for
studio and support singlestore database only with a whitelist
https://jira.talendforge.org/browse/TUP-30169

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-30169):adapt the tjdbc bulk components in TDI-45487 for
studio and support singlestore database only with a whitelist
https://jira.talendforge.org/browse/TUP-30169

Signed-off-by: jding-tlnd <jding@talend.com>

* fix(TUP-30169):adapt the tjdbc bulk components in TDI-45487 for studio
and support singlestore database only with a whitelist
https://jira.talendforge.org/browse/TUP-30169

Signed-off-by: jding-tlnd <jding@talend.com>

* fix(TUP-28699):[Bug] The mapping is wrong after dragging (#5514)

tELTMap/tJDBCSCDELT from the created metadata
https://jira.talendforge.org/browse/TUP-28699

Signed-off-by: jding-tlnd <jding@talend.com>

Conflicts:
	main/plugins/org.talend.designer.core/src/main/java/org/talend/designer/core/ui/editor/cmd/ChangeValuesFromRepository.java


Signed-off-by: jding-tlnd <jding@talend.com>

* fix(TUP-28699):[Bug] The mapping is wrong after dragging (#5514)

tELTMap/tJDBCSCDELT from the created metadata
https://jira.talendforge.org/browse/TUP-28699

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-30169):adapt the tjdbc bulk components in TDI-45487 for
studio
and support singlestore database only with a whitelist
https://jira.talendforge.org/browse/TUP-30169

Signed-off-by: jding-tlnd <jding@talend.com>
2021-03-09 17:39:55 +08:00
vyu-talend
61c03b2eda fix(TDI-45702):fix the syntax error in eltoutput (#5895) 2021-03-09 17:12:39 +08:00
Zhiwei Xue
127c703af5 feat(TUP-29014): Add only compile code projects function for TDM 2021-03-09 16:08:54 +08:00
Zhiwei Xue
0176cb23ca feat(TUP-29014): Add only compile code projects function for TDM (#5921) 2021-03-09 15:51:29 +08:00
sbliu
d38412eb01 fix(TUP-30250) Not able to share snowflake connection between job and joblet. 2021-03-09 14:25:50 +08:00
Zhiwei Xue
f041bee6b8 feat(TUP-29014): Only build and package beans/routines that are used in Route/Job (#5794)
* feat(TUP-29018): Rearrange single routines into jars

* feat(TUP-29017):Setup routine dependencies for Job

* feat(TUP-29019): Generation and Build: Build routine jars in different
maven projects.

* feat(TUP-29019): Build routine jars in different maven projects

* feat(TUP-29019): fix codesjar cache and update job maven project problem

* feat(TUP-29019): fix codesjar cache and update job maven project problem

* feat(TUP-29019): fix several issues

* feat(TUP-29943):should not have create routines action when object in (#5715)

recycle bin
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29014): refactor codesjar resource cache

* feat(TUP-29019): fix wrong codesjar groupid of ref project in classpath

* feat(TUP-29943):codeJar import items issue (#5725)

* feat(TUP-29943):codeJar import items issue
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):codeJar import items issue
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29014): fix codesjar dependencies can't set required issue

* TESB-31500:[Dependency Management] Only build and package beans that are
used in Route

* feat(TUP-29014): fix NPE of data preview

* feat(TUP-29943): export codeJar with lib modules, import codeJar with (#5740)

lib modules
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29019): improve update codesjar project logic

* feat(TUP-29014):fix assign routine wrong package issue.

* feat(TUP-29014):fix empty class files of codesjar m2 jar issue.

* feat(TUP-29014):fix several assign to action issues and NPE of data
preview

* feat(TUP-29014):support edit codes dependencies for routelet and set
this action read only for testcases

* feat(TUP-29014):fix several import issue and build code jar problem

* feat(TUP-29014): fix nl and some spell issues

* feat(TUP-29943):add codejar check delete reference (#5773)

https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):export and import to deploy libs issue (#5779)

https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29014):fix i18n issue

* feat(TUP-29014):revert the change for building project

* feat(TUP-29014): update ref code projects after resolved dependencies

* Revert "feat(TUP-29014): update ref code projects after resolved dependencies"

This reverts commit 5a93e784e7.

* feat(TUP-29014): support short class name for custom jars in component

* feat(TUP-29943):rename issues (#5823)

* feat(TUP-29943):rename issues
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):rename innercode; won't store name for codejar
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):remove old module for properties change and delete
forever
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):check if relationships exist, warn user to re-generate all
poms
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):reinstall codejar after inner code rename
https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29014): improve update codesjar project performance and fix
regression

* feat(TUP-29014):fix rename regressions and improve import performance

* feat(TUP-29943):build out job not include inner code items (#5852)

https://jira.talendforge.org/browse/TUP-29943

Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29943):disable export action for innercode, and duplicate (#5854)

import issue
https://jira.talendforge.org/browse/TUP-29943
Signed-off-by: jding-tlnd <jding@talend.com>

* feat(TUP-29014):fix bd generate code and data preview issues

* feat(TUP-30584):Do not show custom routine/bean jars in TOS (#5898)

* fix(TUP-30597):Fix junit failures caused by dependency management (#5899)

feature

* feat(TUP-29014):fix several git reset issues

* feat(TUP-29014): bugfix 2021-03-05

* feat(TUP-29014): bugfix 2021-03-08

* feat(TUP-29014): bugfix 2021-03-08 2

* feat(TUP-29014):bugfix 2021-03-09

Co-authored-by: Jane Ding <jding@talend.com>
Co-authored-by: SunChaoqun <csun@talend.com>
2021-03-09 12:13:05 +08:00
hcyi
dfcd6e3f2d fix(TUP-30043):Add a new option for tELTOracleMap to generate column alias on selection. (#5806)
* fix(TUP-30043):Add a new option for tELTOracleMap to generate column
alias on selection.

* fix(TUP-30043):Add a new option for tELTOracleMap to generate column
alias on selection.

* fix(TUP-30043):Add a new option for tELTOracleMap to generate column
alias on selection.

* fix(TUP-30043):update title for the new option for tELTOracleMap.

* fix(TUP-30043):Add a new option for tELTOracleMap to generate column
alias on selection.
2021-03-08 18:28:54 +08:00
sbliu
0a7e0e56e4 fix(TUP-30041) do not show warning message if db type column does not show. (#5723) 2021-03-08 14:04:17 +08:00
hzhao-talendbj
bd2e612a44 TUP-30333 tMap with Lookup model Reload at each row freezes Studio (#5817) (#5874)
* TUP-30333 tMap with Lookup model Reload at each row freezes Studio

* TUP-30333 add comments

* TUP-30333 add condition for refresh background
2021-03-08 09:50:43 +08:00
SunChaoqun
d561e36a7e TESB-31465:[7.3.1] Studio ESB build performance issue (#5864)
* TESB-31465:[7.3.1] Studio ESB build performance issue

* TESB-31465:[7.3.1] Studio ESB build performance issue
2021-03-05 18:40:08 +08:00
jiezhang-tlnd
2fd9e82220 TUP-26534 (#4813) (#5769)
Co-authored-by: hzhao-talendbj <49395568+hzhao-talendbj@users.noreply.github.com>
Co-authored-by: hzhao-talendbj <hzhao@talend.com>
2021-03-05 14:43:05 +08:00
kjwang
11a41a331e Fix: TUP-26185 Merge GIT branches - Conflict resolution - the "Compare (#5876)
Fix: TUP-26185 Merge GIT branches - Conflict resolution - the "Compare Result" view does not display differences with a tELTMap component
https://jira.talendforge.org/browse/TUP-26185
2021-03-04 14:12:06 +08:00
Jane Ding
e9fa81a1c8 fix(TUP-30548):Debugger does not work in 7.3 if the installation path (#5894)
* fix(TUP-30548):Debugger does not work in 7.3 if the installation path
contains space
https://jira.talendforge.org/browse/TUP-30548

Signed-off-by: jding-tlnd <jding@talend.com>

* fix(TUP-30548):Debugger does not work in 7.3 if the installation path
contains space
https://jira.talendforge.org/browse/TUP-30548

Signed-off-by: jding-tlnd <jding@talend.com>
2021-03-03 22:54:28 +08:00
Emmanuel GALLOIS
9153d30f6e feat(TDI-45704): bump component-runtime to 1.30.0 (#5891)
* feat(TDI-45704): bump component-runtime to 1.30.0
* feat(TDI-45704): fix tests
* feat(TDI-45704): cleanup imports
2021-03-03 10:06:21 +01:00
wang wei
dd863cfd15 fix(TDI-45561): Getting Permission denied error in tFileInputExcel in (#5784)
Co-authored-by: qyliu <qyliu@talend.com>
2021-03-03 10:52:01 +08:00
pyzhou
55d48cfe91 fix(TDI-45418):Upgrade Jackson libraries (#5884) 2021-03-03 10:48:44 +08:00
vyu-talend
7b325e8707 fix(TDI-45613):fix the issue in md5. (#5851) 2021-03-02 18:25:49 +08:00
jiezhang-tlnd
7465b41a34 TUP-27851 Upgrade xstream to xstream 1.4.12 (#5788) 2021-03-01 18:33:37 +08:00
jiezhang-tlnd
79fb201844 chore(TUP-27224)Update Daikon Crypto Utils to 1.15.0 (#5807)
* chore(TUP-27224)Update Daikon Crypto Utils to 1.15.0

* chore(TUP-27224)add migration
2021-03-01 15:50:40 +08:00
pyzhou
45edbf18a1 fix(TDI-45668) CVE ant tfileUnactive (#5868) 2021-03-01 09:30:38 +08:00
bhe-talendbj
f347a16522 chore(TUP-30230): Remove org.talend.libraries.apache.batik (#5766) 2021-02-26 14:39:26 +08:00
apoltavtsev
b9e4faf2bd fix(TESB-32252) Ignore "SNAPSHOT" during dependencies comparison 2021-02-25 07:12:29 +01:00
1153 changed files with 31596 additions and 25107 deletions

View File

@@ -584,13 +584,11 @@ EParameterName.jdbcURL=JDBC URL
EParameterName.driverJar=Driver jar
EParameterName.className=Class name
EParameterName.mappingFile=Mapping file
-SetupProcessDependenciesRoutinesAction.title=Setup routine dependencies
+SetupProcessDependenciesRoutinesAction.title=Setup Codes Dependencies
SetupProcessDependenciesRoutinesDialog.systemRoutineLabel=System routines
SetupProcessDependenciesRoutinesDialog.userRoutineLabel=User routines
PerformancePreferencePage.addAllSystemRoutines=Add all system routines to job dependencies, when creating a new job
PerformancePreferencePage.addAllUserRoutines=Add all user routines to job dependencies, when creating a new job
-ShowRoutineItemsDialog.systemTitle=Select Sytem Routines
-ShowRoutineItemsDialog.title=Select Routines
AbstractMultiPageTalendEditor_pleaseWait=Saving Please Wait....
DocumentationPreferencePage.use_css_template=Use CSS file as a template when export to HTML
DocumentationPreferencePage.css_file=CSS File

View File

@@ -1,5 +1,5 @@
NavigatorContent.contexts=Contexts
-NavigatorContent.routines=Routines
+NavigatorContent.routines=Global Routines
NavigatorContent.sqlTemplates=SQL Templates
NavigatorContent.documentation=Documentation
NavigatorContent.activation=di.fake.for.activation

View File

@@ -6,7 +6,7 @@
<license url="http://www.example.com/license">[Enter License Description here.]</license>
<requires>
<import feature="org.eclipse.test" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.junit" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.junit" version="4.13.2" match="greaterOrEqual"/>
<import plugin="org.talend.commons.runtime" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.talend.commons.ui" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.talend.core" version="0.0.0" match="greaterOrEqual"/>

View File

@@ -16,7 +16,6 @@
</requires>
<plugin id="org.talend.libraries.apache" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.axis2" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.batik" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.chemistry" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.common" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.cxf" download-size="0" install-size="0" version="0.0.0"/>
@@ -51,5 +50,4 @@
<plugin id="org.talend.libraries.slf4j" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.xml" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.zmq" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.zookeeper" download-size="0" install-size="0" version="0.0.0"/>
</feature>

View File

@@ -412,7 +412,7 @@
if(ignoredParamsNames.contains(name)) {
//do nothing
-} else if(org.talend.core.model.process.EParameterFieldType.PASSWORD.equals(ep.getFieldType())){
+} else if(org.talend.core.model.process.EParameterFieldType.PASSWORD.equals(ep.getFieldType()) || org.talend.core.model.process.EParameterFieldType.HIDDEN_TEXT.equals(ep.getFieldType())){
//not log password
}else{
String value = org.talend.core.model.utils.NodeUtil.getRuntimeParameterValue(node, ep);

View File

@@ -126,8 +126,16 @@
boolean exist_tSCP = false;
List<INode> scpComponentsList = (List<INode>)process.getNodesOfType("tSCPConnection");
-if (scpComponentsList.size() > 0) {
+String parameterNames = "";
+int scpsize = scpComponentsList.size();
+if (scpsize > 0) {
exist_tSCP = true;
+for (int i = 0; i < scpsize; i++) {
+parameterNames += "\"conn_" + scpComponentsList.get(i).getUniqueName() + "\"";
+if(i < scpsize-1){
+parameterNames += ",";
+}
+}
}
boolean exist_tCassandra = false;
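
In the hunk above, instead of a bare size check, the template now collects one quoted globalMap key per tSCPConnection ("conn_" plus the component's unique name) into a comma-separated string that is later spliced into the generated cleanup call. A minimal standalone sketch of the same list building (ScpKeyList and buildParameterNames are illustrative names, not Talend identifiers):

import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;

public class ScpKeyList {

    // Builds "conn_tSCPConnection_1","conn_tSCPConnection_2",... exactly as the
    // string concatenation in the hunk above does, just with a StringJoiner.
    static String buildParameterNames(List<String> uniqueNames) {
        StringJoiner joiner = new StringJoiner(",");
        for (String uniqueName : uniqueNames) {
            joiner.add("\"conn_" + uniqueName + "\"");
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildParameterNames(Arrays.asList("tSCPConnection_1", "tSCPConnection_2")));
        // prints: "conn_tSCPConnection_1","conn_tSCPConnection_2"
    }
}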
@@ -506,17 +514,35 @@
}
org.apache.logging.log4j.core.config.Configurator.setLevel(org.apache.logging.log4j.LogManager.getRootLogger().getName(), log.getLevel());
<%}%>
-}
-log.info("TalendJob: '<%=codeGenArgument.getJobName()%>' - Start.");
-<%}%>
-<%
-INode jobCatcherNode = null;
+}
+log.info("TalendJob: '<%=codeGenArgument.getJobName()%>' - Start.");
+<%}%>
+<%
+INode jobCatcherNode = null;
+int threadPoolSize = 0;
+boolean tRESTRequestLoopExists = false;
for (INode nodeInProcess : process.getGeneratingNodes()) {
String componentName = nodeInProcess.getComponent().getName();
-if("tJobStructureCatcher".equals(componentName)) {
+if(jobCatcherNode==null && "tJobStructureCatcher".equals(componentName)) {
jobCatcherNode = nodeInProcess;
-break;
+continue;
}
+if(!nodeInProcess.isActivate()) continue;
+if("tRESTRequestLoop".equals(componentName)) {
+tRESTRequestLoopExists = true;
+continue;
+}
+if("tWriteXMLFieldOut".equals(componentName)) {
+IConnection nextMergeConn = NodeUtil.getNextMergeConnection(nodeInProcess);
+if(nextMergeConn == null || nextMergeConn.getInputId()==1){
+threadPoolSize++;
+}
+}
}
@@ -644,15 +670,15 @@
boolean inOSGi = routines.system.BundleUtils.inOSGi();
-if (inOSGi) {
-java.util.Dictionary<String, Object> jobProperties = routines.system.BundleUtils.getJobProperties(jobName);
-if (jobProperties != null) {
-contextStr = (String)jobProperties.get("context");
-}
-}
try {
+java.util.Dictionary<String, Object> jobProperties = null;
+if (inOSGi) {
+jobProperties = routines.system.BundleUtils.getJobProperties(jobName);
+if (jobProperties != null && jobProperties.get("context") != null) {
+contextStr = (String)jobProperties.get("context");
+}
+}
//call job/subjob with an existing context, like: --context=production. if this parameter is absent, the default context will be used instead.
java.io.InputStream inContext = <%=className%>.class.getClassLoader().getResourceAsStream("<%=jobClassPackageFolder%>/contexts/" + contextStr + ".properties");
if (inContext == null) {
@@ -662,8 +688,17 @@
try {
//defaultProps is in order to keep the original context value
if(context != null && context.isEmpty()) {
-defaultProps.load(inContext);
-context = new ContextProperties(defaultProps);
+defaultProps.load(inContext);
+if (inOSGi && jobProperties != null) {
+java.util.Enumeration<String> keys = jobProperties.keys();
+while (keys.hasMoreElements()) {
+String propKey = keys.nextElement();
+if (defaultProps.containsKey(propKey)) {
+defaultProps.put(propKey, (String) jobProperties.get(propKey));
+}
+}
+}
+context = new ContextProperties(defaultProps);
}
} finally {
inContext.close();
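
The change above moves the OSGi lookup inside the try block and lets bundle-level job properties override matching keys loaded from the packaged context file. A minimal sketch of that overlay pattern using only java.util classes (ContextOverlay and loadContext are illustrative names, not Talend identifiers):

import java.io.IOException;
import java.io.InputStream;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Properties;

public class ContextOverlay {

    // Load defaults from contexts/<name>.properties, then let any matching key
    // from the OSGi bundle's job properties win, mirroring the hunk above.
    static Properties loadContext(String resourcePath, Dictionary<String, Object> jobProperties)
            throws IOException {
        Properties defaults = new Properties();
        try (InputStream in = ContextOverlay.class.getClassLoader().getResourceAsStream(resourcePath)) {
            if (in != null) {
                defaults.load(in);
            }
        }
        if (jobProperties != null) {
            Enumeration<String> keys = jobProperties.keys();
            while (keys.hasMoreElements()) {
                String key = keys.nextElement();
                if (defaults.containsKey(key)) { // only override keys the context declares
                    defaults.put(key, (String) jobProperties.get(key));
                }
            }
        }
        return defaults;
    }
}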
@@ -733,34 +768,39 @@
<%
} else if(typeToGenerate.equals("java.util.Date")) {
%>
-try{
String context_<%=ctxParam.getName()%>_value = context.getProperty("<%=ctxParam.getName()%>");
-if (context_<%=ctxParam.getName()%>_value == null){
-context_<%=ctxParam.getName()%>_value = "";
-}
-int context_<%=ctxParam.getName()%>_pos = context_<%=ctxParam.getName()%>_value.indexOf(";");
-String context_<%=ctxParam.getName()%>_pattern = "yyyy-MM-dd HH:mm:ss";
-if(context_<%=ctxParam.getName()%>_pos > -1){
-context_<%=ctxParam.getName()%>_pattern = context_<%=ctxParam.getName()%>_value.substring(0, context_<%=ctxParam.getName()%>_pos);
-context_<%=ctxParam.getName()%>_value = context_<%=ctxParam.getName()%>_value.substring(context_<%=ctxParam.getName()%>_pos + 1);
-}
+try{
+if (context_<%=ctxParam.getName()%>_value == null){
+context_<%=ctxParam.getName()%>_value = "";
+}
+int context_<%=ctxParam.getName()%>_pos = context_<%=ctxParam.getName()%>_value.indexOf(";");
+String context_<%=ctxParam.getName()%>_pattern = "yyyy-MM-dd HH:mm:ss";
+if(context_<%=ctxParam.getName()%>_pos > -1){
+context_<%=ctxParam.getName()%>_pattern = context_<%=ctxParam.getName()%>_value.substring(0, context_<%=ctxParam.getName()%>_pos);
+context_<%=ctxParam.getName()%>_value = context_<%=ctxParam.getName()%>_value.substring(context_<%=ctxParam.getName()%>_pos + 1);
+}
-context.<%=ctxParam.getName()%>=(java.util.Date)(new java.text.SimpleDateFormat(context_<%=ctxParam.getName()%>_pattern).parse(context_<%=ctxParam.getName()%>_value));
+context.<%=ctxParam.getName()%>=(java.util.Date)(new java.text.SimpleDateFormat(context_<%=ctxParam.getName()%>_pattern).parse(context_<%=ctxParam.getName()%>_value));
-} catch(ParseException e) {
+} catch(ParseException e) {
+try { <% /*try to check if date passed as long also*/ %>
+long context_<%=ctxParam.getName()%>_longValue = Long.valueOf(context_<%=ctxParam.getName()%>_value);
+context.<%=ctxParam.getName()%> = new java.util.Date(context_<%=ctxParam.getName()%>_longValue);
+} catch (NumberFormatException cantParseToLongException) {
<%
-if (isLog4jEnabled) {
+if (isLog4jEnabled) {
%>
-log.warn(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", e.getMessage()));
+log.warn(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", "Can't parse date string: " + e.getMessage() + " and long: " + cantParseToLongException.getMessage()));
<%
-} else {
+} else {
%>
-System.err.println(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", e.getMessage()));
+System.err.println(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", "Can't parse date string: " + e.getMessage() + " and long: " + cantParseToLongException.getMessage()));
<%
}
-%>
-context.<%=ctxParam.getName()%>=null;
-}
+}
+%>
+context.<%=ctxParam.getName()%>=null;
+}
<%
} else if(typeToGenerate.equals("Object")||typeToGenerate.equals("String")||typeToGenerate.equals("java.lang.String")) {
%>
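
The date-typed context parameter now falls back to epoch milliseconds when the "pattern;value" form fails to parse, which is what lets a full date survive the hand-off to an independent child job (TDI-43931/TDI-45746). A self-contained sketch of the same parsing logic (DateContextParser is an illustrative name, not a Talend class):

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class DateContextParser {

    // A context date arrives as "<pattern>;<value>" or a bare value using the
    // default pattern; if string parsing fails, retry the value as epoch millis.
    static Date parse(String raw) {
        String value = raw == null ? "" : raw;
        String pattern = "yyyy-MM-dd HH:mm:ss";
        int pos = value.indexOf(';');
        if (pos > -1) {
            pattern = value.substring(0, pos);
            value = value.substring(pos + 1);
        }
        try {
            return new SimpleDateFormat(pattern).parse(value);
        } catch (ParseException e) {
            try {
                return new Date(Long.valueOf(value)); // date passed as long
            } catch (NumberFormatException cantParseToLong) {
                return null; // the generated code logs a warning and nulls the field
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(parse("2021-04-26 11:00:03"));
        System.out.println(parse("1619427603000")); // epoch millis fallback
    }
}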
@@ -1177,6 +1217,26 @@ this.globalResumeTicket = true;//to run tPostJob
e.printStackTrace();
}
}
+<%
+}
+//tRESTRequest may appear in a microservice; this code may be called before the submit(task) method, so the pool can't be shut down here
+if(!tRESTRequestLoopExists && threadPoolSize>0) {
+%>
+es.shutdown();
+<%//shutdownNow should never be executed, only for safety%>
+try {
+if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {
+es.shutdownNow();
+if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {
+}
+}
+} catch (java.lang.InterruptedException ie) {
+es.shutdownNow();
+} catch (java.lang.Exception e) {
+}
+<%
+}
+%>
@@ -1185,9 +1245,12 @@ this.globalResumeTicket = true;//to run tPostJob
closeJmsConnections();
<% } %>
-<% if (exist_tSCP) { %>
-closeScpConnections();
-<% } %>
+<% if (exist_tSCP) {
+%>
+closeCloseableConnections(<%=parameterNames%>);
+<%
+}
+%>
<%
if (stats) {
@@ -1241,7 +1304,7 @@ if (execStat) {
closeJmsConnections();
<% } %>
<% if(exist_tSCP) { %>
-closeScpConnections();
+closeCloseableConnections(<%=parameterNames%>);
<% } %>
<% if (exist_tSQLDB) { %>
closeSqlDbConnections();
@@ -1309,22 +1372,17 @@ if (execStat) {
<%
if(exist_tSCP) {
%>
-private void closeScpConnections() {
-try {
-Object obj_conn;
-<%
-for (INode scpNode : scpComponentsList) {
-%>
-obj_conn = globalMap.remove("conn_<%=scpNode.getUniqueName() %>");
-if (null != obj_conn) {
-((ch.ethz.ssh2.Connection) obj_conn).close();
+private void closeCloseableConnections(String... names) {
+java.util.Arrays.stream(names).forEach(name-> {
+try {
+Object obj_conn = globalMap.remove(name);
+if(obj_conn != null){
+((java.io.Closeable)obj_conn).close();
+}
+} catch (IOException ioException) {
+}
-<%
-}
-%>
-} catch (java.lang.Exception e) {
-}
-}
+});
+}
<%
}
%>
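
closeCloseableConnections replaces the SCP-specific cleanup: any object stored under the given globalMap keys is closed through java.io.Closeable rather than being cast to ch.ethz.ssh2.Connection. A runnable sketch of the generated shape (the surrounding class and map are stand-ins, not Talend code):

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class ConnectionCleanup {

    // Stand-in for the job's globalMap of shared handles.
    private final Map<String, Object> globalMap = new HashMap<>();

    // Same shape as the generated method: remove each named handle and close it
    // through Closeable instead of a concrete SCP connection class.
    void closeCloseableConnections(String... names) {
        Arrays.stream(names).forEach(name -> {
            try {
                Object conn = globalMap.remove(name);
                if (conn != null) {
                    ((Closeable) conn).close();
                }
            } catch (IOException ignored) {
                // the generated code swallows close failures the same way
            }
        });
    }
}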
@@ -1434,6 +1492,7 @@ if (execStat) {
if ("sftp".equals(type)) { %>
((com.jcraft.jsch.ChannelSftp) obj_conn).quit();
<%} else { %>
((org.apache.commons.net.ftp.FTPClient) obj_conn).logout();
+((org.apache.commons.net.ftp.FTPClient) obj_conn).disconnect();
<%}%>
}

View File

@@ -56,11 +56,25 @@ if ((metadatas != null) && (metadatas.size() > 0)) { // metadata
// Set up the component definition, and the properties for all types of
// components.
+List<? extends IConnection> allInLineJobConns = NodeUtil.getFirstIncomingLineConnectionsOfType(node, "tRESTRequestIn");
%>
+boolean doesNodeBelongToRequest_<%=cid%> = <%= allInLineJobConns.size() %> == 0;
+@SuppressWarnings("unchecked")
+java.util.Map<String, Object> restRequest_<%=cid%> = (java.util.Map<String, Object>)globalMap.get("restRequest");
+String currentTRestRequestOperation_<%=cid%> = (String)(restRequest_<%=cid%> != null ? restRequest_<%=cid%>.get("OPERATION") : null);
+<%
+for (IConnection inLineConn : allInLineJobConns) {
+%>
+if("<%= inLineConn.getName() %>".equals(currentTRestRequestOperation_<%=cid%>)) {
+doesNodeBelongToRequest_<%=cid%> = true;
+}
+<%
+}
+%>
org.talend.components.api.component.ComponentDefinition def_<%=cid %> =
new <%= def.getClass().getName()%>();
org.talend.components.api.component.runtime.Writer writer_<%=cid%> = null;
org.talend.components.api.component.runtime.Reader reader_<%=cid%> = null;
@@ -149,7 +163,7 @@ globalMap.put("TALEND_COMPONENTS_VERSION", "<%=component.getVersion()%>");
boolean isParallelize ="true".equalsIgnoreCase(ElementParameterParser.getValue(node, "__PARALLELIZE__"));
if (isParallelize) {
%>
-final String buffersSizeKey_<%=cid%> = "buffersSizeKey_<%=cid%>_" + Thread.currentThread().getId();
+final String buffersSizeKey_<%=cid%> = "buffersSizeKey_<%=cid%>_" + Thread.currentThread().getId();
<%
}
%>
@@ -219,9 +233,11 @@ if(componentRuntime_<%=cid%> instanceof org.talend.components.api.component.runt
org.talend.components.api.component.runtime.SourceOrSink sourceOrSink_<%=cid%> = null;
if(componentRuntime_<%=cid%> instanceof org.talend.components.api.component.runtime.SourceOrSink) {
sourceOrSink_<%=cid%> = (org.talend.components.api.component.runtime.SourceOrSink)componentRuntime_<%=cid%>;
-org.talend.daikon.properties.ValidationResult vr_<%=cid%> = sourceOrSink_<%=cid%>.validate(container_<%=cid%>);
-if (vr_<%=cid%>.getStatus() == org.talend.daikon.properties.ValidationResult.Result.ERROR ) {
-throw new RuntimeException(vr_<%=cid%>.getMessage());
+if (doesNodeBelongToRequest_<%=cid%>) {
+org.talend.daikon.properties.ValidationResult vr_<%=cid%> = sourceOrSink_<%=cid%>.validate(container_<%=cid%>);
+if (vr_<%=cid%>.getStatus() == org.talend.daikon.properties.ValidationResult.Result.ERROR ) {
+throw new RuntimeException(vr_<%=cid%>.getMessage());
+}
}
}
@@ -297,13 +313,13 @@ if (hasOutputOnly || asInputComponent) {
for (; available_<%=cid%>; available_<%=cid%> = reader_<%=cid%>.advance()) {
nb_line_<%=cid %>++;
<%if(hasDataOutput) {%>
if (multi_output_is_allowed_<%=cid%>) {
<%if(main!=null){%>
<%=main.getName()%> = null;
<%}%>
<%if(reject!=null){%>
<%=reject.getName()%> = null;
<%}%>
@@ -315,11 +331,11 @@ if (hasOutputOnly || asInputComponent) {
<%
if (main != null) {
%>
if(multi_output_is_allowed_<%=cid%>) {
<%=main.getName()%> = new <%=main.getName() %>Struct();
}
<%
irToRow.generateConvertRecord("data_" + cid, main.getName(), main.getMetadataTable().getListColumns());
}
@@ -330,7 +346,7 @@ if (hasOutputOnly || asInputComponent) {
if (reject!=null) {
%>
Object data_<%=cid%> = info_<%=cid%>.get("talend_record");
if (multi_output_is_allowed_<%=cid%>) {
<%=reject.getName()%> = new <%=reject.getName() %>Struct();
}
@@ -343,19 +359,19 @@ if (hasOutputOnly || asInputComponent) {
}
<%
Set<String> commonColumns = new HashSet<String>();
for (IMetadataColumn column : columnList) {
commonColumns.add(column.getLabel());
}
//pass error columns
List<IMetadataColumn> rejectColumns = reject.getMetadataTable().getListColumns();
for(IMetadataColumn column : rejectColumns) {
String columnName = column.getLabel();
// JavaType javaType = JavaTypesManager.getJavaTypeFromId(column.getTalendType());
String typeToGenerate = JavaTypesManager.getTypeToGenerate(column.getTalendType(), column.isNullable());
//error columns
if(!commonColumns.contains(columnName)) {
%>
@@ -385,7 +401,7 @@ if (hasOutputOnly || asInputComponent) {
<%
}
%>
-} // end of catch
+} // end of catch
<%
// The for loop around the incoming records from the reader is left open.
@@ -397,9 +413,13 @@ if (hasOutputOnly || asInputComponent) {
org.talend.components.api.component.runtime.Sink sink_<%=cid%> =
(org.talend.components.api.component.runtime.Sink)sourceOrSink_<%=cid%>;
org.talend.components.api.component.runtime.WriteOperation writeOperation_<%=cid%> = sink_<%=cid%>.createWriteOperation();
-writeOperation_<%=cid%>.initialize(container_<%=cid%>);
+if (doesNodeBelongToRequest_<%=cid%>) {
+writeOperation_<%=cid%>.initialize(container_<%=cid%>);
+}
writer_<%=cid%> = writeOperation_<%=cid%>.createWriter(container_<%=cid%>);
-writer_<%=cid%>.open("<%=cid%>");
+if (doesNodeBelongToRequest_<%=cid%>) {
+writer_<%=cid%>.open("<%=cid%>");
+}
resourceMap.put("writer_<%=cid%>", writer_<%=cid%>);
} // end of "sourceOrSink_<%=cid%> instanceof ...Sink"
@@ -448,7 +468,7 @@ if (hasOutputOnly || asInputComponent) {
}
}
%>
java.lang.Iterable<?> outgoingMainRecordsList_<%=cid%> = new java.util.ArrayList<Object>();
java.util.Iterator outgoingMainRecordsIt_<%=cid%> = null;
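
The doesNodeBelongToRequest_<cid> flag added across the hunks above gates validation and writer setup so that, inside a tRESTRequest flow, a component only initializes when the current request's OPERATION matches one of its incoming connection names. A hedged sketch of that check in plain Java (OperationGate and belongsToRequest are illustrative names):

import java.util.Map;

public class OperationGate {

    // A node outside any operation branch (no incoming tRESTRequestIn lines)
    // always runs; otherwise it runs only for its own OPERATION.
    static boolean belongsToRequest(Map<String, Object> restRequest, String... inLineConnNames) {
        if (inLineConnNames.length == 0) {
            return true;
        }
        String currentOperation = restRequest != null ? (String) restRequest.get("OPERATION") : null;
        for (String connName : inLineConnNames) {
            if (connName.equals(currentOperation)) {
                return true;
            }
        }
        return false;
    }
}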

View File

@@ -120,7 +120,8 @@ if(hasInput){
dm_<%=cid%>.getLogicalType(),
dm_<%=cid%>.getFormat(),
dm_<%=cid%>.getDescription(),
-dm_<%=cid%>.isNullable());
+dm_<%=cid%>.isNullable(),
+dm_<%=cid%>.isKey());
}
incomingEnforcer_<%=cid%>.createRuntimeSchema();
}

View File

@@ -170,16 +170,8 @@ class IndexedRecordToRowStructGenerator {
if (columnName.equals(dynamicColName)) {
%>
java.util.Map<String, Object> dynamicValue_<%=cid%> = (java.util.Map<String, Object>) <%=codeVarSchemaEnforcer%>.get(<%=i%>);
-org.apache.avro.Schema dynSchema_<%=cid%> = ((org.talend.codegen.enforcer.OutgoingDynamicSchemaEnforcer) <%=codeVarSchemaEnforcer%>).getDynamicFieldsSchema();
-for (org.apache.avro.Schema.Field dynamicField_<%=cid%> : dynSchema_<%=cid%>.getFields()){
-String name = dynamicField_<%=cid%>.name();
-if("true".equals(dynamicField_<%=cid%>.getProp("ENABLE_SPECIAL_TABLENAME"))){
-dynamicValue_<%=cid%>.put(dynamicField_<%=cid%>.getProp("talend.field.dbColumnName"), dynamicValue_<%=cid%>.get(name));
-dynamicValue_<%=cid%>.remove(name);
-}
-}
for (java.util.Map.Entry<String, Object> dynamicValueEntry_<%=cid%> : dynamicValue_<%=cid%>.entrySet()) {
-<%=codeVarDynamic%>.setColumnValue(<%=codeVarDynamic%>.getIndex(dynamicValueEntry_<%=cid%>.getKey()), dynamicValueEntry_<%=cid%>.getValue());
+<%=codeVarDynamic%>.addColumnValue(dynamicValueEntry_<%=cid%>.getValue());
}
<%=codeVarRowStruct%>.<%=dynamicColName%> = <%=codeVarDynamic%>;
<%

View File

@@ -73,6 +73,9 @@ import pigudf.<%=routine%>;
import routines.<%=routine%>;
<% }
}%>
+<%for (String codesJar : CodeGeneratorRoutine.getRequiredCodesJarName(process)) {%>
+import <%=codesJar%>;
+<%}%>
import routines.system.*;
import routines.system.api.*;
import java.text.ParseException;
@@ -382,11 +385,101 @@ public <%=JavaTypesManager.getTypeToGenerate(ctxParam.getType(),true)%> get<%=Ch
<%
INode jobCatcherNode = null;
+//one matched component or part, one thread
+//why not computed by cpu or resource : please imagine this case :
+//loop==>(input==>(twritexmlfield A)==>(twritexmlfield B)==>(twritexmlfield C)==>output), dead lock as cycle dependency and only one thread in thread pool
+//maybe newCachedThreadPool is a better idea, but that has a risk of creating more threads, then more memory for TDI-47230
+//why not generate thread pool object in subprocess scope :
+// 1: major reason : difficult to control the var scope, somewhere can't access it, then compiler issue
+// 2: we may need this thread pool for bigger scope, not only for twritexmlfield/twritejsonfield in future
+// 3: we don't expect this thread pool to cost big resources after all tasks are done, so we can shut it down later,
+// for example, most of the time a user will use fewer than 3 twritexmlfield in one job, hence a 3-thread pool; we can close them in the job finish code part,
+// not a big cost to keep that. And of course, it would be best to start & clean it when the subprocess finishes, but that carries risk 1 above.
+int threadPoolSize = 0;
+boolean tHMapExists = false;
+boolean tHMapOutExists = false;
+boolean tRESTRequestLoopExists = false;
for (INode nodeInProcess : processNodes) {
String componentName = nodeInProcess.getComponent().getName();
-if("tJobStructureCatcher".equals(componentName)) {
+if(jobCatcherNode==null && "tJobStructureCatcher".equals(componentName)) {
jobCatcherNode = nodeInProcess;
-break;
+continue;
}
+if(!nodeInProcess.isActivate()) continue;
+if("tHMap".equals(componentName)) {
+tHMapExists = true;
+continue;
+}
+if("tHMapOut".equals(componentName)) {
+tHMapOutExists = true;
+continue;
+}
+if("tRESTRequestLoop".equals(componentName)) {
+tRESTRequestLoopExists = true;
+continue;
+}
+if("tWriteXMLFieldOut".equals(componentName)) {
+IConnection nextMergeConn = NodeUtil.getNextMergeConnection(nodeInProcess);
+if(nextMergeConn == null || nextMergeConn.getInputId()==1){
+threadPoolSize++;
+}
+}
}
+if(threadPoolSize>0) {
+if(tRESTRequestLoopExists) {//microservice
+%>
+private class DaemonThreadFactory implements java.util.concurrent.ThreadFactory {
+java.util.concurrent.ThreadFactory factory = java.util.concurrent.Executors.defaultThreadFactory();
+public java.lang.Thread newThread(java.lang.Runnable r) {
+java.lang.Thread t = factory.newThread(r);
+t.setDaemon(true);
+return t;
+}
+}
+<%
+}
+if(tHMapExists || tHMapOutExists) {
+%>
+private final java.util.concurrent.ExecutorService es = java.util.concurrent.Executors.newFixedThreadPool(<%=threadPoolSize%> <%if(tRESTRequestLoopExists) {%>,new DaemonThreadFactory()<%}%>);
+<%
+} else {
+%>
+private final java.util.concurrent.ExecutorService es = java.util.concurrent.Executors.newCachedThreadPool(<%if(tRESTRequestLoopExists) {%>new DaemonThreadFactory()<%}%>);
+<%
+}
+if(tRESTRequestLoopExists) {//microservice
+%>
+{
+java.lang.Runtime.getRuntime().addShutdownHook(new java.lang.Thread() {
+public void run() {
+es.shutdown();
+try {
+if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {
+es.shutdownNow();
+if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {
+}
+}
+} catch (java.lang.InterruptedException ie) {
+es.shutdownNow();
+} catch (java.lang.Exception e) {
+}
+}
+});
+}
+<%
+}
+}
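
The skeleton above builds a fixed-size pool (one thread per matched tWriteXMLFieldOut) when tHMap/tHMapOut is present, a cached pool otherwise, wraps threads as daemons in the microservice case so the pool cannot keep the JVM alive, and registers the usual two-phase shutdown. A compact sketch of the same setup (WriterPool is an illustrative name, not part of the generated code):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;

public class WriterPool {

    // Fixed pool of one thread per matched writer, or a cached pool; daemon
    // threads in the microservice case so the pool cannot pin the JVM.
    static ExecutorService newPool(int threadPoolSize, boolean fixed, boolean daemon) {
        ThreadFactory factory = Executors.defaultThreadFactory();
        ThreadFactory wrapped = r -> {
            Thread t = factory.newThread(r);
            t.setDaemon(daemon);
            return t;
        };
        return fixed
                ? Executors.newFixedThreadPool(threadPoolSize, wrapped)
                : Executors.newCachedThreadPool(wrapped);
    }

    // The same two-phase shutdown the template emits in its shutdown hook.
    static void shutdown(ExecutorService es) {
        es.shutdown();
        try {
            if (!es.awaitTermination(60, TimeUnit.SECONDS)) {
                es.shutdownNow(); // should never be reached; only for safety
            }
        } catch (InterruptedException ie) {
            es.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }
}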

View File

@@ -20,6 +20,7 @@
org.talend.core.model.process.IHashConfiguration
org.talend.core.model.process.IHashableColumn
org.talend.core.model.utils.NodeUtil
org.talend.core.model.utils.TalendTextUtils
"
class="SubProcessHeader"
skeleton="subprocess_header_java.skeleton"
@@ -183,6 +184,48 @@ public static class <%=conn.getName() %>Struct<%=templateOrigin %> implements ro
public <%= typeToGenerate %> get<%=column.getLabel().substring(0, 1).toUpperCase()%><%=column.getLabel().substring(1)%> () {
return this.<%=column.getLabel()%>;
}
public Boolean <%=column.getLabel()%>IsNullable(){
return <%=column.isNullable()%>;
}
public Boolean <%=column.getLabel()%>IsKey(){
return <%=column.isKey()%>;
}
public Integer <%=column.getLabel()%>Length(){
return <%=column.getLength()%>;
}
public Integer <%=column.getLabel()%>Precision(){
return <%=column.getPrecision()%>;
}
public String <%=column.getLabel()%>Default(){
<% if (column.getDefault() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getDefault()))%>";
<% } %>
}
public String <%=column.getLabel()%>Comment(){
<% if (column.getComment() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getComment()))%>";
<% } %>
}
public String <%=column.getLabel()%>Pattern(){
<% if (column.getPattern() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getPattern()))%>";
<% } %>
}
public String <%=column.getLabel()%>OriginalDbColumnName(){
<% if (column.getOriginalDbColumnName() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getOriginalDbColumnName()))%>";
<% } %>
}
<%
if((conn.getLineStyle() == EConnectionType.FLOW_REF) && conn.getTarget().getUniqueName().startsWith("tXMLMap") && "id_Document".equals(javaType.getId())) {
%>
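For illustration only, the accessors the template above generates for a hypothetical String column named city would look roughly like this (all values are made up):

// hypothetical generated output for a String column "city"
public Boolean cityIsNullable() { return true; }
public Boolean cityIsKey() { return false; }
public Integer cityLength() { return 50; }
public Integer cityPrecision() { return 0; }
public String cityDefault() { return null; }
public String cityComment() { return "shipping city"; }
public String cityPattern() { return null; }
public String cityOriginalDbColumnName() { return "CITY"; }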

View File

@@ -10,6 +10,7 @@ CodeGenerator.getGraphicalNode2=------process.getGeneratingNodes()------
CodeGenerator.JET.TimeOut=JET initialisation Time Out
CodeGenerator.newLine=\n\n\n\n
CodeGenerator.Node.NotFound=Node not found in current process
CodeGenerator.Components.NotFound={0}: Component is missing: {1}; use -D{2}=false in your studio or commandline to skip this check, and a warning message will be logged.
JavaRoutineSynchronizer.UnsupportedOperation.Exception1=method not implemented: org.talend.designer.codegen.JavaRoutineSynchronizer line:49
JavaRoutineSynchronizer.UnsupportedOperation.Exception2=method not implemented: org.talend.designer.codegen.JavaRoutineSynchronizer line:58
JetSkeletonManager.unableLoad=unable to load skeleton update cache file

View File

@@ -66,6 +66,7 @@ import org.talend.designer.codegen.model.CodeGeneratorEmittersPoolFactory;
import org.talend.designer.codegen.model.CodeGeneratorInternalTemplatesFactoryProvider;
import org.talend.designer.codegen.proxy.JetProxy;
import org.talend.designer.core.generic.model.Component;
import org.talend.designer.core.model.components.DummyComponent;
import org.talend.designer.runprocess.ProcessorUtilities;
/**
@@ -845,6 +846,25 @@ public class CodeGenerator implements ICodeGenerator {
IComponentFileNaming componentFileNaming = ComponentsFactoryProvider.getFileNamingInstance();
IComponent component = node.getComponent();
if (component instanceof DummyComponent) {
if (((DummyComponent) component).isMissingComponent()) {
String processName = "";
try {
IProcess proc = node.getProcess();
processName = proc.getName() + " " + proc.getVersion();
} catch (Exception e) {
ExceptionHandler.process(e);
}
if (IProcess.ERR_ON_COMPONENT_MISSING) {
throw new CodeGeneratorException(Messages.getString("CodeGenerator.Components.NotFound", processName,
component.getName(), IProcess.PROP_ERR_ON_COMPONENT_MISSING));
}
if (ECodePart.BEGIN.equals(part)) {
log.warn(Messages.getString("CodeGenerator.Components.NotFound", processName, component.getName(),
IProcess.PROP_ERR_ON_COMPONENT_MISSING));
}
}
}
// some code unification to handle all component types the same way.
String templateURI = component.getTemplateFolder() + TemplateUtil.DIR_SEP
+ componentFileNaming.getJetFileName(component.getTemplateNamePrefix(), language.getExtension(), part);
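A standalone sketch of the warn-or-throw toggle added above. The property key is a placeholder; the real key is whatever IProcess.PROP_ERR_ON_COMPONENT_MISSING resolves to, and the exception type stands in for CodeGeneratorException.

public final class MissingComponentPolicy {
    static final String KEY = "talend.component.errorOnMissing"; // placeholder key
    static final boolean ERR_ON_MISSING = Boolean.parseBoolean(System.getProperty(KEY, "true"));

    static void handleMissing(String jobName, String componentName) {
        String msg = jobName + ": Component is missing: " + componentName
                + "; use -D" + KEY + "=false to skip this check";
        if (ERR_ON_MISSING) {
            throw new IllegalStateException(msg); // stand-in for CodeGeneratorException
        }
        System.err.println("WARN " + msg); // stand-in for log.warn(...)
    }
}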

View File

@@ -69,6 +69,15 @@ public class JavaRoutineSynchronizer extends AbstractRoutineSynchronizer {
syncRoutineItems(getRoutines(true), true);
}
@Override
public void syncAllInnerCodes() throws SystemException {
syncInnerCodeItems(false);
}
@Override
public void syncAllInnerCodesForLogOn() throws SystemException {
syncInnerCodeItems(true);
}
private void syncRoutineItems(Collection<RoutineItem> routineObjects, boolean forceUpdate) throws SystemException {
for (RoutineItem routineItem : routineObjects) {

View File

@@ -8,6 +8,7 @@ Require-Bundle: org.eclipse.core.runtime,
org.eclipse.ui,
org.apache.log4j,
org.apache.commons.collections,
org.apache.commons.discovery,
org.apache.commons.logging,
org.apache.commons.beanutils,
org.apache.commons.io,
@@ -25,7 +26,6 @@ Require-Bundle: org.eclipse.core.runtime,
org.talend.repository,
org.talend.core.repository,
org.talend.updates.runtime,
org.apache.axis,
org.eclipse.ui.intro,
org.eclipse.ui.forms,
org.eclipse.jface.text

View File

@@ -1,66 +0,0 @@
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.components.exchange.proxy;
import org.apache.commons.lang.StringUtils;
/**
*
* DOC hcyi class global comment. Detailed comment
*/
public class DefaultHTTPSTransportClientProperties extends DefaultHTTPTransportClientProperties {
/**
* @see org.apache.axis.components.net.TransportClientProperties#getProxyHost()
*/
@Override
public String getProxyHost() {
return StringUtils.trimToEmpty(System.getProperty("https.proxyHost")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getNonProxyHosts()
*/
@Override
public String getNonProxyHosts() {
return StringUtils.trimToEmpty(System.getProperty("https.nonProxyHosts")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getPort()
*/
@Override
public String getProxyPort() {
return StringUtils.trimToEmpty(System.getProperty("https.proxyPort")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getUser()
*/
@Override
public String getProxyUser() {
return StringUtils.trimToEmpty(System.getProperty("https.proxyUser")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getPassword()
*/
@Override
public String getProxyPassword() {
return StringUtils.trimToEmpty(System.getProperty("https.proxyPassword")); //$NON-NLS-1$
}
}

View File

@@ -1,58 +0,0 @@
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.components.exchange.proxy;
import org.apache.axis.components.net.TransportClientProperties;
import org.apache.commons.lang.StringUtils;
/**
*
* DOC hcyi class global comment. Detailed comment
*/
public class DefaultHTTPTransportClientProperties implements TransportClientProperties {
/**
* @see org.apache.axis.components.net.TransportClientProperties#getProxyHost()
*/
public String getProxyHost() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyHost")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getNonProxyHosts()
*/
public String getNonProxyHosts() {
return StringUtils.trimToEmpty(System.getProperty("http.nonProxyHosts")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getPort()
*/
public String getProxyPort() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyPort")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getProxyUser()
*/
public String getProxyUser() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyUser")); //$NON-NLS-1$
}
/**
* @see org.apache.axis.components.net.TransportClientProperties#getProxyPassword()
*/
public String getProxyPassword() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyPassword")); //$NON-NLS-1$
}
}

View File

@@ -25,11 +25,9 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.axis.components.net.TransportClientProperties;
import org.apache.axis.components.net.TransportClientPropertiesFactory;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.collections.map.MultiValueMap;
import org.apache.commons.discovery.tools.ManagedProperties;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.NameValuePair;
@@ -151,14 +149,17 @@ public class ExchangeUtils {
public static String sendGetRequest(String urlAddress) throws Exception {
HttpClient httpclient = new HttpClient();
GetMethod getMethod = new GetMethod(urlAddress);
TransportClientProperties tcp = TransportClientPropertiesFactory.create("http");
if (tcp.getProxyHost().length() != 0) {
String proxyUser = ManagedProperties.getProperty("http.proxyUser");
String proxyPassword = ManagedProperties.getProperty("http.proxyPassword");
String proxyHost = ManagedProperties.getProperty("http.proxyHost");
proxyHost = proxyHost != null ? proxyHost : "";
String proxyPort = ManagedProperties.getProperty("http.proxyPort");
if (proxyHost.length() != 0) {
UsernamePasswordCredentials creds = new UsernamePasswordCredentials(
tcp.getProxyUser() != null ? tcp.getProxyUser() : "",
tcp.getProxyPassword() != null ? tcp.getProxyUser() : "");
proxyUser != null ? proxyUser : "", proxyPassword != null ? proxyPassword : "");
httpclient.getState().setProxyCredentials(AuthScope.ANY, creds);
HostConfiguration hcf = new HostConfiguration();
hcf.setProxy(tcp.getProxyHost(), Integer.parseInt(tcp.getProxyPort()));
hcf.setProxy(proxyHost, Integer.parseInt(proxyPort));
httpclient.executeMethod(hcf, getMethod);
} else {
httpclient.executeMethod(getMethod);
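The patch replaces Axis's TransportClientPropertiesFactory with commons-discovery ManagedProperties, which falls back to system properties. A hedged sketch of the equivalent lookup using only the JDK:

// minimal sketch: resolve the HTTP proxy from the standard system properties
static java.net.Proxy resolveHttpProxy() {
    String host = System.getProperty("http.proxyHost", "");
    String port = System.getProperty("http.proxyPort");
    if (host.isEmpty() || port == null) {
        return java.net.Proxy.NO_PROXY; // no proxy configured
    }
    return new java.net.Proxy(java.net.Proxy.Type.HTTP,
            new java.net.InetSocketAddress(host, Integer.parseInt(port)));
}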

View File

@@ -42,7 +42,18 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
<!-- Spring 3 dependencies -->
<dependency>
<groupId>org.springframework</groupId>

View File

@@ -1,69 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>checkArchive-1.1-20190917</artifactId>
<version>6.0.0</version>
<name>checkArchive</name>
<description>Dependency for tFileArchive and tFileUnarchive</description>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<java.source.version>1.7</java.source.version>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-compress -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.19</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${java.source.version}</source>
<target>${java.source.version}</target>
<showDeprecation>true</showDeprecation>
<showWarnings>true</showWarnings>
<compilerArgument>-XDignore.symbol.file</compilerArgument>
<fork>true</fork>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>filecopy</artifactId>
<version>2.0.1</version>
<version>2.0.3</version>
<packaging>jar</packaging>
<name>talend-copy</name>
@@ -14,6 +14,7 @@
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<java.source.version>1.8</java.source.version>
<junit5.version>5.4.2</junit5.version>
<slf4j.version>1.7.28</slf4j.version>
</properties>
<distributionManagement>
@@ -52,7 +53,12 @@
<version>${junit5.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>

View File

@@ -20,11 +20,16 @@ import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* DOC Administrator class global comment. Detailed comment
*/
public class FileCopy {
static Logger logger = LoggerFactory.getLogger(Object.class);
/** Private constructor, only static methods */
private FileCopy() {
}
@@ -37,19 +42,57 @@ public class FileCopy {
* @param delSrc : true if delete source.
* @throws IOException : if IO pb.
*/
public static void copyFile(String srcFileName, String desFileName, boolean delSrc) throws IOException {
public static void copyFile(String srcFileName, String desFileName, boolean delSrc, boolean keepModified)
throws IOException {
final Path source = Paths.get(srcFileName);
final Path destination = Paths.get(desFileName);
FileTime lastModifiedTime = null;
try {
lastModifiedTime = Files.getLastModifiedTime(source);
} catch (IOException e) {
logger.warn(e.getLocalizedMessage());
}
if (delSrc) {
// move: more efficient when on the same FS, and the source must be deleted anyway; any existing destination file is replaced.
FileTime lastModifiedTime = Files.getLastModifiedTime(source);
Files.move(source, destination, StandardCopyOption.REPLACE_EXISTING);
Files.setLastModifiedTime(destination,lastModifiedTime);
} else {
Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);
Files.setLastModifiedTime(destination,Files.getLastModifiedTime(source));
}
if(keepModified){
try {
Files.setLastModifiedTime(destination,lastModifiedTime);
} catch (IOException e) {
logger.warn(e.getLocalizedMessage());
}
}
}
public static void copyFile(String srcFileName, String desFileName, boolean delSrc ) throws IOException {
copyFile(srcFileName,desFileName,delSrc,true);
}
/**
* Force Copy and Delete files.
*
* @param srcFileName : file name for source file.
* @param desFileName : file name for destination file.
* @throws IOException : if IO pb.
*/
public static void forceCopyAndDelete(String srcFileName, String desFileName, boolean keepModified) throws IOException {
final Path source = Paths.get(srcFileName);
final Path destination = Paths.get(desFileName);
final long lastModifiedTime = new File(srcFileName).lastModified();
Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);
Files.delete(source);
if(keepModified){
destination.toFile().setLastModified(lastModifiedTime);
}
}
public static void forceCopyAndDelete(String srcFileName, String desFileName) throws IOException {
forceCopyAndDelete(srcFileName,desFileName,true);
}
}
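A usage sketch of the extended API above; the paths are placeholders:

// copy without deleting the source and without preserving the modification time
FileCopy.copyFile("/tmp/in.xml", "/tmp/out.xml", false, false);
// the existing three-arg overload keeps its old behavior (keepModified defaults to true)
FileCopy.copyFile("/tmp/in.xml", "/tmp/out.xml", true);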

View File

@@ -100,6 +100,26 @@ class FileCopyTest {
Assertions.assertEquals(referenceSize, copy.length(), "Size error");
}
@Test
void testForceCopyWithDelete() throws Exception {
final URL repCopy = Thread.currentThread().getContextClassLoader().getResource("copy");
File file = this.buildFile("fileToDelete.txt", 10L * 1024L);
file.deleteOnExit();
File copy = new File(repCopy.getPath(), "fileToDelete.txt");
long referenceSize = file.length();
if (!copy.exists()) {
copy.createNewFile();
}
copy.deleteOnExit();
FileCopy.forceCopyAndDelete(file.getPath(), copy.getPath());
Assertions.assertFalse(file.exists(), "source file not deleted");
Assertions.assertTrue(copy.exists(), "small file : original file deleted");
Assertions.assertEquals(referenceSize, copy.length(), "Size error");
}
@Test
void testLastModifiedTime() throws Exception {
final URL repCopy = Thread.currentThread().getContextClassLoader().getResource("copy");
@@ -143,4 +163,22 @@ class FileCopyTest {
return generatedFile;
}
@Test
void testKeepLastModifiedTime() throws Exception {
final URL repCopy = Thread.currentThread().getContextClassLoader().getResource("copy");
File file = this.buildFile("fileLMT.txt", 10L * 1024L);
file.deleteOnExit();
long referenceTime = 324723894L;
file.setLastModified(referenceTime);
File copy = new File(repCopy.getPath(), "fileLMTDestination.txt");
if (copy.exists()) {
copy.delete();
}
copy.deleteOnExit();
FileCopy.copyFile(file.getPath(), copy.getPath(), true,true);
Assertions.assertEquals(referenceTime, copy.lastModified(), "modified time is not identical");
}
}

View File

@@ -230,9 +230,9 @@
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.14</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
<scope>runtime</scope>
<optional>true</optional>
</dependency>

View File

@@ -11,7 +11,7 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<cxf.version>3.1.2</cxf.version>
<cxf.version>3.4.7</cxf.version>
</properties>
<build>
@@ -77,8 +77,8 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.5.1</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>

View File

@@ -22,6 +22,7 @@ import javax.xml.bind.annotation.XmlType;
* &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
* &lt;element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
* &lt;element name="processSteps" type="{http://www.talend.com/mdm}WSTransformerProcessStep" maxOccurs="unbounded" minOccurs="0"/&gt;
* &lt;element name="withAdminPermissions" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/&gt;
* &lt;/sequence&gt;
* &lt;/restriction&gt;
* &lt;/complexContent&gt;
@@ -34,7 +35,8 @@ import javax.xml.bind.annotation.XmlType;
@XmlType(name = "WSTransformerV2", propOrder = {
"description",
"name",
"processSteps"
"processSteps",
"withAdminPermissions"
})
public class WSTransformerV2 {
@@ -42,6 +44,7 @@ public class WSTransformerV2 {
protected String name;
@XmlElement(nillable = true)
protected List<WSTransformerProcessStep> processSteps;
protected Boolean withAdminPermissions;
/**
* Default no-arg constructor
@@ -55,10 +58,11 @@ public class WSTransformerV2 {
* Fully-initialising value constructor
*
*/
public WSTransformerV2(final String description, final String name, final List<WSTransformerProcessStep> processSteps) {
public WSTransformerV2(final String description, final String name, final List<WSTransformerProcessStep> processSteps, final Boolean withAdminPermissions) {
this.description = description;
this.name = name;
this.processSteps = processSteps;
this.withAdminPermissions = withAdminPermissions;
}
/**
@@ -138,4 +142,28 @@ public class WSTransformerV2 {
return this.processSteps;
}
/**
* Gets the value of the withAdminPermissions property.
*
* @return
* possible object is
* {@link Boolean }
*
*/
public Boolean isWithAdminPermissions() {
return withAdminPermissions;
}
/**
* Sets the value of the withAdminPermissions property.
*
* @param value
* allowed object is
* {@link Boolean }
*
*/
public void setWithAdminPermissions(Boolean value) {
this.withAdminPermissions = value;
}
}
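Illustrative only: the new optional flag travels through the value constructor and the accessor pair.

// sketch: exercising the new withAdminPermissions flag
WSTransformerV2 t = new WSTransformerV2("desc", "myTransformer",
        new java.util.ArrayList<WSTransformerProcessStep>(), Boolean.TRUE);
t.setWithAdminPermissions(Boolean.FALSE);
System.out.println(t.isWithAdminPermissions()); // false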

View File

@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend</groupId>
<artifactId>talendMQRFH2</artifactId>
<version>1.0.1-20190206</version>
<version>1.1.0-20220307</version>
<packaging>jar</packaging>
<properties>
@@ -36,20 +36,10 @@
</distributionManagement>
<dependencies>
<dependency>
<groupId>com.ibm.mq</groupId>
<artifactId>com.ibm.mq</artifactId>
<version>8.0.0.9</version>
</dependency>
<dependency>
<groupId>com.ibm.mq</groupId>
<artifactId>com.ibm.mqjms</artifactId>
<version>8.0.0.9</version>
</dependency>
<dependency>
<groupId>com.ibm.mq</groupId>
<artifactId>com.ibm.mq.allclient</artifactId>
<version>8.0.0.9</version>
<version>9.2.4.0</version>
</dependency>
<dependency>
<groupId>org.talend.libraries</groupId>
@@ -67,9 +57,9 @@
<version>6.0.0</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
</dependencies>

View File

@@ -113,9 +113,9 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
<scope>test</scope>
</dependency>
<!-- JUnit -->

View File

@@ -63,9 +63,9 @@
<version>4.1.2</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
</dependencies>
<build>

View File

@@ -7,21 +7,21 @@
<groupId>org.talend.libraries</groupId>
<artifactId>talend-codegen-utils</artifactId>
<!-- release for revert version of library -->
<version>0.28.0</version>
<version>0.31.0</version>
<packaging>jar</packaging>
<properties>
<avro.version>1.8.0</avro.version>
<components.version>0.25.0-SNAPSHOT</components.version>
<daikon.version>0.26.0-SNAPSHOT</daikon.version>
<components.version>0.30.0</components.version>
<daikon.version>0.31.11</daikon.version>
<hamcrest.version>1.3</hamcrest.version>
<junit.version>4.12</junit.version>
<java-formatter.plugin.version>0.1.0</java-formatter.plugin.version>
<formatter.plugin.version>1.6.0-SNAPSHOT</formatter.plugin.version>
<mockito.version>2.2.15</mockito.version>
<jacoco.plugin.version>0.7.8</jacoco.plugin.version>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>

View File

@@ -24,8 +24,10 @@ import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import org.apache.avro.Schema;
@@ -33,9 +35,11 @@ import org.apache.avro.Schema.Field;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.SchemaParseException;
import org.talend.codegen.DiSchemaConstants;
import org.talend.daikon.avro.AvroUtils;
import org.talend.daikon.avro.LogicalTypeUtils;
import org.talend.daikon.avro.NameUtil;
import org.talend.daikon.avro.SchemaConstants;
/**
@@ -133,6 +137,7 @@ public class IncomingSchemaEnforcer {
}
}
//TODO remove this method, as no javajet uses it now
/**
* Take all of the parameters from the dynamic metadata and adapt it to a field for the runtime Schema.
*
@@ -144,6 +149,62 @@ public class IncomingSchemaEnforcer {
addDynamicField(name, type, null, format, description, isNullable);
}
private Set<String> existNames;
private Map<String, String> unvalidName2ValidName;
private int index = 0;
/**
* Recreates dynamic field from parameters retrieved from DI dynamic metadata
*
* @param name dynamic field name
* @param diType di column type
* @param logicalType dynamic field logical type; could be null
* @param fieldPattern dynamic field date format
* @param description dynamic field description
* @param isNullable defines whether dynamic field may contain <code>null</code> value
* @param isKey defines whether dynamic field is key field
*/
public void addDynamicField(String name, String diType, String logicalType, String fieldPattern, String description,
boolean isNullable, boolean isKey) {
if (!needsInitDynamicColumns())
return;
Schema fieldSchema = diToAvro(diType, logicalType);
if (isNullable) {
fieldSchema = SchemaBuilder.nullable().type(fieldSchema);
}
Schema.Field field;
try {
field = new Schema.Field(name, fieldSchema, description, (Object) null);
} catch (SchemaParseException e) {
//the name contains special chars like $ and # which can't pass the avro name check
//(unicode such as Japanese can pass the check too, though that is not expected)
if (existNames == null) {
existNames = new HashSet<>();
unvalidName2ValidName = new HashMap<>();
}
String validName = NameUtil.correct(name, index++, existNames);
existNames.add(validName);
unvalidName2ValidName.put(name, validName);
field = new Schema.Field(validName, fieldSchema, description, (Object) null);
field.addProp(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME, name);
}
// Set pattern for date type
if ("id_Date".equals(diType) && fieldPattern != null) {
field.addProp(SchemaConstants.TALEND_COLUMN_PATTERN, fieldPattern);
}
if (isKey) {
field.addProp(SchemaConstants.TALEND_COLUMN_IS_KEY, "true");
}
dynamicFields.add(field);
}
/**
* Recreates dynamic field from parameters retrieved from DI dynamic metadata
*
@@ -154,21 +215,10 @@ public class IncomingSchemaEnforcer {
* @param description dynamic field description
* @param isNullable defines whether dynamic field may contain <code>null</code> value
*/
@Deprecated
public void addDynamicField(String name, String diType, String logicalType, String fieldPattern, String description,
boolean isNullable) {
if (!needsInitDynamicColumns())
return;
Schema fieldSchema = diToAvro(diType, logicalType);
if (isNullable) {
fieldSchema = SchemaBuilder.nullable().type(fieldSchema);
}
Schema.Field field = new Schema.Field(name, fieldSchema, description, (Object) null);
// Set pattern for date type
if ("id_Date".equals(diType) && fieldPattern != null) {
field.addProp(SchemaConstants.TALEND_COLUMN_PATTERN, fieldPattern);
}
dynamicFields.add(field);
addDynamicField(name, diType, logicalType, fieldPattern, description, isNullable, false);
}
public void addIncomingNodeField(String name, String className) {
@@ -250,6 +300,8 @@ public class IncomingSchemaEnforcer {
fieldSchema = AvroUtils._decimal();
} else if ("id_Date".equals(diType)) {
fieldSchema = AvroUtils._date();
} else if ("id_byte[]".equals(diType)) {
fieldSchema = AvroUtils._bytes();
} else {
throw new UnsupportedOperationException("Unrecognized type " + diType);
}
@@ -369,6 +421,9 @@ public class IncomingSchemaEnforcer {
return designSchema;
}
//here we do special processing for dynamic input names, but in fact
//we have the same issue with Japanese or special chars used as labels for basic talend columns too,
//so not only dynamic columns may have special names, but basic ones may as well; we don't consider that here, that's a TODO
/**
* Converts DI data value to Avro format and put it into record by field name
*
@@ -376,9 +431,16 @@ public class IncomingSchemaEnforcer {
* @param diValue data value
*/
public void put(String name, Object diValue) {
if (unvalidName2ValidName != null) {
String validName = unvalidName2ValidName.get(name);
if (validName != null) {
name = validName;
}
}
put(columnToFieldIndex.get(name), diValue);
}
//TODO make it private; nothing calls it outside the current class?
/**
* Converts DI data value to Avro format and put it into record by field index
*

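To see why the name-correction fallback above is needed, here is a standalone sketch in plain Avro (class name hypothetical): a field name containing # is rejected by Avro's name check, while unicode such as Japanese passes it.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class AvroNameCheckDemo {
    public static void main(String[] args) {
        Schema str = SchemaBuilder.builder().stringType();
        try {
            new Schema.Field("address#", str, null, (Object) null); // '#' fails the name check
        } catch (org.apache.avro.SchemaParseException e) {
            // the enforcer catches this, substitutes a corrected name via NameUtil.correct,
            // and records the original under TALEND_COLUMN_DB_COLUMN_NAME
            System.out.println("rejected: " + e.getMessage());
        }
        new Schema.Field("性别", str, null, (Object) null); // unicode passes the check
    }
}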
View File

@@ -52,6 +52,8 @@ public class IncomingSchemaEnforcerTest {
*/
private IndexedRecord componentRecord;
private IndexedRecord componentRecordWithSpecialName;
@Rule
public ExpectedException thrown = ExpectedException.none();
@@ -72,9 +74,29 @@ public class IncomingSchemaEnforcerTest {
componentRecord.put(3, true);
componentRecord.put(4, "Main Street");
componentRecord.put(5, "This is a record with six columns.");
Schema componentSchemaWithSpecialName = SchemaBuilder.builder().record("Record").fields() //
.name("id").type().intType().noDefault() //
.name("name").type().stringType().noDefault() //
.name("age").type().intType().noDefault() //
.name("性别").type().booleanType().noDefault() //why this don't store the origin name, as it can pass the avro name check, it's a avro bug
.name("address_").prop(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME, "address#").type().stringType().noDefault() //
.name("comment_").prop(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME, "comment$").type().stringType().noDefault() //
.endRecord();
componentRecordWithSpecialName = new GenericData.Record(componentSchemaWithSpecialName);
componentRecordWithSpecialName.put(0, 1);
componentRecordWithSpecialName.put(1, "User");
componentRecordWithSpecialName.put(2, 100);
componentRecordWithSpecialName.put(3, true);
componentRecordWithSpecialName.put(4, "Main Street");
componentRecordWithSpecialName.put(5, "This is a record with six columns.");
}
private void checkEnforcerWithComponentRecordData(IncomingSchemaEnforcer enforcer) {
checkEnforcerWithComponentRecordData(enforcer, false);
}
private void checkEnforcerWithComponentRecordData(IncomingSchemaEnforcer enforcer, boolean specialName) {
// The enforcer must be ready to receive values.
assertThat(enforcer.needsInitDynamicColumns(), is(false));
@@ -88,15 +110,25 @@ public class IncomingSchemaEnforcerTest {
IndexedRecord adapted = enforcer.createIndexedRecord();
// Ensure that the result is the same as the expected component record.
assertThat(adapted, is(componentRecord));
if (specialName) {
assertThat(adapted, is(componentRecordWithSpecialName));
} else {
assertThat(adapted, is(componentRecord));
}
// Ensure that we create a new instance when we give it another value.
enforcer.put("id", 2);
enforcer.put("name", "User2");
enforcer.put("age", 200);
enforcer.put("valid", false);
enforcer.put("address", "2 Main Street");
enforcer.put("comment", "2 This is a record with six columns.");
if (specialName) {
enforcer.put("性别", false);
enforcer.put("address#", "2 Main Street");
enforcer.put("comment$", "2 This is a record with six columns.");
} else {
enforcer.put("valid", false);
enforcer.put("address", "2 Main Street");
enforcer.put("comment", "2 This is a record with six columns.");
}
IndexedRecord adapted2 = enforcer.createIndexedRecord();
// It should have the same schema, but not be the same instance.
@@ -392,6 +424,39 @@ public class IncomingSchemaEnforcerTest {
checkEnforcerWithComponentRecordData(enforcer);
}
@Test
public void testDynamicColumnWithSpecialName() {
Schema designSchema = SchemaBuilder.builder().record("Record") //
.prop(DiSchemaConstants.TALEND6_DYNAMIC_COLUMN_POSITION, "3") //
.prop(SchemaConstants.INCLUDE_ALL_FIELDS, "true") //
.fields() //
.name("id").type().intType().noDefault() //
.name("name").type().stringType().noDefault() //
.name("age").type().intType().noDefault() //
.endRecord();
IncomingSchemaEnforcer enforcer = new IncomingSchemaEnforcer(designSchema);
// The enforcer isn't usable yet.
assertThat(enforcer.getDesignSchema(), is(designSchema));
assertFalse(enforcer.areDynamicFieldsInitialized());
assertThat(enforcer.getRuntimeSchema(), nullValue());
enforcer.addDynamicField("性别", "id_Boolean", null, null, null, false, false);
enforcer.addDynamicField("address#", "id_String", null, null, null, false, false);
enforcer.addDynamicField("comment$", "id_String", null, null, null, false, false);
assertFalse(enforcer.areDynamicFieldsInitialized());
enforcer.createRuntimeSchema();
assertTrue(enforcer.areDynamicFieldsInitialized());
// Check the run-time schema was created.
assertThat(enforcer.getDesignSchema(), is(designSchema));
assertThat(enforcer.getRuntimeSchema(), not(nullValue()));
// Put values into the enforcer and get them as an IndexedRecord.
checkEnforcerWithComponentRecordData(enforcer, true);
}
@Test
public void testTypeConversion_toDate() {
// The expected schema after enforcement.
@@ -699,6 +764,28 @@ public class IncomingSchemaEnforcerTest {
assertThat(record.get(1), is((Object) new Date(1234567891011L)));
}
/**
* Checks key field setting
*/
@Test
public void testAddDynamicFieldKey() {
Schema expectedRuntimeSchema = SchemaBuilder.builder().record("Record").fields().name("id")
.prop(SchemaConstants.TALEND_COLUMN_IS_KEY, "true").type().intType().noDefault().endRecord();
Schema designSchema = SchemaBuilder.builder().record("Record").prop(SchemaConstants.INCLUDE_ALL_FIELDS, "true")
.prop(DiSchemaConstants.TALEND6_DYNAMIC_COLUMN_POSITION, "0").fields().endRecord();
IncomingSchemaEnforcer enforcer = new IncomingSchemaEnforcer(designSchema);
enforcer.addDynamicField("id", "id_Integer", null, null, null, false, true);
enforcer.createRuntimeSchema();
assertTrue(enforcer.areDynamicFieldsInitialized());
Schema actualRuntimeSchema = enforcer.getRuntimeSchema();
assertEquals(expectedRuntimeSchema, actualRuntimeSchema);
}
/**
* Checks that {@link IncomingSchemaEnforcer#put()} converts a string value to a date according to the pattern specified in the dynamic field
* TODO (iv.gonchar): this is incorrect behavior, because avro record should not contain java.util.Date value. It should store

View File

@@ -45,9 +45,9 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
<scope>test</scope>
</dependency>
<dependency>

View File

@@ -4,7 +4,7 @@
<groupId>org.talend</groupId>
<artifactId>talend-httputil</artifactId>
<name>talend-httputil</name>
<version>1.0.5</version>
<version>1.0.6</version>
<properties>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -20,7 +20,7 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.10.1</version>
<version>2.11.4</version>
</dependency>
<dependency>

View File

@@ -69,13 +69,13 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.12.1</version>
<version>2.17.1</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.12.1</version>
<version>2.17.1</version>
<optional>true</optional>
</dependency>
</dependencies>
@@ -97,4 +97,4 @@
</pluginManagement>
</build>
</project>
</project>

View File

@@ -10,7 +10,6 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<cxf.version>3.1.1</cxf.version>
<odata.version>4.3.0</odata.version>
<slf4j.version>1.7.12</slf4j.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -46,9 +45,9 @@
<version>1.7.4</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.5</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>

View File

@@ -0,0 +1,191 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>talend-orc</artifactId>
<version>1.0-20211008</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<hadoop.version>3.2.2</hadoop.version>
<apache.orc.version>1.7.0</apache.orc.version>
<junit.jupiter.version>5.7.2</junit.jupiter.version>
<hamcrest.version>1.3</hamcrest.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.apache.orc</groupId>
<artifactId>orc-core</artifactId>
<version>${apache.orc.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<artifactId>jersey-server</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-servlet</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
<exclusion>
<artifactId>zookeeper</artifactId>
<groupId>org.apache.zookeeper</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<artifactId>jersey-server</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-servlet</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
<exclusion>
<artifactId>zookeeper</artifactId>
<groupId>org.apache.zookeeper</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-launcher</artifactId>
<version>1.7.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>${hamcrest.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,24 @@
package org.talend.orc;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
abstract class ORCCommonUtils {
/**
* Check that the map key type is BYTES, LONG or DOUBLE and that the value
* type is LONG, DOUBLE, BYTES, DECIMAL or TIMESTAMP.
*
* @param mapVector a MapColumnVector
* @return true if the key and value types conform to the limits described
* above.
*/
public static boolean checkMapColumnVectorTypes(MapColumnVector mapVector) {
ColumnVector.Type keyType = mapVector.keys.type;
ColumnVector.Type valueType = mapVector.values.type;
return (keyType == ColumnVector.Type.BYTES || keyType == ColumnVector.Type.LONG
|| keyType == ColumnVector.Type.DOUBLE)
&& (valueType == ColumnVector.Type.LONG || valueType == ColumnVector.Type.DOUBLE
|| valueType == ColumnVector.Type.BYTES || valueType == ColumnVector.Type.DECIMAL
|| valueType == ColumnVector.Type.TIMESTAMP);
}
}
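A quick, hedged usage sketch of the check above (same package assumed, since the class is package-private); the vector size is arbitrary:

// BYTES keys with LONG values satisfy the documented constraints
org.apache.hadoop.hive.ql.exec.vector.MapColumnVector map =
        new org.apache.hadoop.hive.ql.exec.vector.MapColumnVector(1024,
                new org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector(),
                new org.apache.hadoop.hive.ql.exec.vector.LongColumnVector());
boolean ok = ORCCommonUtils.checkMapColumnVectorTypes(map); // true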

View File

@@ -0,0 +1,442 @@
package org.talend.orc;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.TypeDescription;
public class ORCReadUtils {
public static Object readColumnByName(VectorizedRowBatch batch, String columnName, TypeDescription schema,
int rowNum) {
List<String> allColumnNames = schema.getFieldNames();
int colIndex = allColumnNames.indexOf(columnName);
if (colIndex < 0 || colIndex > batch.cols.length - 1) {
return null;
} else {
org.apache.hadoop.hive.ql.exec.vector.ColumnVector colVector = batch.cols[colIndex];
TypeDescription fieldType = schema.getChildren().get(colIndex);
int colRow = colVector.isRepeating ? 0 : rowNum;
Object value = readColumn(colVector, fieldType, colRow);
return value;
}
}
public static Object readColumn(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object columnObj = null;
if (!colVec.isNull[rowNum]) {
switch (colVec.type) {
case BYTES:
columnObj = readBytesVal(colVec, colType, rowNum);
break;
case DECIMAL:
columnObj = readDecimalVal(colVec, rowNum);
break;
case DOUBLE:
columnObj = readDoubleVal(colVec, colType, rowNum);
break;
case LIST:
columnObj = readListVal(colVec, colType, rowNum);
break;
case LONG:
columnObj = readLongVal(colVec, colType, rowNum);
break;
case MAP:
columnObj = readMapVal(colVec, colType, rowNum);
break;
case STRUCT:
columnObj = readStructVal(colVec, colType, rowNum);
break;
case TIMESTAMP:
columnObj = readTimestampVal(colVec, colType, rowNum);
break;
case UNION:
columnObj = readUnionVal(colVec, colType, rowNum);
break;
default:
throw new RuntimeException("readColumn: unsupported ORC file column type: " + colVec.type.name());
}
}
return columnObj;
}
private static Object readListVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object listValues = null;
if (!colVec.isNull[rowNum]) {
ListColumnVector listVector = (ListColumnVector) colVec;
ColumnVector listChildVector = listVector.child;
TypeDescription childType = colType.getChildren().get(0);
switch (listChildVector.type) {
case BYTES:
listValues = readBytesListValues(listVector, childType, rowNum);
break;
case DECIMAL:
listValues = readDecimalListValues(listVector, rowNum);
break;
case DOUBLE:
listValues = readDoubleListValues(listVector, rowNum);
break;
case LONG:
listValues = readLongListValues(listVector, childType, rowNum);
break;
case TIMESTAMP:
listValues = readTimestampListValues(listVector, childType, rowNum);
break;
default:
throw new RuntimeException(listVector.type.name() + " is not supported for ListColumnVectors");
}
}
return listValues;
}
private static List<Object> readLongListVector(LongColumnVector longVector, TypeDescription childType, int offset,
int numValues) {
List<Object> longList = new ArrayList<>();
for (int i = 0; i < numValues; i++) {
if (!longVector.isNull[offset + i]) {
long longVal = longVector.vector[offset + i];
if (childType.getCategory() == TypeDescription.Category.BOOLEAN) {
Boolean boolVal = longVal == 0 ? Boolean.valueOf(false) : Boolean.valueOf(true);
longList.add(boolVal);
} else if (childType.getCategory() == TypeDescription.Category.INT) {
Integer intObj = (int) longVal;
longList.add(intObj);
} else {
longList.add(longVal);
}
} else {
longList.add(null);
}
}
return longList;
}
private static Object readLongListValues(ListColumnVector listVector, TypeDescription childType, int rowNum) {
int offset = (int) listVector.offsets[rowNum];
int numValues = (int) listVector.lengths[rowNum];
LongColumnVector longVector = (LongColumnVector) listVector.child;
return readLongListVector(longVector, childType, offset, numValues);
}
private static Object readTimestampListVector(TimestampColumnVector timestampVector, TypeDescription childType,
int offset, int numValues) {
List<Object> timestampList = new ArrayList<>();
for (int i = 0; i < numValues; i++) {
if (!timestampVector.isNull[offset + i]) {
int nanos = timestampVector.nanos[offset + i];
long millisec = timestampVector.time[offset + i];
Timestamp timestamp = new Timestamp(millisec);
timestamp.setNanos(nanos);
if (childType.getCategory() == TypeDescription.Category.DATE) {
Date date = new Date(timestamp.getTime());
timestampList.add(date);
} else {
timestampList.add(timestamp);
}
} else {
timestampList.add(null);
}
}
return timestampList;
}
/**
* Read either Timestamp or Date values, depending on the definition in the
* schema.
*/
private static Object readTimestampListValues(ListColumnVector listVector, TypeDescription childType, int rowNum) {
int offset = (int) listVector.offsets[rowNum];
int numValues = (int) listVector.lengths[rowNum];
TimestampColumnVector timestampVec = (TimestampColumnVector) listVector.child;
return readTimestampListVector(timestampVec, childType, offset, numValues);
}
private static Object readDecimalListVector(DecimalColumnVector decimalVector, int offset, int numValues,
int batchRowNum) {
List<Object> decimalList = new ArrayList<>();
for (int i = 0; i < numValues; i++) {
if (!decimalVector.isNull[offset + i]) {
BigDecimal bigDecimal = decimalVector.vector[batchRowNum].getHiveDecimal().bigDecimalValue();
decimalList.add(bigDecimal);
} else {
decimalList.add(null);
}
}
return decimalList;
}
private static Object readDecimalListValues(ListColumnVector listVector, int rowNum) {
int offset = (int) listVector.offsets[rowNum];
int numValues = (int) listVector.lengths[rowNum];
DecimalColumnVector decimalVec = (DecimalColumnVector) listVector.child;
return readDecimalListVector(decimalVec, offset, numValues, rowNum);
}
private static Object readBytesListVector(BytesColumnVector bytesVec, TypeDescription childType, int offset,
int numValues) {
List<Object> bytesValList = new ArrayList<>();
for (int i = 0; i < numValues; i++) {
if (!bytesVec.isNull[offset + i]) {
byte[] byteArray = bytesVec.vector[offset + i];
int vecLen = bytesVec.length[offset + i];
int vecStart = bytesVec.start[offset + i];
byte[] vecCopy = Arrays.copyOfRange(byteArray, vecStart, vecStart + vecLen);
if (childType.getCategory() == TypeDescription.Category.STRING) {
String str = new String(vecCopy);
bytesValList.add(str);
} else {
bytesValList.add(vecCopy);
}
} else {
bytesValList.add(null);
}
}
return bytesValList;
}
private static Object readBytesListValues(ListColumnVector listVector, TypeDescription childType, int rowNum) {
int offset = (int) listVector.offsets[rowNum];
int numValues = (int) listVector.lengths[rowNum];
BytesColumnVector bytesVec = (BytesColumnVector) listVector.child;
return readBytesListVector(bytesVec, childType, offset, numValues);
}
private static Object readDoubleListVector(DoubleColumnVector doubleVec, int offset, int numValues) {
List<Object> doubleList = new ArrayList<>();
for (int i = 0; i < numValues; i++) {
if (!doubleVec.isNull[offset + i]) {
Double doubleVal = doubleVec.vector[offset + i];
doubleList.add(doubleVal);
} else {
doubleList.add(null);
}
}
return doubleList;
}
private static Object readDoubleListValues(ListColumnVector listVector, int rowNum) {
int offset = (int) listVector.offsets[rowNum];
int numValues = (int) listVector.lengths[rowNum];
DoubleColumnVector doubleVec = (DoubleColumnVector) listVector.child;
return readDoubleListVector(doubleVec, offset, numValues);
}
@SuppressWarnings("unchecked")
private static List<Object> readMapVector(ColumnVector mapVector, TypeDescription childType, int offset,
int numValues, int rowNum) {
List<Object> mapList = null;
switch (mapVector.type) {
case BYTES:
mapList = (List<Object>) readBytesListVector((BytesColumnVector) mapVector, childType, offset, numValues);
break;
case DECIMAL:
mapList = (List<Object>) readDecimalListVector((DecimalColumnVector) mapVector, offset, numValues, rowNum);
break;
case DOUBLE:
mapList = (List<Object>) readDoubleListVector((DoubleColumnVector) mapVector, offset, numValues);
break;
case LONG:
mapList = readLongListVector((LongColumnVector) mapVector, childType, offset, numValues);
break;
case TIMESTAMP:
mapList = (List<Object>) readTimestampListVector((TimestampColumnVector) mapVector, childType, offset,
numValues);
break;
default:
throw new RuntimeException(mapVector.type.name() + " is not supported for MapColumnVectors");
}
return mapList;
}
/**
* <p>
* Read a Map column value (e.g., a set of keys and their associated values).
* </p>
* <p>
* The Map key and value types are the first and second children in the children
* TypeDescription List. From the TypeDescription source:
* </p>
*
* <pre>
* result.children.add(keyType);
* result.children.add(valueType);
* </pre>
*/
private static Object readMapVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Map<Object, Object> objMap = new HashMap<>();
MapColumnVector mapVector = (MapColumnVector) colVec;
if (ORCCommonUtils.checkMapColumnVectorTypes(mapVector)) {
int mapSize = (int) mapVector.lengths[rowNum];
int offset = (int) mapVector.offsets[rowNum];
List<TypeDescription> mapTypes = colType.getChildren();
TypeDescription keyType = mapTypes.get(0);
TypeDescription valueType = mapTypes.get(1);
ColumnVector keyChild = mapVector.keys;
ColumnVector valueChild = mapVector.values;
List<Object> keyList = readMapVector(keyChild, keyType, offset, mapSize, rowNum);
List<Object> valueList = readMapVector(valueChild, valueType, offset, mapSize, rowNum);
assert (keyList.size() == valueList.size());
for (int i = 0; i < keyList.size(); i++) {
objMap.put(keyList.get(i), valueList.get(i));
}
} else {
throw new RuntimeException("readMapVal: unsupported key or value types");
}
return objMap;
}
private static Object readUnionVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Pair<TypeDescription, Object> columnValuePair;
UnionColumnVector unionVector = (UnionColumnVector) colVec;
int tagVal = unionVector.tags[rowNum];
List<TypeDescription> unionFieldTypes = colType.getChildren();
if (tagVal < unionFieldTypes.size()) {
TypeDescription fieldType = unionFieldTypes.get(tagVal);
if (tagVal < unionVector.fields.length) {
ColumnVector fieldVector = unionVector.fields[tagVal];
int colRow = fieldVector.isRepeating ? 0 : rowNum;
Object unionValue = readColumn(fieldVector, fieldType, colRow);
columnValuePair = new ImmutablePair<>(fieldType, unionValue);
} else {
throw new RuntimeException("readUnionVal: union tag value out of range for union column vectors");
}
} else {
throw new RuntimeException("readUnionVal: union tag value out of range for union types");
}
return columnValuePair;
}
private static Object readStructVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object structObj = null;
if (!colVec.isNull[rowNum]) {
List<Object> fieldValList = new ArrayList<>();
StructColumnVector structVector = (StructColumnVector) colVec;
ColumnVector[] fieldVec = structVector.fields;
List<TypeDescription> fieldTypes = colType.getChildren();
assert (fieldVec.length == fieldTypes.size());
for (int i = 0; i < fieldVec.length; i++) {
int colRow = fieldVec[i].isRepeating ? 0 : rowNum;
Object fieldObj = readColumn(fieldVec[i], fieldTypes.get(i), colRow);
fieldValList.add(fieldObj);
}
structObj = fieldValList;
}
return structObj;
}
private static Object readTimestampVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object timestampVal = null;
if (!colVec.isNull[rowNum]) {
TimestampColumnVector timestampVec = (TimestampColumnVector) colVec;
int nanos = timestampVec.nanos[rowNum];
long millisec = timestampVec.time[rowNum];
Timestamp timestamp = new Timestamp(millisec);
timestamp.setNanos(nanos);
timestampVal = timestamp;
if (colType.getCategory() == TypeDescription.Category.DATE) {
timestampVal = new Date(timestamp.getTime());
}
}
return timestampVal;
}
private static Object readDecimalVal(ColumnVector colVec, int rowNum) {
Object decimalObj = null;
if (!colVec.isNull[rowNum]) {
DecimalColumnVector decimalVec = (DecimalColumnVector) colVec;
decimalObj = decimalVec.vector[rowNum].getHiveDecimal().bigDecimalValue().setScale(decimalVec.scale);
}
return decimalObj;
}
/**
* Read a Long or Boolean value
*
* @param colVec the column vector
* @param colType the type of the column
* @param rowNum the ORC file row number.
* @return a Boolean or Long object
*/
private static Object readLongVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object colObj = null;
if (!colVec.isNull[rowNum]) {
LongColumnVector longVec = (LongColumnVector) colVec;
Long longVal = longVec.vector[rowNum];
colObj = longVal;
if (colType.getCategory() == TypeDescription.Category.INT) {
colObj = longVal.intValue();
} else if (colType.getCategory() == TypeDescription.Category.BOOLEAN) {
colObj = longVal == 1 ? Boolean.TRUE : Boolean.FALSE;
} else if (colType.getCategory() == TypeDescription.Category.DATE) {
colObj = new Date(longVal * 86400000);
} else if (colType.getCategory() == TypeDescription.Category.BYTE) {
colObj = longVal.byteValue();
} else if (colType.getCategory() == TypeDescription.Category.SHORT) {
colObj = longVal.shortValue();
}
}
return colObj;
}
/**
* Read a Double or Float value
*
* @param colVec the column vector
* @param colType the type of the column
* @param rowNum the ORC file row number.
* @return a Double or Float object
*/
private static Object readDoubleVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object colObj = null;
if (!colVec.isNull[rowNum]) {
DoubleColumnVector longVec = (DoubleColumnVector) colVec;
Double doubleVal = longVec.vector[rowNum];
colObj = doubleVal;
if (colType.getCategory() == TypeDescription.Category.FLOAT) {
colObj = doubleVal.floatValue();
}
}
return colObj;
}
private static Object readBytesVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
Object bytesObj = null;
if (!colVec.isNull[rowNum]) {
BytesColumnVector bytesVector = (BytesColumnVector) colVec;
byte[] columnBytes = bytesVector.vector[rowNum];
int vecLen = bytesVector.length[rowNum];
int vecStart = bytesVector.start[rowNum];
byte[] vecCopy = Arrays.copyOfRange(columnBytes, vecStart, vecStart + vecLen);
if (colType.getCategory() == TypeDescription.Category.STRING
|| colType.getCategory() == TypeDescription.Category.VARCHAR
|| colType.getCategory() == TypeDescription.Category.CHAR) {
// Decode with UTF-8 to match the charset used on the write side.
bytesObj = new String(vecCopy, java.nio.charset.StandardCharsets.UTF_8);
} else {
bytesObj = vecCopy;
}
}
return bytesObj;
}
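/*
* Example (illustrative sketch, mirroring the unit tests; variable names are
* hypothetical): reading every column of a batch row by name.
*
* for (String name : schema.getFieldNames()) {
* Object value = ORCReadUtils.readColumnByName(batch, name, schema, row);
* }
*/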
}

View File

@@ -0,0 +1,884 @@
package org.talend.orc;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MultiValuedColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.orc.TypeDescription;
public class ORCWriteUtils {
private static final Predicate<Object> isInteger = Integer.class::isInstance;
private static final Predicate<Object> isLong = Long.class::isInstance;
private static final Predicate<Object> isDouble = Double.class::isInstance;
private static final Predicate<Object> isString = String.class::isInstance;
private static final Predicate<Object> isBigDecimal = BigDecimal.class::isInstance;
private static final Predicate<Object> isDate = Date.class::isInstance;
public static void setColumnByName(VectorizedRowBatch batch, String columnName, TypeDescription schema,
Object colVal, int rowNum) {
List<String> allColumnNames = schema.getFieldNames();
int colIndex = allColumnNames.indexOf(columnName);
if (colIndex < 0 || colIndex > batch.cols.length - 1) {
return;
} else {
org.apache.hadoop.hive.ql.exec.vector.ColumnVector colVector = batch.cols[colIndex];
TypeDescription fieldType = schema.getChildren().get(colIndex);
setColumn(colVal, fieldType, columnName, colVector, rowNum);
}
}
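/*
* Example (illustrative sketch; the schema and field names are hypothetical):
* writing one row through setColumnByName for a struct<id:bigint,name:string>
* schema.
*
* TypeDescription schema = TypeDescription.fromString("struct<id:bigint,name:string>");
* VectorizedRowBatch batch = schema.createRowBatch();
* int row = batch.size++;
* setColumnByName(batch, "id", schema, 42L, row);
* setColumnByName(batch, "name", schema, "talend", row);
*/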
/**
* Add a column value that is a String or a byte[] array.
*
* @param colVal the column value object
* @param fieldName the name of the field (for error reporting)
* @param bytesColVector the BytesColumnVector that the byte array will be added
* to.
* @param rowNum the ORC file row number
*/
private static void setByteColumnVector(Object colVal, String fieldName, BytesColumnVector bytesColVector,
int rowNum) {
if (colVal instanceof byte[] || colVal instanceof String || colVal instanceof Character) {
byte[] byteVec;
if (colVal instanceof String) {
String strVal = (String) colVal;
byteVec = strVal.getBytes(StandardCharsets.UTF_8);
} else if (colVal instanceof Character) {
String strVal = String.valueOf((char) colVal);
byteVec = strVal.getBytes(StandardCharsets.UTF_8);
} else {
byteVec = (byte[]) colVal;
}
bytesColVector.setRef(rowNum, byteVec, 0, byteVec.length);
} else {
throw new RuntimeException(orcExceptionMsg("byte[] or String type expected for field ", fieldName, rowNum));
}
}
private static void setDecimalVector(Object colVal, String fieldName, DecimalColumnVector decimalColVector,
int rowNum) {
if (colVal instanceof BigDecimal) {
BigDecimal bigDecimal = (BigDecimal) colVal;
decimalColVector.precision = (short) bigDecimal.precision();
decimalColVector.scale = (short) bigDecimal.scale();
HiveDecimal hiveDecimal = HiveDecimal.create(bigDecimal);
HiveDecimalWritable writeableDecimal = new HiveDecimalWritable(hiveDecimal);
decimalColVector.vector[rowNum] = writeableDecimal;
} else {
throw new RuntimeException(orcExceptionMsg("BigDecimal type expected for field ", fieldName, rowNum));
}
}
private static void setDoubleVector(Object colVal, String fieldName, DoubleColumnVector doubleVector, int rowNum) {
if (colVal instanceof Double) {
doubleVector.vector[rowNum] = (Double) colVal;
} else if (colVal instanceof Float) {
Float fltVal = (Float) colVal;
doubleVector.vector[rowNum] = fltVal.doubleValue();
} else if (colVal instanceof BigDecimal) {
doubleVector.vector[rowNum] = ((BigDecimal) colVal).doubleValue();
} else {
throw new RuntimeException(orcExceptionMsg("Double/Float/BigDecimal type expected for field ", fieldName, rowNum));
}
}
/**
* Initialize a LongColumnVector value.
*
* @param colVal an object of type Boolean, Integer, Long or BigInteger.
* @param fieldName the field name in the schema
* @param longVector the LongColumnVector
* @param rowNum the row number
*/
private static void setLongColumnVector(Object colVal, String fieldName, LongColumnVector longVector, int rowNum) {
if (colVal instanceof Boolean) {
Boolean bool = (Boolean) colVal;
longVector.vector[rowNum] = bool ? 1L : 0L;
} else if (colVal instanceof Byte) {
longVector.vector[rowNum] = (Byte) colVal;
} else if (colVal instanceof Short) {
longVector.vector[rowNum] = (Short) colVal;
} else if (colVal instanceof Integer) {
longVector.vector[rowNum] = (Integer) colVal;
} else if (colVal instanceof Long) {
longVector.vector[rowNum] = (Long) colVal;
} else if (colVal instanceof BigInteger) {
BigInteger bigInt = (BigInteger) colVal;
longVector.vector[rowNum] = bigInt.longValue();
} else {
throw new RuntimeException(orcExceptionMsg("Long or Integer type expected for field ", fieldName, rowNum));
}
}
private static void setDateColumnVector(Object colVal, String fieldName, DateColumnVector dateVector, int rowNum) {
if (colVal instanceof Date) {
Date dateVal = (Date) colVal;
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
cal.setTime(dateVal);
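// DateColumnVector stores days since the Unix epoch; derive the epoch day from the UTC calendar fields to avoid timezone drift.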
long epochDay = LocalDate.of(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH)+1, cal.get(Calendar.DAY_OF_MONTH)).toEpochDay();
dateVector.vector[rowNum] = epochDay;
} else {
throw new RuntimeException(orcExceptionMsg("Date type expected for field ", fieldName, rowNum));
}
}
private static void setTimestampVector(Object colVal, String fieldName, TimestampColumnVector timestampVector,
int rowNum) {
if (colVal instanceof Timestamp) {
timestampVector.set(rowNum, (Timestamp) colVal);
} else if (colVal instanceof Date) {
Date date = (Date) colVal;
Timestamp ts = new Timestamp(date.getTime());
timestampVector.set(rowNum, ts);
} else {
throw new RuntimeException(
orcExceptionMsg("Date or Timestamp type expected for field ", fieldName, rowNum));
}
}
/**
* <p>
* A union column can contain column vectors of more than one type. In the
* TypeDescription createUnion() is called to create a TypeDescription for a
* union column. The union values are added by calling the addUnionChild()
* method on this TypeDescription object.
* </p>
* <p>
* The class fields in the UnionColumnVector are shown below:
* </p>
*
* <pre>
* public class UnionColumnVector extends ColumnVector {
* public int[] tags;
* public ColumnVector[] fields;
* </pre>
* <p>
* A tag value ({@code tags[rowNum]}) is associated with each field value
* ({@code fields[rowNum]}). The tag value serves as an index for the field
* type. For example, if there are three field types defined:
* <ol>
* <li>Long</li>
* <li>Double</li>
* <li>String</li>
* </ol>
* the tag will have a value in the range [0..2].
* </p>
* <p>
* The tag value is needed to initialize the ColumnVector since without the tag
* there is no way to know which union child should be initialized.
* </p>
*
* @param colVal a Pair&lt;TypeDescription, Object&gt; object with the
* union child type and the object that will be used to
* initialize the union child ColumnVector.
* @param unionTypeDesc the TypeDescription of the union column from the schema
* @param fieldName The name of the union field
* @param unionVector The UnionColumnVector to be initialized
* @param rowNum the ORC file row number.
*/
private static void setUnionColumnVector(Object colVal, TypeDescription unionTypeDesc, String fieldName,
UnionColumnVector unionVector, int rowNum) {
@SuppressWarnings("unchecked")
Pair<TypeDescription, Object> unionValuePair = (Pair<TypeDescription, Object>) colVal;
TypeDescription unionValType = unionValuePair.getLeft();
List<TypeDescription> unionChildrenTypes = unionTypeDesc.getChildren();
Object unionColVal = unionValuePair.getRight();
boolean found = false;
for (int i = 0; i < unionChildrenTypes.size(); i++) {
if (unionChildrenTypes.get(i).getCategory() == unionValType.getCategory()) {
unionVector.tags[rowNum] = i;
ColumnVector unionFieldVec = unionVector.fields[i];
setColumn(unionColVal, unionChildrenTypes.get(i), fieldName, unionFieldVec, rowNum);
found = true;
break;
}
}
if (!found) {
throw new RuntimeException("writeUnionColumnVector: Bad type enumeration "
+ unionValType.getCategory().getName() + " passed for field " + fieldName);
}
}
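/*
* Example (illustrative sketch; the column name is hypothetical): writing a
* uniontype<bigint,string> column. The value is wrapped in a Pair with the
* TypeDescription of the chosen union child; the tag index is derived from
* the child's position in the union schema.
*
* Pair<TypeDescription, Object> unionVal = Pair.of(TypeDescription.createLong(), 42L);
* setUnionColumnVector(unionVal, unionTypeDesc, "u_col", unionVector, rowNum);
*/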
private static void setLongListVector(List<Object> longValList, LongColumnVector longVector, int offset,
String fieldName) {
for (int i = 0; i < longValList.size(); i++) {
Object objVal = longValList.get(i);
if (objVal != null) {
if (objVal instanceof Integer) {
longVector.vector[offset + i] = (Integer) objVal;
} else if (objVal instanceof Long) {
longVector.vector[offset + i] = (Long) objVal;
} else {
throw new RuntimeException("List<Integer> expected for field " + fieldName);
}
} else {
longVector.isNull[offset + i] = true;
longVector.noNulls = false;
}
}
}
private static void setLongList(List<Object> colValList, ListColumnVector listVector, String fieldName,
int rowNum) {
LongColumnVector longVector = (LongColumnVector) listVector.child;
int offset = (int) listVector.offsets[rowNum];
setLongListVector(colValList, longVector, offset, fieldName);
}
private static void setDoubleListVector(List<Object> doubleValList, DoubleColumnVector doubleVector, int offset,
String fieldName) {
for (int i = 0; i < doubleValList.size(); i++) {
Object objVal = doubleValList.get(i);
if (objVal != null) {
if (objVal instanceof Double) {
doubleVector.vector[offset + i] = (Double) objVal;
} else if (objVal instanceof Float) {
Float fltVal = (Float) objVal;
doubleVector.vector[offset + i] = fltVal.doubleValue();
} else {
throw new RuntimeException("List<Double> expected for field " + fieldName);
}
} else {
doubleVector.isNull[offset + i] = true;
doubleVector.noNulls = false;
}
}
}
private static void setDoubleList(List<Object> doubleValList, ListColumnVector listVector, String fieldName,
int rowNum) {
DoubleColumnVector vecChild = (DoubleColumnVector) listVector.child;
int offset = (int) listVector.offsets[rowNum];
setDoubleListVector(doubleValList, vecChild, offset, fieldName);
}
private static void setTimestampListVector(List<Object> valueList, TimestampColumnVector timestampVector,
int offset, String fieldName) {
for (int i = 0; i < valueList.size(); i++) {
Object objVal = valueList.get(i);
if (objVal != null) {
if (objVal instanceof Date) {
Timestamp ts = (objVal instanceof Timestamp) ? (Timestamp) objVal
: new Timestamp(((Date) objVal).getTime());
timestampVector.time[offset + i] = ts.getTime();
timestampVector.nanos[offset + i] = ts.getNanos();
} else {
throw new RuntimeException("List<Date> or List<Timestamp> expected for field " + fieldName);
}
} else {
timestampVector.isNull[offset + i] = true;
timestampVector.noNulls = false;
}
}
}
/**
* Initialize the vector values for a ListColumnVector of Date or Timestamp
* values.
*
* @param colValList a list of Timestamp or java.util.Date objects
* @param listVector A ListColumnVector with a child that will contain the
* vector values.
* @param fieldName The field name in the schema for this ORC element
* @param rowNum The ORC file row number
*/
private static void setTimestampList(List<Object> colValList, ListColumnVector listVector, String fieldName,
int rowNum) {
TimestampColumnVector timestampVector = (TimestampColumnVector) listVector.child;
int offset = (int) listVector.offsets[rowNum];
setTimestampListVector(colValList, timestampVector, offset, fieldName);
}
private static void setDecimalListVector(List<Object> decimalValList, DecimalColumnVector decimalVector, int offset,
String fieldName) {
for (int i = 0; i < decimalValList.size(); i++) {
Object objVal = decimalValList.get(i);
if (objVal != null) {
if (objVal instanceof BigDecimal) {
BigDecimal bigDecimal = (BigDecimal) objVal;
decimalVector.precision = (short) bigDecimal.precision();
decimalVector.scale = (short) bigDecimal.scale();
HiveDecimal hiveDecimal = HiveDecimal.create(bigDecimal);
HiveDecimalWritable writeableDecimal = new HiveDecimalWritable(hiveDecimal);
decimalVector.vector[offset + i] = writeableDecimal;
} else {
throw new RuntimeException("BigDecimal value expected for field " + fieldName);
}
} else {
decimalVector.isNull[offset + i] = true;
decimalVector.noNulls = false;
}
}
}
/**
*
* @param colValList a list of BigDecimal values to initialize the
* ListColumnVector child
* @param listVector the ListColumnVector with the DecimalColumnVector child
* @param fieldName the field name for the ListColumnVector/DecimalColumnVector
* column
* @param rowNum the ORC file row number
*/
private static void setDecimalList(List<Object> colValList, ListColumnVector listVector, String fieldName,
int rowNum) {
DecimalColumnVector decimalVector = (DecimalColumnVector) listVector.child;
int offset = (int) listVector.offsets[rowNum];
setDecimalListVector(colValList, decimalVector, offset, fieldName);
}
private static void setBytesListVector(List<Object> valueList, BytesColumnVector bytesVector, int offset,
String fieldName) {
for (int i = 0; i < valueList.size(); i++) {
Object objVal = valueList.get(i);
if (objVal != null) {
if (objVal instanceof byte[] || objVal instanceof String) {
byte[] byteVec = (objVal instanceof byte[]) ? (byte[]) objVal
: ((String) objVal).getBytes(StandardCharsets.UTF_8);
bytesVector.setRef(offset + i, byteVec, 0, byteVec.length);
} else {
throw new RuntimeException("String or byte[] value expected for field " + fieldName);
}
} else {
bytesVector.isNull[offset + i] = true;
bytesVector.length[offset + i] = 0;
bytesVector.noNulls = false;
}
}
}
/**
* Initialize a ListColumnVector with a BytesColumnVector child with byte[]
* values.
*
* @param colValList a list of byte[] or String values
* @param listVector the parent ListColumnVector
* @param fieldName the field name for the ORC column that contains the
* ListColumnVector
* @param rowNum the ORC file row number
*/
private static void setBytesList(List<Object> colValList, ListColumnVector listVector, String fieldName,
int rowNum) {
BytesColumnVector bytesVector = (BytesColumnVector) listVector.child;
int offset = (int) listVector.offsets[rowNum];
setBytesListVector(colValList, bytesVector, offset, fieldName);
}
private static void setMultiValuedVectorParameters(MultiValuedColumnVector multiVector, int vecLength, int rowNum) {
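// lengths[rowNum] holds the element count for this row; offsets are the running sum of the previous rows' lengths.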
multiVector.lengths[rowNum] = vecLength;
if (rowNum > 0) {
multiVector.offsets[rowNum] = multiVector.lengths[rowNum - 1] + multiVector.offsets[rowNum - 1];
}
}
private static void setListVectorParameters(ListColumnVector listVec, int maxBatchSize, int vecLength, int rowNum) {
setMultiValuedVectorParameters(listVec, vecLength, rowNum);
listVec.child.ensureSize(maxBatchSize * vecLength, true);
}
/**
* Initialize a ListColumnVector. The child of the vector is limited to the
* scalar types long, double, String (or byte[])), BigDecimal or Date (or
* Timestamp).
*
* @param colVal a List&lt;Object&gt;
* @param typeDesc the schema definition for this column
* @param fieldName the column field name
* @param listVector the ListColumnVector parent of the vector type child
* @param rowNum the ORC file row number.
*/
private static void setListColumnVector(Object colVal, TypeDescription typeDesc, String fieldName,
ListColumnVector listVector, int rowNum) {
if (colVal instanceof List) {
@SuppressWarnings("unchecked")
List<Object> objValList = (List<Object>) colVal;
final int maxBatchSize = typeDesc.createRowBatch().getMaxSize();
setListVectorParameters(listVector, maxBatchSize, objValList.size(), rowNum);
ColumnVector.Type childType = listVector.child.type;
switch (childType) {
case LONG:
setLongList(objValList, listVector, fieldName, rowNum);
break;
case DOUBLE:
setDoubleList(objValList, listVector, fieldName, rowNum);
break;
case BYTES:
setBytesList(objValList, listVector, fieldName, rowNum);
break;
case DECIMAL:
setDecimalList(objValList, listVector, fieldName, rowNum);
break;
case TIMESTAMP:
setTimestampList(objValList, listVector, fieldName, rowNum);
break;
default:
throw new RuntimeException(childType.name() + " is not supported for ListColumnVector columns");
}
} else {
throw new RuntimeException("List value expected for field " + fieldName);
}
}
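/*
* Example (illustrative sketch; the column name is hypothetical): writing a
* list<string> column value for one row.
*
* TypeDescription listType = TypeDescription.createList(TypeDescription.createString());
* List<Object> values = Arrays.asList("a", "b", "c");
* setListColumnVector(values, listType, "l_col", listVector, rowNum);
*/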
/**
* Test that all elements in an Object list are of a particular type
*
* @param objList the Object list that is tested
* @param typeTest a function that compares against a particular Object type
* @return true if all elements are of the test type, false if one or more
* elements are not of that type.
*/
private static boolean isListType(List<Object> objList, Predicate<Object> typeTest) {
return objList.stream().allMatch(typeTest::test);
}
/**
* Initialize a ColumnVector with Long values.
*
* @param valueList a list of Long values
* @param colVector the LongColumnVector that will be initialized with the Long
* values
* @param offset the offset[rownum] value for the array
* @param fieldName the field name for the Map column
*/
private static void setLongMapValues(List<Object> valueList, ColumnVector colVector, int offset, String fieldName) {
if (isListType(valueList, isLong) || isListType(valueList, isInteger)) {
LongColumnVector longVector = (LongColumnVector) colVector;
setLongListVector(valueList, longVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " Long values expected");
}
}
/**
* Initialize a ColumnVector with Double values.
*
* @param valueList a list of Double values
* @param colVector the DoubleColumnVector that will be initialized with the
* Double values
* @param offset the offset[rownum] value for the array
* @param fieldName the field name for the Map column
*/
private static void setDoubleMapValues(List<Object> valueList, ColumnVector colVector, int offset,
String fieldName) {
if (isListType(valueList, isDouble)) {
DoubleColumnVector doubleVector = (DoubleColumnVector) colVector;
setDoubleListVector(valueList, doubleVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " Double values expected");
}
}
/**
* Initialize a ColumnVector with String values.
*
* @param valueList a list of String values
* @param colVector the BytesColumnVector that will be initialized with the
* String values
* @param offset the offset[rownum] value for the array
* @param fieldName the field name for the Map column
*/
private static void setStringMapValues(List<Object> valueList, ColumnVector colVector, int offset,
String fieldName) {
if (isListType(valueList, isString)) {
BytesColumnVector bytesVector = (BytesColumnVector) colVector;
setBytesListVector(valueList, bytesVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " String values expected");
}
}
/**
* Initialize a ColumnVector with BigDecimal values.
*
* @param valueList a list of BigDecimal
* @param colVector the DecimalColumnVector that will be initialized with the
* BigDecimal values
* @param offset the offset[rownum] value for the array
* @param fieldName the field name for the Map column
*/
private static void setDecimalMapValues(List<Object> valueList, ColumnVector colVector, int offset,
String fieldName) {
if (isListType(valueList, isBigDecimal)) {
DecimalColumnVector decimalVector = (DecimalColumnVector) colVector;
setDecimalListVector(valueList, decimalVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " BigDecimal values expected");
}
}
/**
* Initialize a ColumnVector with timestamp values.
*
* @param valueList a list of Date (or Timestamp) objects
* @param colVector the TimestampColumnVector that will be initialized with the
* Timestamp values
* @param offset the offset[rownum] value for the array
* @param fieldName the field name for the Map column
*/
private static void setTimestampMapValues(List<Object> valueList, ColumnVector colVector, int offset,
String fieldName) {
if (isListType(valueList, isDate)) {
TimestampColumnVector timestampVector = (TimestampColumnVector) colVector;
setTimestampListVector(valueList, timestampVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " Date or Timestamp values expected");
}
}
/**
* Set the MapColumn value array vector. The type for this vector is limited to
* long, double, bytes (String), Decimal and Timestamp.
*
* @param valueList a list of Objects to initialize the Map column value array.
* @param colVector the column array vector to be initialized with the map
* values.
* @param offset the offset[rowNum] from the parent MapColumnVector
* @param fieldName the name of the field for the MapColumnVector.
*/
private static void setMapValueVector(List<Object> valueList, ColumnVector colVector, int offset,
String fieldName) {
switch (colVector.type) {
case LONG:
setLongMapValues(valueList, colVector, offset, fieldName);
break;
case DOUBLE:
setDoubleMapValues(valueList, colVector, offset, fieldName);
break;
case BYTES:
setStringMapValues(valueList, colVector, offset, fieldName);
break;
case DECIMAL:
setDecimalMapValues(valueList, colVector, offset, fieldName);
break;
case TIMESTAMP:
setTimestampMapValues(valueList, colVector, offset, fieldName);
break;
default:
throw new RuntimeException(
"For field " + fieldName + " values must be long, double, String, BigDecimal or Timestamp");
}
}
/**
* <p>
* Initialize a MapColumnVector with Long key values.
* </p>
*
* @param mapSet a set of {key, value} pairs, where the key values are Long
* objects. The elements of this set will be used to initialize
* the key and value array column vectors that are children of
* the MapColumnVector.
* @param mapVector the MapColumnVector. This ColumnVector has children for the
* key and value arrays.
* @param fieldName the field name for the map column vector column.
* @param rowNum the ORC file row number.
*/
private static void setLongKeyMap(Set<Map.Entry<Object, Object>> mapSet, MapColumnVector mapVector,
String fieldName, int rowNum) {
List<Object> keyValueList = mapSet.stream().map(Map.Entry::getKey).collect(Collectors.toList());
if (isListType(keyValueList, isLong)) {
LongColumnVector longVector = (LongColumnVector) mapVector.keys;
int offset = (int) mapVector.offsets[rowNum];
// set the key vector
setLongListVector(keyValueList, longVector, offset, fieldName);
// set the value vector
ColumnVector valueVector = mapVector.values;
List<Object> valueList = mapSet.stream().map(Map.Entry::getValue).collect(Collectors.toList());
setMapValueVector(valueList, valueVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " Long key type expected to match schema");
}
}
/**
* <p>
* Initialize a MapColumnVector with Double key values.
* </p>
*
* @param mapSet a set of {key, value} pairs, where the key values are Double
* objects. The elements of this set will be used to initialize
* the key and value array column vectors that are children of
* the MapColumnVector.
* @param mapVector the MapColumnVector. This ColumnVector has children for the
* key and value arrays.
* @param fieldName the field name for the map column vector column.
* @param rowNum the ORC file row number.
*/
private static void setDoubleKeyMap(Set<Map.Entry<Object, Object>> mapSet, MapColumnVector mapVector,
String fieldName, int rowNum) {
List<Object> keyValueList = mapSet.stream().map(Map.Entry::getKey).collect(Collectors.toList());
if (isListType(keyValueList, isDouble)) {
DoubleColumnVector doubleVector = (DoubleColumnVector) mapVector.keys;
int offset = (int) mapVector.offsets[rowNum];
// set the key vector
setDoubleListVector(keyValueList, doubleVector, offset, fieldName);
// set the value vector
ColumnVector valueVector = mapVector.values;
List<Object> valueList = mapSet.stream().map(Map.Entry::getValue).collect(Collectors.toList());
setMapValueVector(valueList, valueVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " Long key type expected to match schema");
}
}
/**
* <p>
* Initialize a MapColumnVector with String key values.
* </p>
*
* @param mapSet a set of {key, value} pairs, where the key values are String
* objects. The elements of this set will be used to initialize
* the key and value array column vectors that are children of
* the MapColumnVector.
* @param mapVector the MapColumnVector. This ColumnVector has children for the
* key and value arrays.
* @param fieldName the field name for the map column vector column.
* @param rowNum the ORC file row number.
*/
private static void setStringKeyMap(Set<Map.Entry<Object, Object>> mapSet, MapColumnVector mapVector,
String fieldName, int rowNum) {
List<Object> keyValueList = mapSet.stream().map(Map.Entry::getKey).collect(Collectors.toList());
if (isListType(keyValueList, isString)) {
BytesColumnVector byteVector = (BytesColumnVector) mapVector.keys;
int offset = (int) mapVector.offsets[rowNum];
// set the key array vector
setBytesListVector(keyValueList, byteVector, offset, fieldName);
// set the value array vector
ColumnVector valueVector = mapVector.values;
List<Object> valueList = mapSet.stream().map(Map.Entry::getValue).collect(Collectors.toList());
setMapValueVector(valueList, valueVector, offset, fieldName);
} else {
throw new RuntimeException("For field " + fieldName + " Long key type expected to match schema");
}
}
private static void setMapVectorParameters(MapColumnVector mapVec, int maxBatchSize, int vecLength, int rowNum) {
setMultiValuedVectorParameters(mapVec, vecLength, rowNum);
mapVec.keys.ensureSize(maxBatchSize * vecLength, true);
mapVec.values.ensureSize(maxBatchSize * vecLength, true);
}
/**
* <p>
* Set the Map key and value elements for a MapColumnVector
* </p>
* <p>
* A MapColumnVector has a single ColumnVector type for each of the map key and
* map values. For example, the ColumnVector for the key values could be a
* BytesColumnVector (a set of String keys). The values could be a
* LongColumnVector.
* </p>
* <p>
* In the documentation there is no restriction given for the map key type.
* This code limits the key types to scalar values: string, long, double.
* </p>
* <p>
* The documentation does not limit the map value types. This code limits the
* map values to the same types that are supported for ListColumnVectors: long,
* double, bytes (String), Decimal and Timestamp.
* </p>
*
* @param colVal a HashMap object
* @param typeDesc the schema description for the MapColumnVector column
* @param fieldName the field name of the MapColumnVector column
* @param mapVector The parent MapColumnVector
* @param rowNum the ORC file row number.
*/
private static void setMapColumnVector(Object colVal, TypeDescription typeDesc, String fieldName,
MapColumnVector mapVector, int rowNum) {
if (colVal == null) {
mapVector.isNull[rowNum] = true;
mapVector.noNulls = false;
} else {
if (colVal instanceof HashMap) {
@SuppressWarnings("unchecked")
Map<Object, Object> rawMap = (HashMap<Object, Object>) colVal;
int mapLen = rawMap.size();
final int maxBatchSize = typeDesc.createRowBatch().getMaxSize();
setMapVectorParameters(mapVector, maxBatchSize, mapLen, rowNum);
if (ORCCommonUtils.checkMapColumnVectorTypes(mapVector)) {
Set<Map.Entry<Object, Object>> mapSet = rawMap.entrySet();
switch (mapVector.keys.type) {
case LONG:
setLongKeyMap(mapSet, mapVector, fieldName, rowNum);
break;
case DOUBLE:
setDoubleKeyMap(mapSet, mapVector, fieldName, rowNum);
break;
case BYTES:
setStringKeyMap(mapSet, mapVector, fieldName, rowNum);
break;
default:
/* Unreachable: checkMapColumnVectorTypes limits key types to LONG, DOUBLE and BYTES. */
break;
}
} else {
throw new RuntimeException(
"For field " + fieldName + " key types are limited to string, long and double. "
+ "value types are limited to long, double, String, decimal and timestamp");
}
}
}
}
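/*
* Example (illustrative sketch; the column name is hypothetical): writing a
* map<string,bigint> column value for one row.
*
* Map<Object, Object> colVal = new HashMap<>();
* colVal.put("count", 7L);
* setMapColumnVector(colVal, TypeDescription.fromString("map<string,bigint>"),
* "m_col", mapVector, rowNum);
*/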
/**
* Set a column value in an ORC a row that will be written to the ORC file.
*
* @param colVal an Object containing the values to be written to the column
* @param typeDesc the TypeDescription from the schema that defines the column
* @param fieldName the column field name
* @param vector the ColumnVector that will be initialized with the values in
* the colVal argument.
* @param rowNum the ORC file row number.
*/
public static void setColumn(Object colVal, TypeDescription typeDesc, String fieldName, ColumnVector vector,
int rowNum) {
if (colVal == null) {
vector.isNull[rowNum] = true;
vector.noNulls = false;
} else {
switch (vector.type) {
case LONG: {
if (vector instanceof DateColumnVector) {
DateColumnVector dateVector = (DateColumnVector) vector;
setDateColumnVector(colVal, fieldName, dateVector, rowNum);
} else {
LongColumnVector longVector = (LongColumnVector) vector;
setLongColumnVector(colVal, fieldName, longVector, rowNum);
}
break;
}
case DOUBLE: {
DoubleColumnVector doubleVector = (DoubleColumnVector) vector;
setDoubleVector(colVal, fieldName, doubleVector, rowNum);
break;
}
case BYTES: {
BytesColumnVector bytesColVector = (BytesColumnVector) vector;
setByteColumnVector(colVal, fieldName, bytesColVector, rowNum);
break;
}
case DECIMAL: {
DecimalColumnVector decimalVector = (DecimalColumnVector) vector;
setDecimalVector(colVal, fieldName, decimalVector, rowNum);
break;
}
case DECIMAL_64:
throw new RuntimeException("Field: " + fieldName + ", Decimal64ColumnVector is not supported");
case TIMESTAMP: {
TimestampColumnVector timestampVector = (TimestampColumnVector) vector;
setTimestampVector(colVal, fieldName, timestampVector, rowNum);
break;
}
case INTERVAL_DAY_TIME:
throw new RuntimeException("Field: " + fieldName + ", HiveIntervalDayTime is not supported");
case STRUCT: {
// Writing STRUCT columns is not implemented yet; the value is silently ignored.
// StructColumnVector structVector = (StructColumnVector) vector;
// setStructColumnVector(colVal, typeDesc, fieldName, structVector, rowNum);
break;
}
case LIST: {
ListColumnVector listVector = (ListColumnVector) vector;
setListColumnVector(colVal, typeDesc, fieldName, listVector, rowNum);
break;
}
case MAP: {
MapColumnVector mapVector = (MapColumnVector) vector;
setMapColumnVector(colVal, typeDesc, fieldName, mapVector, rowNum);
break;
}
case UNION: {
UnionColumnVector unionVector = (UnionColumnVector) vector;
setUnionColumnVector(colVal, typeDesc, fieldName, unionVector, rowNum);
break;
}
default:
throw new RuntimeException("setColumn: Internal error: unexpected ColumnVector subtype");
} // switch
} // else
} // setColumn
private static String orcExceptionMsg(String prefixMsg, String fieldName, int rowNum) {
return prefixMsg + fieldName + " in row " + rowNum;
}
public static TypeDescription detectType(Object value) {
TypeDescription type = null;
if (value != null) {
if (value instanceof Boolean) {
type = TypeDescription.createBoolean();
} else if (value instanceof Short) {
type = TypeDescription.createShort();
} else if (value instanceof Integer) {
type = TypeDescription.createInt();
} else if (value instanceof Long) {
type = TypeDescription.createLong();
} else if (value instanceof Timestamp) {
type = TypeDescription.createTimestamp();
} else if (value instanceof BigDecimal) {
type = TypeDescription.createDecimal();
} else if (value instanceof Byte) {
type = TypeDescription.createByte();
} else if (value instanceof Float) {
type = TypeDescription.createFloat();
} else if (value instanceof Double) {
type = TypeDescription.createDouble();
} else if (value instanceof String) {
type = TypeDescription.createString();
} else if (value instanceof Date) {
type = TypeDescription.createDate();
} else if (value instanceof byte[]) {
type = TypeDescription.createBinary();
} else {
throw new RuntimeException(
value.getClass().getName() + " is not a supported ORC column type");
}
} else {
type = TypeDescription.createString();
}
return type;
}
}

View File

@@ -0,0 +1,253 @@
package org.talend.orc;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcFile.WriterOptions;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
class ORCUtilsTest {
private static File localFolder;
@BeforeAll
static void setup() throws IOException {
localFolder = createTempDirectory();
}
@AfterAll
static void dispose() {
// File.delete() only removes empty directories, so remove the contents first.
File[] files = localFolder.listFiles();
if (files != null) {
for (File file : files) {
file.delete();
}
}
localFolder.delete();
}
/**
* Write 1000 rows covering all supported scalar and list types, then read
* them back and verify the values of selected rows.
*
* Note: at the time this test was written, the ORC writer did not correctly
* write the date epoch value to the ORC file. The value was written as a
* 32-bit int instead of a 64-bit long, so the date read back is incorrect;
* a timestamp value should be used instead where possible.
*/
@Test
void testAllDataTypes() throws Throwable {
String filePath = localFolder.getAbsolutePath() + "/testAllTypes.orc";
writeData(filePath);
readAndCheckData(filePath);
}
@Test
void testDetectType() {
Assertions.assertEquals(TypeDescription.Category.BOOLEAN, ORCWriteUtils.detectType(true).getCategory());
Assertions.assertEquals(TypeDescription.Category.SHORT,
ORCWriteUtils.detectType(Short.valueOf("1")).getCategory());
Assertions.assertEquals(TypeDescription.Category.INT, ORCWriteUtils.detectType(1).getCategory());
Assertions.assertEquals(TypeDescription.Category.LONG, ORCWriteUtils.detectType(1L).getCategory());
Assertions.assertEquals(TypeDescription.Category.TIMESTAMP,
ORCWriteUtils.detectType(new Timestamp(System.currentTimeMillis())).getCategory());
Assertions.assertEquals(TypeDescription.Category.DECIMAL,
ORCWriteUtils.detectType(new BigDecimal("1")).getCategory());
Assertions.assertEquals(TypeDescription.Category.BYTE,
ORCWriteUtils.detectType(Byte.valueOf("1")).getCategory());
Assertions.assertEquals(TypeDescription.Category.FLOAT, ORCWriteUtils.detectType(1.0f).getCategory());
Assertions.assertEquals(TypeDescription.Category.DOUBLE, ORCWriteUtils.detectType(1.0).getCategory());
Assertions.assertEquals(TypeDescription.Category.STRING, ORCWriteUtils.detectType("test").getCategory());
Assertions.assertEquals(TypeDescription.Category.DATE, ORCWriteUtils.detectType(new Date()).getCategory());
Assertions.assertEquals(TypeDescription.Category.BINARY,
ORCWriteUtils.detectType("test".getBytes()).getCategory());
}
private void writeData(String filePath) throws Throwable {
TypeDescription schema = TypeDescription.createStruct();
schema.addField("t_boolean", TypeDescription.createBoolean());
schema.addField("t_byte", TypeDescription.createByte());
schema.addField("t_bytes", TypeDescription.createBinary());
schema.addField("t_char", TypeDescription.createChar());
schema.addField("t_date", TypeDescription.createDate());
schema.addField("t_ts", TypeDescription.createTimestamp());
schema.addField("t_double", TypeDescription.createDouble());
schema.addField("t_float", TypeDescription.createFloat());
schema.addField("t_decimal", TypeDescription.createDecimal().withPrecision(18).withScale(5));
schema.addField("t_int", TypeDescription.createInt());
schema.addField("t_long", TypeDescription.createLong());
schema.addField("t_short", TypeDescription.createShort());
schema.addField("t_string", TypeDescription.createString());
schema.addField("t_list", TypeDescription.createList(TypeDescription.createString()));
WriterOptions writerOption = OrcFile.writerOptions(new Configuration()) //
.overwrite(true) //
.compress(CompressionKind.valueOf("ZLIB")).setSchema(schema); //
Writer writer = OrcFile.createWriter(new Path(filePath), writerOption);
VectorizedRowBatch batch = schema.createRowBatch(100);
for (int r = 0; r < 1000; ++r) {
int row = batch.size++;
for (int i = 0; i < batch.cols.length; i++) {
ColumnVector vector = batch.cols[i];
TypeDescription type = schema.getChildren().get(i);
switch (vector.type) {
case BYTES:
if (type.getCategory() == TypeDescription.Category.BINARY) {
ORCWriteUtils.setColumn(("this is byte[] " + r).getBytes(), null, "t_bytes", vector, row);
} else if (type.getCategory() == TypeDescription.Category.STRING) {
if (r == 666) {
ORCWriteUtils.setColumn(null, null, "t_string", vector, row);
} else {
ORCWriteUtils.setColumn(("this is String " + r), null, "t_string", vector, row);
}
} else if (type.getCategory() == TypeDescription.Category.CHAR) {
ORCWriteUtils.setColumn("talend".charAt(r % 6), null, "t_char", vector, row);
} else {
throw new RuntimeException(type.getCategory() + " is not supported as BYTES vector");
}
break;
case DECIMAL:
ORCWriteUtils.setColumn(new BigDecimal(r + ".12345"), null, "t_decimal", vector, row);
break;
case DOUBLE:
if (type.getCategory() == TypeDescription.Category.DOUBLE) {
ORCWriteUtils.setColumn(r + 0.123, null, "t_double", vector, row);
} else if (type.getCategory() == TypeDescription.Category.FLOAT) {
ORCWriteUtils.setColumn(r + 0.456f, null, "t_float", vector, row);
} else {
throw new RuntimeException(type.getCategory() + " is not supported as DOUBLE vector");
}
break;
case LONG:
if (type.getCategory() == TypeDescription.Category.BOOLEAN) {
ORCWriteUtils.setColumn(true, null, "t_boolean", vector, row);
} else if (type.getCategory() == TypeDescription.Category.BYTE) {
ORCWriteUtils.setColumn((byte)(r % 128), null, "t_byte", vector, row);
} else if (type.getCategory() == TypeDescription.Category.INT) {
ORCWriteUtils.setColumn(r, null, "t_int", vector, row);
} else if (type.getCategory() == TypeDescription.Category.SHORT) {
ORCWriteUtils.setColumn((short)(r % 256), null, "t_short", vector, row);
} else if (type.getCategory() == TypeDescription.Category.LONG) {
ORCWriteUtils.setColumn(r * 1000L, null, "t_long", vector, row);
} else if (type.getCategory() == TypeDescription.Category.DATE) {
Date d = new Date(1633687854031L);
ORCWriteUtils.setColumn(d, null, "t_date", vector, row);
} else {
throw new RuntimeException(type.getCategory() + " is not supported as LONG vector");
}
break;
case TIMESTAMP:
Timestamp ts = new java.sql.Timestamp(1633687854031L);
ts.setNanos(123456789);
ORCWriteUtils.setColumn(ts, null, "t_ts", vector, row);
break;
case LIST:
List<String> values = new ArrayList<>();
values.add("v1_" + r);
values.add("v2_" + r);
values.add("v3_" + r);
ORCWriteUtils.setColumn(values, ORCWriteUtils.detectType("v1_" + r), "t_ list", vector, row);
break;
default:
throw new RuntimeException(vector.type + " is not supported");
}
}
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
}
if (batch.size != 0) {
writer.addRowBatch(batch);
}
writer.close();
}
private void readAndCheckData(String filePath) throws Throwable {
Reader reader = OrcFile.createReader(new Path(filePath), OrcFile.readerOptions(new Configuration()));
TypeDescription schema = reader.getSchema();
VectorizedRowBatch batch = schema.createRowBatch();
RecordReader rowIterator = reader.rows(reader.options().schema(schema));
int numberLine = 0;
List<Object> nb_500 = new ArrayList<>();
List<Object> nb_666 = new ArrayList<>();
while (rowIterator.nextBatch(batch)) {
ColumnVector[] colVectors = batch.cols;
for (int row = 0; row < batch.size; ++row) {
numberLine++;
for (String columnName : schema.getFieldNames()) {
ColumnVector colVector = colVectors[schema.getFieldNames().indexOf(columnName)];
int colRow = colVector.isRepeating ? 0 : row;
Object value = ORCReadUtils.readColumnByName(batch, columnName, schema, colRow);
if (numberLine == 500) {
nb_500.add(value);
} else if (numberLine == 667) {
nb_666.add(value);
}
}
}
}
Assertions.assertEquals(true, nb_500.get(0));
Assertions.assertEquals(Byte.valueOf("115"), nb_500.get(1));
Assertions.assertEquals("this is byte[] 499", new String((byte[]) nb_500.get(2)));
Assertions.assertEquals("a", nb_500.get(3));
Date t_date = (Date) nb_500.get(4);
Assertions.assertEquals((1633687854000L/86400000), t_date.getTime()/86400000);
Timestamp t_ts = (Timestamp) nb_500.get(5);
Assertions.assertEquals(1633687854123L, t_ts.getTime());
Assertions.assertEquals(123456789, t_ts.getNanos());
Assertions.assertEquals(499.123, nb_500.get(6));
Assertions.assertEquals(499.456f, (float) nb_500.get(7), 0.001f);
Assertions.assertEquals(new BigDecimal("499.12345"), nb_500.get(8));
Assertions.assertEquals(499, nb_500.get(9));
Assertions.assertEquals(499000L, nb_500.get(10));
Assertions.assertEquals(Short.valueOf("243"), nb_500.get(11));
Assertions.assertEquals("this is String 499", nb_500.get(12));
Assertions.assertArrayEquals(Arrays.asList("v1_499", "v2_499", "v3_499").toArray(),
((List<Object>) nb_500.get(13)).toArray());
// line 667 (r == 666) wrote a null t_string value
Assertions.assertNull(nb_666.get(12));
rowIterator.close();
}
public static File createTempDirectory() throws IOException {
final File temp;
temp = File.createTempFile("temp", Long.toString(System.nanoTime()));
if (!temp.delete()) {
throw new IOException("Could not delete temp file: " + temp.getAbsolutePath());
}
if (!temp.mkdir()) {
throw new IOException("Could not create temp directory: " + temp.getAbsolutePath());
}
return temp;
}
}

View File

@@ -0,0 +1,98 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>talend-parquet</artifactId>
<version>1.3</version>
<properties>
<parquet.version>1.10.1</parquet.version>
<hadoop.version>3.2.2</hadoop.version>
<jodd.version>6.0.1</jodd.version>
<hamcrest.version>1.3</hamcrest.version>
<junit.version>4.13.2</junit.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>${parquet.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.jodd</groupId>
<artifactId>jodd-util</artifactId>
<version>${jodd.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>${hamcrest.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,141 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data;
import org.talend.parquet.data.simple.NanoTime;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
abstract public class Group extends GroupValueSource {
private static final Logger LOG = LoggerFactory.getLogger(Group.class);
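// The name-based add/append methods resolve the field index from the schema and delegate to the index-based variants below.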
public void add(String field, int value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, long value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, float value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, double value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, String value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, NanoTime value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, boolean value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, Binary value) {
add(getType().getFieldIndex(field), value);
}
public void add(String field, Group value) {
add(getType().getFieldIndex(field), value);
}
public Group addGroup(String field) {
if (LOG.isDebugEnabled()) {
LOG.debug("add group {} to {}", field, getType().getName());
}
return addGroup(getType().getFieldIndex(field));
}
@Override
public Group getGroup(String field, int index) {
return getGroup(getType().getFieldIndex(field), index);
}
abstract public void add(int fieldIndex, int value);
abstract public void add(int fieldIndex, long value);
abstract public void add(int fieldIndex, String value);
abstract public void add(int fieldIndex, boolean value);
abstract public void add(int fieldIndex, NanoTime value);
abstract public void add(int fieldIndex, Binary value);
abstract public void add(int fieldIndex, float value);
abstract public void add(int fieldIndex, double value);
abstract public void add(int fieldIndex, Group value);
abstract public Group addGroup(int fieldIndex);
@Override
abstract public Group getGroup(int fieldIndex, int index);
public Group asGroup() {
return this;
}
public Group append(String fieldName, int value) {
add(fieldName, value);
return this;
}
public Group append(String fieldName, float value) {
add(fieldName, value);
return this;
}
public Group append(String fieldName, double value) {
add(fieldName, value);
return this;
}
public Group append(String fieldName, long value) {
add(fieldName, value);
return this;
}
public Group append(String fieldName, NanoTime value) {
add(fieldName, value);
return this;
}
public Group append(String fieldName, String value) {
add(fieldName, Binary.fromString(value));
return this;
}
public Group append(String fieldName, boolean value) {
add(fieldName, value);
return this;
}
public Group append(String fieldName, Binary value) {
add(fieldName, value);
return this;
}
abstract public void writeValue(int field, int index, RecordConsumer recordConsumer);
}

View File

@@ -0,0 +1,19 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data;
abstract public class GroupFactory {
abstract public Group newGroup();
}

View File

@@ -0,0 +1,83 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.GroupType;
abstract public class GroupValueSource {
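// Name-based accessors resolve the field index from the schema and delegate to the abstract index-based variants.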
public int getFieldRepetitionCount(String field) {
return getFieldRepetitionCount(getType().getFieldIndex(field));
}
public GroupValueSource getGroup(String field, int index) {
return getGroup(getType().getFieldIndex(field), index);
}
public String getString(String field, int index) {
return getString(getType().getFieldIndex(field), index);
}
public int getInteger(String field, int index) {
return getInteger(getType().getFieldIndex(field), index);
}
public long getLong(String field, int index) {
return getLong(getType().getFieldIndex(field), index);
}
public double getDouble(String field, int index) {
return getDouble(getType().getFieldIndex(field), index);
}
public float getFloat(String field, int index) {
return getFloat(getType().getFieldIndex(field), index);
}
public boolean getBoolean(String field, int index) {
return getBoolean(getType().getFieldIndex(field), index);
}
public Binary getBinary(String field, int index) {
return getBinary(getType().getFieldIndex(field), index);
}
public Binary getInt96(String field, int index) {
return getInt96(getType().getFieldIndex(field), index);
}
abstract public int getFieldRepetitionCount(int fieldIndex);
abstract public GroupValueSource getGroup(int fieldIndex, int index);
abstract public String getString(int fieldIndex, int index);
abstract public Integer getInteger(int fieldIndex, int index);
abstract public Long getLong(int fieldIndex, int index);
abstract public Double getDouble(int fieldIndex, int index);
abstract public Float getFloat(int fieldIndex, int index);
abstract public Boolean getBoolean(int fieldIndex, int index);
abstract public Binary getBinary(int fieldIndex, int index);
abstract public Binary getInt96(int fieldIndex, int index);
abstract public String getValueToString(int fieldIndex, int index);
abstract public GroupType getType();
}

View File

@@ -0,0 +1,56 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.Type;
public class GroupWriter {
private final RecordConsumer recordConsumer;
private final GroupType schema;
public GroupWriter(RecordConsumer recordConsumer, GroupType schema) {
this.recordConsumer = recordConsumer;
this.schema = schema;
}
public void write(Group group) {
recordConsumer.startMessage();
writeGroup(group, schema);
recordConsumer.endMessage();
}
private void writeGroup(Group group, GroupType type) {
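// Walk the schema: for each field with at least one value, open the field, write primitives directly and recurse into nested groups.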
int fieldCount = type.getFieldCount();
for (int field = 0; field < fieldCount; ++field) {
int valueCount = group.getFieldRepetitionCount(field);
if (valueCount > 0) {
Type fieldType = type.getType(field);
String fieldName = fieldType.getName();
recordConsumer.startField(fieldName, field);
for (int index = 0; index < valueCount; ++index) {
if (fieldType.isPrimitive()) {
group.writeValue(field, index, recordConsumer);
} else {
recordConsumer.startGroup();
writeGroup(group.getGroup(field, index), fieldType.asGroupType());
recordConsumer.endGroup();
}
}
recordConsumer.endField(fieldName, field);
}
}
}
}

View File

@@ -0,0 +1,45 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
public class BinaryValue extends Primitive {
private final Binary binary;
public BinaryValue(Binary binary) {
this.binary = binary;
}
@Override
public Binary getBinary() {
return binary;
}
@Override
public String getString() {
return binary.toStringUsingUTF8();
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addBinary(binary);
}
@Override
public String toString() {
return getString();
}
}

View File

@@ -0,0 +1,39 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.RecordConsumer;
public class BooleanValue extends Primitive {
private final boolean bool;
public BooleanValue(boolean bool) {
this.bool = bool;
}
@Override
public String toString() {
return String.valueOf(bool);
}
@Override
public boolean getBoolean() {
return bool;
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addBoolean(bool);
}
}

View File

@@ -0,0 +1,39 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.RecordConsumer;
public class DoubleValue extends Primitive {
private final double value;
public DoubleValue(double value) {
this.value = value;
}
@Override
public double getDouble() {
return value;
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addDouble(value);
}
@Override
public String toString() {
return String.valueOf(value);
}
}

View File

@@ -0,0 +1,39 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.RecordConsumer;
public class FloatValue extends Primitive {
private final float value;
public FloatValue(float value) {
this.value = value;
}
@Override
public float getFloat() {
return value;
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addFloat(value);
}
@Override
public String toString() {
return String.valueOf(value);
}
}

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
public class Int96Value extends Primitive {
private final Binary value;
public Int96Value(Binary value) {
this.value = value;
}
@Override
public Binary getInt96() {
return value;
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addBinary(value);
}
@Override
public String toString() {
return "Int96Value{" + String.valueOf(value) + "}";
}
}

View File

@@ -0,0 +1,39 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.RecordConsumer;
public class IntegerValue extends Primitive {
private final int value;
public IntegerValue(int value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@Override
public int getInteger() {
return value;
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addInteger(value);
}
}

View File

@@ -0,0 +1,39 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.RecordConsumer;
public class LongValue extends Primitive {
private final long value;
public LongValue(long value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@Override
public long getLong() {
return value;
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addLong(value);
}
}

View File

@@ -0,0 +1,74 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.apache.parquet.Preconditions;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
public class NanoTime extends Primitive {
private final int julianDay;
private final long timeOfDayNanos;
public static NanoTime fromBinary(Binary bytes) {
Preconditions.checkArgument(bytes.length() == 12, "Must be 12 bytes");
ByteBuffer buf = bytes.toByteBuffer();
buf.order(ByteOrder.LITTLE_ENDIAN);
long timeOfDayNanos = buf.getLong();
int julianDay = buf.getInt();
return new NanoTime(julianDay, timeOfDayNanos);
}
public static NanoTime fromInt96(Int96Value int96) {
// Delegate to fromBinary() so the little-endian layout produced by toBinary() is decoded consistently.
return fromBinary(int96.getInt96());
}
public NanoTime(int julianDay, long timeOfDayNanos) {
this.julianDay = julianDay;
this.timeOfDayNanos = timeOfDayNanos;
}
public int getJulianDay() {
return julianDay;
}
public long getTimeOfDayNanos() {
return timeOfDayNanos;
}
public Binary toBinary() {
ByteBuffer buf = ByteBuffer.allocate(12);
buf.order(ByteOrder.LITTLE_ENDIAN);
buf.putLong(timeOfDayNanos);
buf.putInt(julianDay);
buf.flip();
return Binary.fromConstantByteBuffer(buf);
}
public Int96Value toInt96() {
return new Int96Value(toBinary());
}
@Override
public void writeValue(RecordConsumer recordConsumer) {
recordConsumer.addBinary(toBinary());
}
@Override
public String toString() {
return "NanoTime{julianDay=" + julianDay + ", timeOfDayNanos=" + timeOfDayNanos + "}";
}
}
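
A quick round-trip check of the 12-byte INT96 layout this class encodes (time-of-day nanos as a little-endian long, followed by the Julian day as an int); the values are illustrative only:

import org.apache.parquet.io.api.Binary;

NanoTime nt = new NanoTime(2440588, 42L);  // 2440588 = Julian day of 1970-01-01
Binary bin = nt.toBinary();                // 12 bytes: 8-byte nanos-of-day, then 4-byte Julian day
NanoTime back = NanoTime.fromBinary(bin);
// back.getJulianDay() == 2440588, back.getTimeOfDayNanos() == 42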

View File

@@ -0,0 +1,54 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
public abstract class Primitive {
public String getString() {
throw new UnsupportedOperationException();
}
public int getInteger() {
throw new UnsupportedOperationException();
}
public long getLong() {
throw new UnsupportedOperationException();
}
public boolean getBoolean() {
throw new UnsupportedOperationException();
}
public Binary getBinary() {
throw new UnsupportedOperationException();
}
public Binary getInt96() {
throw new UnsupportedOperationException();
}
public float getFloat() {
throw new UnsupportedOperationException();
}
public double getDouble() {
throw new UnsupportedOperationException();
}
public abstract void writeValue(RecordConsumer recordConsumer);
}
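
Every accessor above throws by default, so each concrete value class only overrides the single getter it supports; a tiny illustrative sketch using the IntegerValue class defined earlier:

Primitive p = new IntegerValue(7);
System.out.println(p.getInteger()); // prints 7
p.getString(); // throws UnsupportedOperationException (IntegerValue does not override it)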

View File

@@ -0,0 +1,274 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import java.util.ArrayList;
import java.util.List;
import org.talend.parquet.data.Group;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.Type;
public class SimpleGroup extends Group {
private final GroupType schema;
private final List<Object>[] data;
@SuppressWarnings("unchecked")
public SimpleGroup(GroupType schema) {
this.schema = schema;
this.data = new List[schema.getFields().size()];
for (int i = 0; i < schema.getFieldCount(); i++) {
this.data[i] = new ArrayList<>();
}
}
@Override
public String toString() {
return toString("");
}
private StringBuilder appendToString(StringBuilder builder, String indent) {
int i = 0;
for (Type field : schema.getFields()) {
String name = field.getName();
List<Object> values = data[i];
++i;
if (values != null && !values.isEmpty()) {
for (Object value : values) {
builder.append(indent).append(name);
if (value == null) {
builder.append(": NULL\n");
} else if (value instanceof Group) {
builder.append('\n');
((SimpleGroup) value).appendToString(builder, indent + " ");
} else {
builder.append(": ").append(value.toString()).append('\n');
}
}
}
}
return builder;
}
public String toString(String indent) {
StringBuilder builder = new StringBuilder();
appendToString(builder, indent);
return builder.toString();
}
@Override
public Group addGroup(int fieldIndex) {
SimpleGroup g = new SimpleGroup(schema.getType(fieldIndex).asGroupType());
add(fieldIndex, g);
return g;
}
@Override
public Group getGroup(int fieldIndex, int index) {
return (Group) getValue(fieldIndex, index);
}
private Object getValue(int fieldIndex, int index) {
List<Object> list;
try {
list = data[fieldIndex];
} catch (IndexOutOfBoundsException e) {
throw new RuntimeException(
"not found " + fieldIndex + "(" + schema.getFieldName(fieldIndex) + ") in group:\n" + this);
}
try {
if(list == null || list.isEmpty()) {
return null;
}
return list.get(index);
} catch (IndexOutOfBoundsException e) {
throw new RuntimeException("not found " + fieldIndex + "(" + schema.getFieldName(fieldIndex)
+ ") element number " + index + " in group:\n" + this);
}
}
private void add(int fieldIndex, Primitive value) {
Type type = schema.getType(fieldIndex);
List<Object> list = data[fieldIndex];
if (!type.isRepetition(Type.Repetition.REPEATED) && !list.isEmpty()) {
throw new IllegalStateException(
"field " + fieldIndex + " (" + type.getName() + ") can not have more than one value: " + list);
}
list.add(value);
}
@Override
public int getFieldRepetitionCount(int fieldIndex) {
List<Object> list = data[fieldIndex];
return list == null ? 0 : list.size();
}
@Override
public String getValueToString(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return String.valueOf(value);
}
@Override
public String getString(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((BinaryValue) value).getString();
}
@Override
public Integer getInteger(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((IntegerValue)value).getInteger();
}
@Override
public Long getLong(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((LongValue)value).getLong();
}
@Override
public Double getDouble(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((DoubleValue)value).getDouble();
}
@Override
public Float getFloat(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((FloatValue)value).getFloat();
}
@Override
public Boolean getBoolean(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((BooleanValue) value).getBoolean();
}
@Override
public Binary getBinary(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((BinaryValue) value).getBinary();
}
public NanoTime getTimeNanos(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return NanoTime.fromInt96((Int96Value) value);
}
@Override
public Binary getInt96(int fieldIndex, int index) {
Object value = getValue(fieldIndex, index);
if(value == null) {
return null;
}
return ((Int96Value) value).getInt96();
}
@Override
public void add(int fieldIndex, int value) {
add(fieldIndex, new IntegerValue(value));
}
@Override
public void add(int fieldIndex, long value) {
add(fieldIndex, new LongValue(value));
}
@Override
public void add(int fieldIndex, String value) {
add(fieldIndex, new BinaryValue(Binary.fromString(value)));
}
@Override
public void add(int fieldIndex, NanoTime value) {
add(fieldIndex, value.toInt96());
}
@Override
public void add(int fieldIndex, boolean value) {
add(fieldIndex, new BooleanValue(value));
}
@Override
public void add(int fieldIndex, Binary value) {
switch (getType().getType(fieldIndex).asPrimitiveType().getPrimitiveTypeName()) {
case BINARY:
case FIXED_LEN_BYTE_ARRAY:
add(fieldIndex, new BinaryValue(value));
break;
case INT96:
add(fieldIndex, new Int96Value(value));
break;
default:
throw new UnsupportedOperationException(
getType().getType(fieldIndex).asPrimitiveType().getName() + " not supported for Binary");
}
}
@Override
public void add(int fieldIndex, float value) {
add(fieldIndex, new FloatValue(value));
}
@Override
public void add(int fieldIndex, double value) {
add(fieldIndex, new DoubleValue(value));
}
@Override
public void add(int fieldIndex, Group value) {
data[fieldIndex].add(value);
}
@Override
public GroupType getType() {
return schema;
}
@Override
public void writeValue(int field, int index, RecordConsumer recordConsumer) {
((Primitive) getValue(field, index)).writeValue(recordConsumer);
}
}
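
A minimal sketch of populating and reading back a SimpleGroup by field index; the schema is illustrative:

import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;

MessageType schema = parseMessageType("message person { required binary name (UTF8); repeated int32 score; }");
Group g = new SimpleGroup(schema);
g.add(0, "Ada"); // required field: at most one value
g.add(1, 97);    // repeated field: may hold several values
g.add(1, 99);
// g.getString(0, 0) -> "Ada"; g.getFieldRepetitionCount(1) -> 2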

View File

@@ -0,0 +1,32 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.GroupFactory;
import org.apache.parquet.schema.MessageType;
public class SimpleGroupFactory extends GroupFactory {
private final MessageType schema;
public SimpleGroupFactory(MessageType schema) {
this.schema = schema;
}
@Override
public Group newGroup() {
return new SimpleGroup(schema);
}
}

View File

@@ -0,0 +1,51 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple.convert;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.SimpleGroupFactory;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.io.api.RecordMaterializer;
import org.apache.parquet.schema.MessageType;
public class GroupRecordConverter extends RecordMaterializer<Group> {
private final SimpleGroupFactory simpleGroupFactory;
private SimpleGroupConverter root;
public GroupRecordConverter(MessageType schema) {
this.simpleGroupFactory = new SimpleGroupFactory(schema);
this.root = new SimpleGroupConverter(null, 0, schema) {
@Override
public void start() {
this.current = simpleGroupFactory.newGroup();
}
@Override
public void end() {
}
};
}
@Override
public Group getCurrentRecord() {
return root.getCurrentRecord();
}
@Override
public GroupConverter getRootConverter() {
return root;
}
}

View File

@@ -0,0 +1,61 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple.convert;
import org.talend.parquet.data.Group;
import org.apache.parquet.io.api.Converter;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.Type;
class SimpleGroupConverter extends GroupConverter {
private final SimpleGroupConverter parent;
private final int index;
protected Group current;
private Converter[] converters;
SimpleGroupConverter(SimpleGroupConverter parent, int index, GroupType schema) {
this.parent = parent;
this.index = index;
converters = new Converter[schema.getFieldCount()];
for (int i = 0; i < converters.length; i++) {
final Type type = schema.getType(i);
if (type.isPrimitive()) {
converters[i] = new SimplePrimitiveConverter(this, i);
} else {
converters[i] = new SimpleGroupConverter(this, i, type.asGroupType());
}
}
}
@Override
public void start() {
current = parent.getCurrentRecord().addGroup(index);
}
@Override
public Converter getConverter(int fieldIndex) {
return converters[fieldIndex];
}
@Override
public void end() {
}
public Group getCurrentRecord() {
return current;
}
}

View File

@@ -0,0 +1,88 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.data.simple.convert;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.PrimitiveConverter;
class SimplePrimitiveConverter extends PrimitiveConverter {
private final SimpleGroupConverter parent;
private final int index;
SimplePrimitiveConverter(SimpleGroupConverter parent, int index) {
this.parent = parent;
this.index = index;
}
/**
* {@inheritDoc}
*
* @see org.apache.parquet.io.api.PrimitiveConverter#addBinary(Binary)
*/
@Override
public void addBinary(Binary value) {
parent.getCurrentRecord().add(index, value);
}
/**
* {@inheritDoc}
*
* @see org.apache.parquet.io.api.PrimitiveConverter#addBoolean(boolean)
*/
@Override
public void addBoolean(boolean value) {
parent.getCurrentRecord().add(index, value);
}
/**
* {@inheritDoc}
*
* @see org.apache.parquet.io.api.PrimitiveConverter#addDouble(double)
*/
@Override
public void addDouble(double value) {
parent.getCurrentRecord().add(index, value);
}
/**
* {@inheritDoc}
*
* @see org.apache.parquet.io.api.PrimitiveConverter#addFloat(float)
*/
@Override
public void addFloat(float value) {
parent.getCurrentRecord().add(index, value);
}
/**
* {@inheritDoc}
*
* @see org.apache.parquet.io.api.PrimitiveConverter#addInt(int)
*/
@Override
public void addInt(int value) {
parent.getCurrentRecord().add(index, value);
}
/**
* {@inheritDoc}
*
* @see org.apache.parquet.io.api.PrimitiveConverter#addLong(long)
*/
@Override
public void addLong(long value) {
parent.getCurrentRecord().add(index, value);
}
}

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.hadoop;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.io.api.RecordMaterializer;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.convert.GroupRecordConverter;
public class TalendGroupReadSupport extends ReadSupport<Group> {
@Override
public org.apache.parquet.hadoop.api.ReadSupport.ReadContext init(Configuration configuration,
Map<String, String> keyValueMetaData, MessageType fileSchema) {
String partialSchemaString = configuration.get(ReadSupport.PARQUET_READ_SCHEMA);
MessageType requestedProjection = getSchemaForRead(fileSchema, partialSchemaString);
return new ReadContext(requestedProjection);
}
@Override
public RecordMaterializer<Group> prepareForRead(Configuration configuration, Map<String, String> keyValueMetaData,
MessageType fileSchema, org.apache.parquet.hadoop.api.ReadSupport.ReadContext readContext) {
return new GroupRecordConverter(readContext.getRequestedSchema());
}
}
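
A sketch of reading through this support class, including the optional column projection it honors via ReadSupport.PARQUET_READ_SCHEMA; the file path and projected schema are hypothetical:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.talend.parquet.data.Group;

static void readProjected(Path file) throws IOException {
    Configuration conf = new Configuration();
    conf.set(ReadSupport.PARQUET_READ_SCHEMA, "message example { required int32 line; }"); // project one column
    try (ParquetReader<Group> reader =
            ParquetReader.builder(new TalendGroupReadSupport(), file).withConf(conf).build()) {
        Group g;
        while ((g = reader.read()) != null) {
            System.out.println(g.getInteger(0, 0));
        }
    }
}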

View File

@@ -0,0 +1,81 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.hadoop;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.GroupWriter;
public class TalendGroupWriteSupport extends WriteSupport<Group> {
public static final String PARQUET_SCHEMA = "parquet.talend.schema";
public static void setSchema(MessageType schema, Configuration configuration) {
configuration.set(PARQUET_SCHEMA, schema.toString());
}
public static MessageType getSchema(Configuration configuration) {
return parseMessageType(Objects.requireNonNull(configuration.get(PARQUET_SCHEMA), PARQUET_SCHEMA));
}
private MessageType schema;
private GroupWriter groupWriter;
private Map<String, String> extraMetaData;
public TalendGroupWriteSupport() {
this(null, new HashMap<String, String>());
}
TalendGroupWriteSupport(MessageType schema) {
this(schema, new HashMap<String, String>());
}
TalendGroupWriteSupport(MessageType schema, Map<String, String> extraMetaData) {
this.schema = schema;
this.extraMetaData = extraMetaData;
}
@Override
public String getName() {
return "Talend";
}
@Override
public org.apache.parquet.hadoop.api.WriteSupport.WriteContext init(Configuration configuration) {
// if present, prefer the schema passed to the constructor
if (schema == null) {
schema = getSchema(configuration);
}
return new WriteContext(schema, this.extraMetaData);
}
@Override
public void prepareForWrite(RecordConsumer recordConsumer) {
groupWriter = new GroupWriter(recordConsumer, schema);
}
@Override
public void write(Group record) {
groupWriter.write(record);
}
}
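
The schema hand-off works through the configuration key above; a short sketch (the schema string is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.schema.MessageType;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;

Configuration conf = new Configuration();
MessageType schema = parseMessageType("message example { required int32 id; }");
TalendGroupWriteSupport.setSchema(schema, conf); // stored under "parquet.talend.schema"
MessageType roundTrip = TalendGroupWriteSupport.getSchema(conf); // init() performs the same lookup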

View File

@@ -0,0 +1,30 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.hadoop;
import org.apache.parquet.hadoop.ParquetInputFormat;
import org.talend.parquet.data.Group;
/**
* Example input format to read Parquet files.
*
* This input format uses a rather inefficient data model but works
* independently of higher-level abstractions.
*/
public class TalendInputFormat extends ParquetInputFormat<Group> {
public TalendInputFormat() {
super(TalendGroupReadSupport.class);
}
}
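
A sketch of wiring this input format into a MapReduce job; the job name and input path are hypothetical:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

static Job newReadJob(Configuration conf) throws IOException {
    Job job = Job.getInstance(conf, "read-parquet");
    job.setInputFormatClass(TalendInputFormat.class);
    FileInputFormat.addInputPath(job, new Path("/data/in"));
    // Mappers then receive Group values materialized by TalendGroupReadSupport.
    return job;
}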

View File

@@ -0,0 +1,54 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.hadoop;
import org.apache.hadoop.mapreduce.Job;
import org.apache.parquet.hadoop.ParquetOutputFormat;
import org.apache.parquet.hadoop.util.ContextUtil;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;
/**
* An example output format.
*
* The schema must be provided up front:
*
* @see TalendOutputFormat#setSchema(Job, MessageType)
* @see TalendGroupWriteSupport#PARQUET_SCHEMA
*/
public class TalendOutputFormat extends ParquetOutputFormat<Group> {
/**
* set the schema being written to the job conf
*
* @param job a job
* @param schema the schema of the data
*/
public static void setSchema(Job job, MessageType schema) {
TalendGroupWriteSupport.setSchema(schema, ContextUtil.getConfiguration(job));
}
/**
* retrieve the schema from the conf
*
* @param job a job
* @return the schema
*/
public static MessageType getSchema(Job job) {
return TalendGroupWriteSupport.getSchema(ContextUtil.getConfiguration(job));
}
public TalendOutputFormat() {
super(new TalendGroupWriteSupport());
}
}
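
And the matching write-side wiring, using the setSchema hook above; the job name, schema, and output path are hypothetical:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.parquet.schema.MessageType;

static Job newWriteJob(Configuration conf, MessageType schema) throws IOException {
    Job job = Job.getInstance(conf, "write-parquet");
    job.setOutputFormatClass(TalendOutputFormat.class);
    TalendOutputFormat.setSchema(job, schema); // required before any record is written
    FileOutputFormat.setOutputPath(job, new Path("/data/out"));
    return job;
}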

View File

@@ -0,0 +1,108 @@
/*
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.talend.parquet.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.io.OutputFile;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* An example file writer class.
*/
public class TalendParquetWriter extends ParquetWriter<Group> {
/**
* Creates a Builder for configuring ParquetWriter with the example object
*
* @param file the output file to create
* @return a {@link Builder} to create a {@link ParquetWriter}
*/
public static Builder builder(Path file) {
return new Builder(file);
}
/**
* Creates a Builder for configuring ParquetWriter with the example object
*
* @param file the output file to create
* @return a {@link Builder} to create a {@link ParquetWriter}
*/
public static Builder builder(OutputFile file) {
return new Builder(file);
}
/**
* Create a new {@link TalendParquetWriter}.
*
* @param file The file name to write to.
* @param writeSupport The write support implementation used to write records.
* @param compressionCodecName Compression codec to use, or
* CompressionCodecName.UNCOMPRESSED
* @param blockSize the block size threshold.
* @param pageSize See parquet write up. Blocks are subdivided into
* pages for alignment and other purposes.
* @param enableDictionary Whether to use a dictionary to compress columns.
* @param conf The Configuration to use.
* @throws IOException
*/
TalendParquetWriter(Path file, WriteSupport<Group> writeSupport, CompressionCodecName compressionCodecName,
int blockSize, int pageSize, boolean enableDictionary, boolean enableValidation,
ParquetProperties.WriterVersion writerVersion, Configuration conf) throws IOException {
super(file, writeSupport, compressionCodecName, blockSize, pageSize, pageSize, enableDictionary,
enableValidation, writerVersion, conf);
}
public static class Builder extends ParquetWriter.Builder<Group, Builder> {
private MessageType type = null;
private Map<String, String> extraMetaData = new HashMap<String, String>();
private Builder(Path file) {
super(file);
}
private Builder(OutputFile file) {
super(file);
}
public Builder withType(MessageType type) {
this.type = type;
return this;
}
public Builder withExtraMetaData(Map<String, String> extraMetaData) {
this.extraMetaData = extraMetaData;
return this;
}
@Override
protected Builder self() {
return this;
}
@Override
protected WriteSupport<Group> getWriteSupport(Configuration conf) {
return new TalendGroupWriteSupport(type, extraMetaData);
}
}
}
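
A sketch of the builder in use, writing a single record; the output path and schema are illustrative:

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.schema.MessageType;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.SimpleGroupFactory;

static void writeOne(Path out) throws IOException {
    MessageType schema = parseMessageType("message example { required binary name (UTF8); }");
    try (ParquetWriter<Group> writer = TalendParquetWriter.builder(out).withType(schema).build()) {
        Group g = new SimpleGroupFactory(schema).newGroup();
        g.add(0, "Ada");
        writer.write(g);
    }
}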

View File

@@ -0,0 +1,131 @@
package org.talend.parquet.utils;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import org.talend.parquet.data.simple.NanoTime;
import jodd.time.JulianDate;
public class NanoTimeUtils {
/**
* Number of days between Julian day epoch (January 1, 4713 BC) and Unix day
* epoch (January 1, 1970). The value of this constant is {@value}.
*/
public static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588;
private static final long MILLIS_IN_DAY = TimeUnit.DAYS.toMillis(1);
private static final long NANOS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
static final long NANOS_PER_HOUR = java.util.concurrent.TimeUnit.HOURS.toNanos(1);
static final long NANOS_PER_MINUTE = java.util.concurrent.TimeUnit.MINUTES.toNanos(1);
static final long NANOS_PER_SECOND = java.util.concurrent.TimeUnit.SECONDS.toNanos(1);
static final long NANOS_PER_DAY = java.util.concurrent.TimeUnit.DAYS.toNanos(1);
private static final ThreadLocal<Calendar> parquetGMTCalendar = new ThreadLocal<Calendar>();
private static final ThreadLocal<Calendar> parquetLocalCalendar = new ThreadLocal<Calendar>();
private static Calendar getGMTCalendar() {
// Calendar.getInstance calculates the current-time needlessly, so cache
// an instance.
if (parquetGMTCalendar.get() == null) {
parquetGMTCalendar.set(Calendar.getInstance(TimeZone.getTimeZone("GMT")));
}
return parquetGMTCalendar.get();
}
private static Calendar getLocalCalendar() {
if (parquetLocalCalendar.get() == null) {
parquetLocalCalendar.set(Calendar.getInstance());
}
return parquetLocalCalendar.get();
}
private static Calendar getCalendar(boolean skipConversion) {
Calendar calendar = skipConversion ? getLocalCalendar() : getGMTCalendar();
calendar.clear();
return calendar;
}
/**
* Converts a timestamp to NanoTime.
*/
public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) {
Calendar calendar = getCalendar(skipConversion);
calendar.setTimeInMillis(ts.getTime());
int year = calendar.get(Calendar.YEAR);
if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) {
year = 1 - year;
}
JulianDate jDateTime;
jDateTime = JulianDate.of(year, calendar.get(Calendar.MONTH) + 1, // Calendar.MONTH is 0-based; JulianDate months are 1-based.
calendar.get(Calendar.DAY_OF_MONTH), 0, 0, 0, 0);
int days = jDateTime.getJulianDayNumber();
long hour = calendar.get(Calendar.HOUR_OF_DAY);
long minute = calendar.get(Calendar.MINUTE);
long second = calendar.get(Calendar.SECOND);
long nanos = ts.getNanos();
long nanosOfDay = nanos + NANOS_PER_SECOND * second + NANOS_PER_MINUTE * minute + NANOS_PER_HOUR * hour;
return new NanoTime(days, nanosOfDay);
}
public static Timestamp getTimestamp(NanoTime nt, boolean skipConversion) {
int julianDay = nt.getJulianDay();
long nanosOfDay = nt.getTimeOfDayNanos();
long remainder = nanosOfDay;
julianDay += remainder / NANOS_PER_DAY;
remainder %= NANOS_PER_DAY;
if (remainder < 0) {
remainder += NANOS_PER_DAY;
julianDay--;
}
JulianDate jDateTime = new JulianDate((double) julianDay);
LocalDateTime datetime = jDateTime.toLocalDateTime();
Calendar calendar = getCalendar(skipConversion);
calendar.set(Calendar.YEAR, datetime.getYear());
calendar.set(Calendar.MONTH, datetime.getMonthValue() - 1);
calendar.set(Calendar.DAY_OF_MONTH, datetime.getDayOfMonth());
int hour = (int) (remainder / (NANOS_PER_HOUR));
remainder = remainder % (NANOS_PER_HOUR);
int minutes = (int) (remainder / (NANOS_PER_MINUTE));
remainder = remainder % (NANOS_PER_MINUTE);
int seconds = (int) (remainder / (NANOS_PER_SECOND));
long nanos = remainder % NANOS_PER_SECOND;
calendar.set(Calendar.HOUR_OF_DAY, hour);
calendar.set(Calendar.MINUTE, minutes);
calendar.set(Calendar.SECOND, seconds);
Timestamp ts = new Timestamp(calendar.getTimeInMillis());
ts.setNanos((int) nanos);
return ts;
}
/**
* Returns timestamp millis from NanoTime type value.
*
* @param nt NanoTime value
* @return timestamp in millis
*/
public static long getTimestampMillis(NanoTime nt) {
long timeOfDayNanos = nt.getTimeOfDayNanos();
int julianDay = nt.getJulianDay();
return (julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY + (timeOfDayNanos / NANOS_PER_MILLISECOND);
}
public static Timestamp getTimestamp(NanoTime nt) {
Timestamp ts = new Timestamp(getTimestampMillis(nt));
ts.setNanos((int) (nt.getTimeOfDayNanos() % 1000000000));
return ts;
}
}
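
Worked numbers for the epoch arithmetic above, one day and one nanosecond after the Unix epoch; the values are illustrative:

import java.sql.Timestamp;
import org.talend.parquet.data.simple.NanoTime;

NanoTime nt = new NanoTime(2440589, 1L);            // 1970-01-02T00:00:00.000000001 UTC
long millis = NanoTimeUtils.getTimestampMillis(nt); // (2440589 - 2440588) * 86_400_000 = 86_400_000
Timestamp ts = NanoTimeUtils.getTimestamp(nt);      // 86_400_000 ms, then setNanos(1)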

View File

@@ -0,0 +1,231 @@
package org.talend.parquet.utils;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.DecimalMetadata;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.apache.parquet.schema.Type.Repetition;
import org.apache.parquet.schema.Types;
import org.apache.parquet.schema.Types.GroupBuilder;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.NanoTime;
public class TalendParquetUtils {
public static final String ARRAY_FIELD_NAME = "array";
public static PrimitiveType createPrimitiveType(String fieldName, boolean nullable, String primitiveType,
String originalTypeName) {
OriginalType originalType = null;
if (originalTypeName != null) {
originalType = OriginalType.valueOf(originalTypeName);
}
return new PrimitiveType((nullable ? Repetition.OPTIONAL : Repetition.REQUIRED),
PrimitiveTypeName.valueOf(primitiveType), fieldName, originalType);
}
public static PrimitiveType createDecimalType(String fieldName, boolean nullable, int precision, int scale) {
DecimalMetadata decimalMetadata = new DecimalMetadata(precision, scale);
return new PrimitiveType((nullable ? Repetition.OPTIONAL : Repetition.REQUIRED),
PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY, 16, fieldName, OriginalType.DECIMAL, decimalMetadata, null);
}
public static Type createGroupElementType(String fieldName, Object element) {
if (element == null) {
return Types.repeated(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named(fieldName);
}
if (String.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named(fieldName);
} else if (Double.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.DOUBLE).named(fieldName);
} else if (Float.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.FLOAT).named(fieldName);
} else if (Byte.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.INT32).as(OriginalType.INT_8).named(fieldName);
} else if (Short.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.INT32).as(OriginalType.INT_16).named(fieldName);
} else if (Integer.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.INT32).named(fieldName);
} else if (Long.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.INT64).named(fieldName);
} else if (Boolean.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.BOOLEAN).named(fieldName);
} else if (Date.class.isInstance(element)) {
return Types.repeated(PrimitiveTypeName.INT64).as(OriginalType.TIMESTAMP_MILLIS).named(fieldName);
} else if (Group.class.isInstance(element)) {
return ((Group) element).getType();
} else {
throw new IllegalArgumentException("Unsupported type: " + element.getClass().getCanonicalName()
+ " for group type field'" + fieldName + "'");
}
}
public static GroupType createGroupType(String fieldName, boolean nullable, Object element) {
GroupBuilder<GroupType> builder = null;
if (nullable) {
builder = Types.optionalGroup();
} else {
builder = Types.requiredGroup();
}
return builder.as(OriginalType.LIST).addField(createGroupElementType("array", element)).named(fieldName);
}
/*
* The group is expected to contain a single field whose repeated values are returned as a List.
*/
public static List<Object> groupFieldValueToList(Group group) {
if (group == null) {
return null;
}
List<Object> values = new ArrayList<>();
int listSize = group.getFieldRepetitionCount(0);
for (int elementIndex = 0; elementIndex < listSize; elementIndex++) {
Type elementType = group.getType().getType(0);
if (elementType.isPrimitive()) {
PrimitiveType pType = elementType.asPrimitiveType();
switch (pType.getPrimitiveTypeName()) {
case INT64:
if (OriginalType.TIMESTAMP_MILLIS == elementType.getOriginalType()) {
values.add(new Date(group.getLong(0, elementIndex)));
} else {
values.add(group.getLong(0, elementIndex));
}
break;
case INT32:
values.add(group.getInteger(0, elementIndex));
break;
case BOOLEAN:
values.add(group.getBoolean(0, elementIndex));
break;
case INT96:
Binary value = group.getInt96(0, elementIndex);
if (value != null) {
NanoTime nanoTime = NanoTime.fromBinary(value);
values.add(new Date(NanoTimeUtils.getTimestamp(nanoTime, false).getTime()));
} else {
values.add(value);
}
break;
case FLOAT:
values.add(group.getFloat(0, elementIndex));
break;
case DOUBLE:
values.add(group.getDouble(0, elementIndex));
break;
default:
values.add(group.getValueToString(0, elementIndex));
}
} else {
values.add(groupFieldValueToList(group.getGroup(0, elementIndex)));
}
}
return values;
}
public static void writeGroupField(Group nestGroup, List<?> values) {
if (values == null || values.isEmpty()) {
return;
}
// only support one field currently
for (int i = 0; i < values.size(); i++) {
Object element = values.get(i);
if (String.class.isInstance(element)) {
nestGroup.add(0, (String) element);
} else if (Double.class.isInstance(element)) {
nestGroup.add(0, (Double) element);
} else if (Float.class.isInstance(element)) {
nestGroup.add(0, (Float) element);
} else if (Byte.class.isInstance(element)) {
nestGroup.add(0, (Byte) element);
} else if (Short.class.isInstance(element)) {
nestGroup.add(0, (Short) element);
} else if (Integer.class.isInstance(element)) {
nestGroup.add(0, (Integer) element);
} else if (Long.class.isInstance(element)) {
nestGroup.add(0, (Long) element);
} else if (Boolean.class.isInstance(element)) {
nestGroup.add(0, (Boolean) element);
} else if (Date.class.isInstance(element)) {
nestGroup.add(0, ((Date) element).getTime());
} else if (Group.class.isInstance(element)) {
nestGroup.add(0, (Group) element);
} else {
throw new IllegalArgumentException("Unsupported type: " + element.getClass().getCanonicalName()
+ " for group type field'" + nestGroup + "'");
}
}
}
public static BigDecimal binaryToDecimal(Binary value, int precision, int scale) {
/*
* Precision <= 18 fits in the digits of an unscaled long, so use fast long
* arithmetic; otherwise fall back to a BigInteger conversion.
*/
if (precision <= 18) {
ByteBuffer buffer = value.toByteBuffer();
byte[] bytes = buffer.array();
int start = buffer.arrayOffset() + buffer.position();
int end = buffer.arrayOffset() + buffer.limit();
long unscaled = 0L;
int i = start;
while (i < end) {
unscaled = (unscaled << 8 | bytes[i] & 0xff);
i++;
}
int bits = 8 * (end - start);
long unscaledNew = (unscaled << (64 - bits)) >> (64 - bits);
if (scale == 0 || unscaledNew <= -Math.pow(10, 18) || unscaledNew >= Math.pow(10, 18)) {
return new BigDecimal(unscaledNew);
} else {
return BigDecimal.valueOf(unscaledNew / Math.pow(10, scale));
}
} else {
return new BigDecimal(new BigInteger(value.getBytes()), scale);
}
}
public static Binary decimalToBinary(BigDecimal decimalValue, int scale) {
// First we need to make sure the BigDecimal matches our schema scale:
decimalValue = decimalValue.setScale(scale, RoundingMode.HALF_UP);
// Next we get the decimal value as one BigInteger (like there was no decimal
// point)
BigInteger unscaledDecimalValue = decimalValue.unscaledValue();
byte[] decimalBuffer = null;
// Finally we serialize the integer
byte[] decimalBytes = unscaledDecimalValue.toByteArray();
if (decimalValue.compareTo(BigDecimal.ZERO) < 0) {
decimalBuffer = new byte[] { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
} else {
decimalBuffer = new byte[16];
}
if (decimalBuffer.length >= decimalBytes.length) {
// Because we set our fixed byte array size as 16 bytes, we need to
// pad-left our original value's bytes with 0 or -1
int decimalBufferIndex = decimalBuffer.length - 1;
for (int i = decimalBytes.length - 1; i >= 0; i--) {
decimalBuffer[decimalBufferIndex] = decimalBytes[i];
decimalBufferIndex--;
}
} else {
throw new IllegalArgumentException(String
.format("Decimal size: %d was greater than the allowed max: %d",
decimalBytes.length, decimalBuffer.length));
}
return Binary.fromReusedByteArray(decimalBuffer);
}
}
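
A round-trip sketch for the 16-byte decimal encoding above. The precision and scale are illustrative; note that the long/double fast path in binaryToDecimal is exact only for values comfortably inside its range:

import java.math.BigDecimal;
import org.apache.parquet.io.api.Binary;

BigDecimal in = new BigDecimal("12345.67");
Binary bin = TalendParquetUtils.decimalToBinary(in, 2); // 16-byte two's-complement, left-padded with 0 or -1
BigDecimal out = TalendParquetUtils.binaryToDecimal(bin, 7, 2);
// out.compareTo(in) == 0 for this value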

View File

@@ -0,0 +1,86 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.talend.parquet;
import java.io.IOException;
import java.util.concurrent.Callable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.statistics.Statistics;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
public class TestUtils {
public static void enforceEmptyDir(Configuration conf, Path path) throws IOException {
FileSystem fs = path.getFileSystem(conf);
if (fs.exists(path)) {
if (!fs.delete(path, true)) {
throw new IOException("can not delete path " + path);
}
}
if (!fs.mkdirs(path)) {
throw new IOException("can not create path " + path);
}
}
/**
* A convenience method to avoid a large number of @Test(expected=...) tests
*
* @param message A String message to describe this assertion
* @param expected An Exception class that the Callable should throw
* @param callable A Callable that is expected to throw the exception
*/
public static void assertThrows(String message, Class<? extends Exception> expected, Callable<?> callable) {
try {
callable.call();
Assert.fail("No exception was thrown (" + message + "), expected: " + expected.getName());
} catch (Exception actual) {
try {
Assert.assertEquals(message, expected, actual.getClass());
} catch (AssertionError e) {
e.addSuppressed(actual);
throw e;
}
}
}
public static void assertStatsValuesEqual(Statistics<?> stats1, Statistics<?> stats2) {
assertStatsValuesEqual(null, stats1, stats2);
}
// Asserts that the statistics values (min, max, num-of-nulls) are equal. Useful
// when creating a Statistics object for the proper Type would require too much
// work or code duplication.
public static void assertStatsValuesEqual(String message, Statistics<?> expected, Statistics<?> actual) {
if (expected == actual) {
return;
}
if (expected == null || actual == null) {
Assert.assertEquals(expected, actual);
}
Assert.assertThat(actual, CoreMatchers.instanceOf(expected.getClass()));
Assert.assertArrayEquals(message, expected.getMaxBytes(), actual.getMaxBytes());
Assert.assertArrayEquals(message, expected.getMinBytes(), actual.getMinBytes());
Assert.assertEquals(message, expected.getNumNulls(), actual.getNumNulls());
}
}

View File

@@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.talend.parquet.hadoop;
import static org.junit.Assert.assertEquals;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;
import org.junit.Test;
import org.talend.parquet.hadoop.TalendGroupReadSupport;
public class TalendGroupReadSupportTest {
private String fullSchemaStr = "message example {\n" + "required int32 line;\n" + "optional binary content;\n"
+ "}";
private String partialSchemaStr = "message example {\n" + "required int32 line;\n" + "}";
@Test
public void testInitWithoutSpecifyingRequestSchema() throws Exception {
TalendGroupReadSupport s = new TalendGroupReadSupport();
Configuration configuration = new Configuration();
Map<String, String> keyValueMetaData = new HashMap<String, String>();
MessageType fileSchema = MessageTypeParser.parseMessageType(fullSchemaStr);
ReadSupport.ReadContext context = s.init(configuration, keyValueMetaData, fileSchema);
assertEquals(fileSchema, context.getRequestedSchema());
}
@Test
public void testInitWithPartialSchema() {
TalendGroupReadSupport s = new TalendGroupReadSupport();
Configuration configuration = new Configuration();
Map<String, String> keyValueMetaData = new HashMap<String, String>();
MessageType fileSchema = MessageTypeParser.parseMessageType(fullSchemaStr);
MessageType partialSchema = MessageTypeParser.parseMessageType(partialSchemaStr);
configuration.set(ReadSupport.PARQUET_READ_SCHEMA, partialSchemaStr);
ReadSupport.ReadContext context = s.init(configuration, keyValueMetaData, fileSchema);
assertEquals(partialSchema, context.getRequestedSchema());
}
}

View File

@@ -0,0 +1,169 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.talend.parquet.hadoop;
import static java.util.Arrays.asList;
import static org.apache.parquet.column.Encoding.DELTA_BYTE_ARRAY;
import static org.apache.parquet.column.Encoding.PLAIN;
import static org.apache.parquet.column.Encoding.PLAIN_DICTIONARY;
import static org.apache.parquet.column.Encoding.RLE_DICTIONARY;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER;
import static org.apache.parquet.hadoop.ParquetFileReader.readFooter;
import static org.apache.parquet.hadoop.metadata.CompressionCodecName.UNCOMPRESSED;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import static org.apache.parquet.schema.Type.Repetition.REQUIRED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.Encoding;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.column.ParquetProperties.WriterVersion;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.InvalidSchemaException;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Types;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.talend.parquet.TestUtils;
public class TestParquetWriter {
@Test
public void test() throws Exception {
Configuration conf = new Configuration();
Path root = new Path("target/tests/TestParquetWriter/");
TestUtils.enforceEmptyDir(conf, root);
MessageType schema = parseMessageType(
"message test { "
+ "required binary binary_field; "
+ "required int32 int32_field; "
+ "required int64 int64_field; "
+ "required boolean boolean_field; "
+ "required float float_field; "
+ "required double double_field; "
+ "required fixed_len_byte_array(3) flba_field; "
+ "required int96 int96_field; "
+ "} ");
GroupWriteSupport.setSchema(schema, conf);
SimpleGroupFactory f = new SimpleGroupFactory(schema);
Map<String, Encoding> expected = new HashMap<String, Encoding>();
expected.put("10-" + ParquetProperties.WriterVersion.PARQUET_1_0, PLAIN_DICTIONARY);
expected.put("1000-" + ParquetProperties.WriterVersion.PARQUET_1_0, PLAIN);
expected.put("10-" + ParquetProperties.WriterVersion.PARQUET_2_0, RLE_DICTIONARY);
expected.put("1000-" + ParquetProperties.WriterVersion.PARQUET_2_0, DELTA_BYTE_ARRAY);
for (int modulo : asList(10, 1000)) {
for (WriterVersion version : WriterVersion.values()) {
Path file = new Path(root, version.name() + "_" + modulo);
ParquetWriter<Group> writer = new ParquetWriter<Group>(
file,
new GroupWriteSupport(),
UNCOMPRESSED, 1024, 1024, 512, true, false, version, conf);
for (int i = 0; i < 1000; i++) {
writer.write(
f.newGroup()
.append("binary_field", "test" + (i % modulo))
.append("int32_field", 32)
.append("int64_field", 64l)
.append("boolean_field", true)
.append("float_field", 1.0f)
.append("double_field", 2.0d)
.append("flba_field", "foo")
.append("int96_field", Binary.fromConstantByteArray(new byte[12])));
}
writer.close();
ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file).withConf(conf).build();
for (int i = 0; i < 1000; i++) {
Group group = reader.read();
assertEquals("test" + (i % modulo), group.getBinary("binary_field", 0).toStringUsingUTF8());
assertEquals(32, group.getInteger("int32_field", 0));
assertEquals(64l, group.getLong("int64_field", 0));
assertEquals(true, group.getBoolean("boolean_field", 0));
assertEquals(1.0f, group.getFloat("float_field", 0), 0.001);
assertEquals(2.0d, group.getDouble("double_field", 0), 0.001);
assertEquals("foo", group.getBinary("flba_field", 0).toStringUsingUTF8());
assertEquals(Binary.fromConstantByteArray(new byte[12]),
group.getInt96("int96_field",0));
}
reader.close();
ParquetMetadata footer = readFooter(conf, file, NO_FILTER);
for (BlockMetaData blockMetaData : footer.getBlocks()) {
for (ColumnChunkMetaData column : blockMetaData.getColumns()) {
if (column.getPath().toDotString().equals("binary_field")) {
String key = modulo + "-" + version;
Encoding expectedEncoding = expected.get(key);
assertTrue(
key + ":" + column.getEncodings() + " should contain " + expectedEncoding,
column.getEncodings().contains(expectedEncoding));
}
}
}
assertEquals("Object model property should be example",
"example", footer.getFileMetaData().getKeyValueMetaData()
.get(ParquetWriter.OBJECT_MODEL_NAME_PROP));
}
}
}
@Rule
public TemporaryFolder temp = new TemporaryFolder();
@Test
public void testBadWriteSchema() throws IOException {
final File file = temp.newFile("test.parquet");
file.delete();
TestUtils.assertThrows("Should reject a schema with an empty group",
InvalidSchemaException.class, new Callable<Void>() {
@Override
public Void call() throws IOException {
ExampleParquetWriter.builder(new Path(file.toString()))
.withType(Types.buildMessage()
.addField(new GroupType(REQUIRED, "invalid_group"))
.named("invalid_message"))
.build();
return null;
}
});
Assert.assertFalse("Should not create a file when schema is rejected",
file.exists());
}
}

View File

@@ -0,0 +1,286 @@
package org.talend.parquet.util;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import static org.apache.parquet.schema.OriginalType.DECIMAL;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY;
import static org.apache.parquet.schema.Type.Repetition.REQUIRED;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;
import org.apache.parquet.schema.DecimalMetadata;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.SimpleGroup;
import org.talend.parquet.utils.TalendParquetUtils;
public class TalendParquetUtilsTest {
@Test
public void testGetStringList() {
MessageType schema = parseMessageType("message Schema { " //
+ " optional int64 fieldo; " //
+ " optional group field1 { " //
+ " repeated binary field2 (UTF8); " //
+ " } " //
+ "}"); //
Group group = new SimpleGroup(schema.getType(1).asGroupType());
group.add(0, "element 1");
group.add(0, "element 2");
group.add(0, "element 3");
group.add(0, "element 4");
List<Object> values = TalendParquetUtils.groupFieldValueToList(group);
MatcherAssert.assertThat("", values, Matchers.contains("element 1", "element 2", "element 3", "element 4"));
}
@Test
public void testGetIntList() {
MessageType schema = parseMessageType("message Schema { " //
+ " optional int64 fieldo; " //
+ " optional group field1 { " //
+ " repeated int32 field2 ; " //
+ " } " //
+ "}"); //
Group group = new SimpleGroup(schema.getType(1).asGroupType());
group.add(0, 123);
group.add(0, 345);
group.add(0, 431);
List<Object> values = TalendParquetUtils.groupFieldValueToList(group);
MatcherAssert.assertThat("", values, Matchers.contains(123, 345, 431));
}
@SuppressWarnings("unchecked")
@Test
public void testNestGroupList() {
MessageType schema = parseMessageType("message Schema { " //
+ " optional int64 fieldo; " //
+ " optional group field1 { " //
+ " repeated group field2 {" //
+ " repeated double field3;" //
+ " } " //
+ " } " //
+ "}"); //
Group group = new SimpleGroup(schema.getType(1).asGroupType());
Group nest1 = new SimpleGroup(schema.getType(1).asGroupType().getType(0).asGroupType());
nest1.add(0, 123.0);
nest1.add(0, 345.0);
nest1.add(0, 431.0);
Group nest2 = new SimpleGroup(schema.getType(1).asGroupType().getType(0).asGroupType());
nest2.add(0, 2123.0);
nest2.add(0, 2345.0);
nest2.add(0, 2431.0);
group.add(0, nest1);
group.add(0, nest2);
List<Object> values = TalendParquetUtils.groupFieldValueToList(group);
MatcherAssert.assertThat("", (List<Object>) values.get(0), Matchers.contains(123.0, 345.0, 431.0));
MatcherAssert.assertThat("", (List<Object>) values.get(1), Matchers.contains(2123.0, 2345.0, 2431.0));
}
@Test
public void testNullGroupList() {
List<Object> values = TalendParquetUtils.groupFieldValueToList(null);
Assert.assertNull(values);
}
@Test
public void testCreateGroupElementType() {
Type emptyElement = TalendParquetUtils.createGroupElementType("field0", null);
Assert.assertEquals(PrimitiveTypeName.BINARY, emptyElement.asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupElementType("field0", "1");
Assert.assertEquals(PrimitiveTypeName.BINARY, emptyElement.asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupElementType("field0", 1.0);
Assert.assertEquals(PrimitiveTypeName.DOUBLE, emptyElement.asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupElementType("field0", 1.0f);
Assert.assertEquals(PrimitiveTypeName.FLOAT, emptyElement.asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupElementType("field0", 1);
Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupElementType("field0", 1L);
Assert.assertEquals(PrimitiveTypeName.INT64, emptyElement.asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupElementType("field0", true);
Assert.assertEquals(PrimitiveTypeName.BOOLEAN, emptyElement.asPrimitiveType().getPrimitiveTypeName());
// Nested group
MessageType schema = parseMessageType("message Schema { " //
+ " optional group field1 { " //
+ " repeated group field2 {" //
+ " repeated double field3;" //
+ " } " //
+ " } " //
+ "}"); //
Group group = new SimpleGroup(schema.getType(0).asGroupType());
Group nest1 = new SimpleGroup(schema.getType(0).asGroupType().getType(0).asGroupType());
nest1.add(0, 123.0);
nest1.add(0, 345.0);
nest1.add(0, 431.0);
Group nest2 = new SimpleGroup(schema.getType(0).asGroupType().getType(0).asGroupType());
nest2.add(0, 2123.0);
nest2.add(0, 2345.0);
nest2.add(0, 2431.0);
group.add(0, nest1);
group.add(0, nest2);
Assert.assertFalse("Should be group type", group.getType().isPrimitive());
Assert.assertEquals(2, group.getFieldRepetitionCount(0));
emptyElement = TalendParquetUtils.createGroupElementType("field0", group);
Assert.assertFalse("Should be group type", emptyElement.isPrimitive());
Assert.assertEquals(schema.getType(0).asGroupType(), emptyElement);
}
@Test
public void testCreateGroupType() {
GroupType emptyElement = TalendParquetUtils.createGroupType("field0", true, null);
Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
Assert.assertEquals(OriginalType.UTF8, emptyElement.getType(0).asPrimitiveType().getOriginalType());
emptyElement = TalendParquetUtils.createGroupType("field0", true, 2);
Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.getType(0).asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupType("field0", true, Byte.valueOf("1"));
Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
Assert.assertEquals(OriginalType.INT_8, emptyElement.getType(0).asPrimitiveType().getOriginalType());
Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.getType(0).asPrimitiveType().getPrimitiveTypeName());
emptyElement = TalendParquetUtils.createGroupType("field0", true, Short.valueOf("1"));
Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
Assert.assertEquals(OriginalType.INT_16, emptyElement.getType(0).asPrimitiveType().getOriginalType());
Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.getType(0).asPrimitiveType().getPrimitiveTypeName());
}
@Test
public void testWriteGroupField() {
Group group = null;
MessageType schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated int32 array;" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
List<?> values = Arrays.asList(1, 2, 3);
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(3, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated int32 array(INT_8);" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList(Byte.valueOf("1"), Byte.valueOf("2"));
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(2, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated int32 array(INT_16);" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList(Short.valueOf("1"));
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(1, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated int64 array;" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList(1L, 2L, 3L);
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(3, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated double array;" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList(1.0, 2.0, 3.0);
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(3, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated float array;" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList(1.0f, 2.0f, 3.0f);
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(3, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated binary array (UTF8);" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList("element 1", "element 2");
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(2, group.getFieldRepetitionCount(0));
schema = parseMessageType("message Schema { " //
+ " optional group field0 (LIST) {" + " repeated boolean array ;" + " } " //
+ "}"); //
group = new SimpleGroup(schema.getType(0).asGroupType());
values = Arrays.asList(true, false);
TalendParquetUtils.writeGroupField(group, values);
Assert.assertEquals(2, group.getFieldRepetitionCount(0));
}
@Test
public void testDecimalAnnotation() {
Group group = null;
MessageType schema = new MessageType("DecimalMessage", new PrimitiveType(REQUIRED, FIXED_LEN_BYTE_ARRAY, 16,
"aDecimal", DECIMAL, new DecimalMetadata(38, 2), null));
BigDecimal decimalValue = new BigDecimal("1234423199.9999");
group = new SimpleGroup(schema);
group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 5));
Assert.assertEquals(decimalValue.setScale(5), TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 5));
group = new SimpleGroup(schema);
group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 4));
Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 4));
decimalValue = new BigDecimal("1234");
group = new SimpleGroup(schema);
group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 0));
Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 10, 0));
decimalValue = new BigDecimal("-93.5788130000");
group = new SimpleGroup(schema);
group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 10));
Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 10));
decimalValue = new BigDecimal("-0.00");
group = new SimpleGroup(schema);
group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 2));
Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 2));
decimalValue = new BigDecimal("0.000");
group = new SimpleGroup(schema);
group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 3));
Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 3));
}
}
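The decimal round-trip in testDecimalAnnotation follows Parquet's DECIMAL convention: the unscaled value is stored big-endian in two's complement, sign-extended to the declared FIXED_LEN_BYTE_ARRAY length. A minimal standalone sketch of that encoding, with hypothetical helper names (not the TalendParquetUtils implementation):

    // Hypothetical sketch of the FIXED_LEN_BYTE_ARRAY(n) DECIMAL encoding;
    // assumes the unscaled value fits into typeLength bytes.
    static Binary decimalToFixedBinary(BigDecimal value, int scale, int typeLength) {
        byte[] unscaled = value.setScale(scale).unscaledValue().toByteArray();
        byte[] out = new byte[typeLength];
        byte pad = (byte) (unscaled[0] < 0 ? 0xFF : 0x00);       // sign extension
        java.util.Arrays.fill(out, 0, typeLength - unscaled.length, pad);
        System.arraycopy(unscaled, 0, out, typeLength - unscaled.length, unscaled.length);
        return Binary.fromConstantByteArray(out);
    }

    static BigDecimal fixedBinaryToDecimal(Binary binary, int scale) {
        return new BigDecimal(new java.math.BigInteger(binary.getBytes()), scale);
    }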

View File

@@ -0,0 +1,59 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>talend-scp-helper</artifactId>
<version>1.1</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<ssd.version>2.8.0</ssd.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.apache.sshd</groupId>
<artifactId>sshd-scp</artifactId>
<version>${ssd.version}</version>
</dependency>
<dependency>
<groupId>org.apache.sshd</groupId>
<artifactId>sshd-common</artifactId>
<version>${ssd.version}</version>
</dependency>
<dependency>
<groupId>org.apache.sshd</groupId>
<artifactId>sshd-core</artifactId>
<version>${ssd.version}</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,117 @@
package org.talend.components.talendscp;
import org.apache.sshd.client.channel.ChannelExec;
import org.apache.sshd.scp.client.AbstractScpClient;
import org.apache.sshd.scp.client.DefaultScpClient;
import org.apache.sshd.scp.client.DefaultScpStreamResolver;
import org.apache.sshd.scp.client.ScpClient;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.common.FactoryManager;
import org.apache.sshd.common.file.FileSystemFactory;
import org.apache.sshd.common.file.util.MockFileSystem;
import org.apache.sshd.common.file.util.MockPath;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpHelper;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.scp.common.ScpTransferEventListener;
import org.apache.sshd.scp.common.helpers.ScpTimestampCommandDetails;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.FileSystem;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
/*
Here we simply override the ScpHelper so that it throws an exception when scp returns status 1,
preserving the old behavior
*/
public class TalendScpClient extends DefaultScpClient {
public TalendScpClient(ClientSession clientSession, ScpFileOpener fileOpener,
ScpTransferEventListener eventListener) {
super(clientSession, fileOpener, eventListener);
}
@Override public void upload(InputStream local, String remote, long size, Collection<PosixFilePermission> perms,
ScpTimestampCommandDetails time) throws IOException {
int namePos = ValidateUtils.checkNotNullAndNotEmpty(remote, "No remote location specified").lastIndexOf('/');
String name = (namePos < 0) ?
remote :
ValidateUtils.checkNotNullAndNotEmpty(remote.substring(namePos + 1), "No name value in remote=%s",
remote);
Collection<Option> options = (time != null) ? EnumSet.of(Option.PreserveAttributes) : Collections.emptySet();
String cmd = ScpClient.createSendCommand(remote, options);
ClientSession session = getClientSession();
ChannelExec channel = openCommandChannel(session, cmd);
try (InputStream invOut = channel.getInvertedOut(); OutputStream invIn = channel.getInvertedIn()) {
// NOTE: we use a mock file system since we expect no invocations for it
ScpHelper helper = new TalendScpHelper(session, invOut, invIn, new MockFileSystem(remote), opener, listener);
Path mockPath = new MockPath(remote);
helper.sendStream(new DefaultScpStreamResolver(name, mockPath, perms, time, size, local, cmd),
options.contains(Option.PreserveAttributes), ScpHelper.DEFAULT_SEND_BUFFER_SIZE);
handleCommandExitStatus(cmd, channel);
} finally {
channel.close(false);
}
}
@Override protected <T> void runUpload(String remote, Collection<Option> options, Collection<T> local,
AbstractScpClient.ScpOperationExecutor<T> executor) throws IOException {
local = ValidateUtils.checkNotNullAndNotEmpty(local, "Invalid argument local: %s", local);
remote = ValidateUtils.checkNotNullAndNotEmpty(remote, "Invalid argument remote: %s", remote);
if (local.size() > 1) {
options = addTargetIsDirectory(options);
}
String cmd = ScpClient.createSendCommand(remote, options);
ClientSession session = getClientSession();
ChannelExec channel = openCommandChannel(session, cmd);
try {
FactoryManager manager = session.getFactoryManager();
FileSystemFactory factory = manager.getFileSystemFactory();
FileSystem fs = factory.createFileSystem(session);
try (InputStream invOut = channel.getInvertedOut(); OutputStream invIn = channel.getInvertedIn()) {
ScpHelper helper = new TalendScpHelper(session, invOut, invIn, fs, opener, listener);
executor.execute(helper, local, options);
} finally {
try {
fs.close();
} catch (UnsupportedOperationException e) {
if (log.isDebugEnabled()) {
log.debug("runUpload({}) {} => {} - failed ({}) to close file system={}: {}", session, remote,
local, e.getClass().getSimpleName(), fs, e.getMessage());
}
}
}
handleCommandExitStatus(cmd, channel);
} finally {
channel.close(false);
}
}
@Override public void download(String remote, OutputStream local) throws IOException {
String cmd = ScpClient.createReceiveCommand(remote, Collections.emptyList());
ClientSession session = getClientSession();
ChannelExec channel = openCommandChannel(session, cmd);
try (InputStream invOut = channel.getInvertedOut(); OutputStream invIn = channel.getInvertedIn()) {
// NOTE: we use a mock file system since we expect no invocations for it
ScpHelper helper =
new TalendScpHelper(session, invOut, invIn, new MockFileSystem(remote), opener, listener);
helper.receiveFileStream(local, ScpHelper.DEFAULT_RECEIVE_BUFFER_SIZE);
handleCommandExitStatus(cmd, channel);
} finally {
channel.close(false);
}
}
}

View File

@@ -0,0 +1,22 @@
package org.talend.components.talendscp;
import org.apache.sshd.scp.client.AbstractScpClientCreator;
import org.apache.sshd.scp.client.ScpClient;
import org.apache.sshd.scp.client.ScpClientCreator;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpTransferEventListener;
public class TalendScpClientCreator extends AbstractScpClientCreator implements ScpClientCreator {
public static final TalendScpClientCreator INSTANCE = new TalendScpClientCreator();
@Override
public ScpClient createScpClient(ClientSession session) {
return this.createScpClient(session, this.getScpFileOpener(), this.getScpTransferEventListener());
}
@Override public ScpClient createScpClient(ClientSession clientSession, ScpFileOpener scpFileOpener,
ScpTransferEventListener scpTransferEventListener) {
return new TalendScpClient(clientSession, scpFileOpener, scpTransferEventListener);
}
}
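A hedged usage sketch of the two classes above, assuming an already-connected and authenticated Apache MINA SSHD ClientSession named session (paths are hypothetical):

    // Obtain the patched client through the creator and transfer files with it.
    ScpClient scp = TalendScpClientCreator.INSTANCE.createScpClient(session);
    scp.upload(java.nio.file.Paths.get("/tmp/local.txt"), "/remote/dir/local.txt");
    scp.download("/remote/dir/local.txt", java.nio.file.Paths.get("/tmp/copy.txt"));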

View File

@@ -0,0 +1,65 @@
package org.talend.components.talendscp;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.FileSystem;
import org.apache.sshd.scp.common.ScpException;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpHelper;
import org.apache.sshd.scp.common.ScpTransferEventListener;
import org.apache.sshd.common.session.Session;
import org.apache.sshd.scp.common.helpers.ScpAckInfo;
public class TalendScpHelper extends ScpHelper {
public TalendScpHelper(Session session, InputStream in, OutputStream out, FileSystem fileSystem,
ScpFileOpener opener, ScpTransferEventListener eventListener) {
super(session, in, out, fileSystem, opener, eventListener);
}
@Override
public ScpAckInfo readAck(boolean canEof) throws IOException {
final ScpAckInfo scpAckInfo = ScpAckInfo.readAck(this.in, this.csIn, canEof);
int c = scpAckInfo == null ? -1 : scpAckInfo.getStatusCode();
switch (c) {
case -1:
if (log.isDebugEnabled()) {
log.debug("readAck({})[EOF={}] received EOF", this, canEof);
}
if (!canEof) {
throw new EOFException("readAck - EOF before ACK");
}
break;
case ScpAckInfo.OK:
if (log.isDebugEnabled()) {
log.debug("readAck({})[EOF={}] read OK", this, canEof);
}
break;
case ScpAckInfo.WARNING: {
if (log.isDebugEnabled()) {
log.debug("readAck({})[EOF={}] read warning message", this, canEof);
}
String line = readLine();
log.warn("readAck({})[EOF={}] - Received warning: {}", this, canEof, line);
throw new ScpException("received error: " + line, c);
}
case ScpAckInfo.ERROR: {
if (log.isDebugEnabled()) {
log.debug("readAck({})[EOF={}] read error message", this, canEof);
}
String line = readLine();
if (log.isDebugEnabled()) {
log.debug("readAck({})[EOF={}] received error: {}", this, canEof, line);
}
throw new ScpException("Received nack: " + line, c);
}
default:
break;
}
return scpAckInfo;
}
}
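For context, the acknowledgement stream consumed here follows the classic SCP protocol: a 0x00 byte means OK, 0x01 is a warning followed by a text line, and 0x02 is a fatal error followed by a text line. A standalone illustrative sketch, independent of the MINA SSHD types (hypothetical helper):

    // Parses one SCP acknowledgement from a raw stream and throws on warning or
    // error, mirroring the stricter behavior TalendScpHelper restores.
    static int readAckSketch(java.io.InputStream in) throws java.io.IOException {
        int status = in.read();                       // -1 = EOF, 0 = OK
        if (status == 1 || status == 2) {
            StringBuilder msg = new StringBuilder();
            for (int c; (c = in.read()) != -1 && c != '\n'; ) {
                msg.append((char) c);                 // detail line ends at '\n'
            }
            throw new java.io.IOException("scp status " + status + ": " + msg);
        }
        return status;
    }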

View File

@@ -60,9 +60,9 @@
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>

View File

@@ -2,17 +2,28 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<groupId>org.talend.components</groupId>
<artifactId>talend-ws</artifactId>
<version>1.0.1-20191112</version>
<version>1.0.7-20220526</version>
<packaging>jar</packaging>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>https://www.talendforge.org/modules/licenses/APACHE_v2.txt</url>
<distribution>may be downloaded from the Maven repository</distribution>
</license>
</licenses>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<cxf.version>3.3.4</cxf.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<cxf.version>3.4.7</cxf.version>
<odata.version>4.3.0</odata.version>
<slf4j.version>1.7.12</slf4j.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<httpclient.version>4.5.13</httpclient.version>
</properties>
<distributionManagement>
@@ -43,13 +54,19 @@
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.10</version>
<version>1.14</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient -->
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient -->
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>3.1</version>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.13</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.codehaus.woodstox/stax2-api -->
<dependency>
@@ -91,6 +108,12 @@
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-core</artifactId>
<version>${cxf.version}</version>
<exclusions>
<exclusion>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
@@ -101,6 +124,16 @@
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-tools-common</artifactId>
<version>${cxf.version}</version>
<exclusions>
<exclusion>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-xjc</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
@@ -127,6 +160,16 @@
<artifactId>cxf-rt-transports-http</artifactId>
<version>${cxf.version}</version>
</dependency>
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.2.6</version>
</dependency>
<dependency>
<groupId>org.talend.libraries</groupId>
<artifactId>jaxb-impl-2.2.6-modified</artifactId>
<version>6.0.0</version>
</dependency>
<dependency>
<groupId>org.talend.libraries</groupId>
<artifactId>jaxb-xjc-2.2.6-modified</artifactId>
@@ -244,6 +287,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
@@ -251,4 +295,4 @@
</plugin>
</plugins>
</build>
</project>
</project>

View File

@@ -48,4 +48,6 @@ please @see org\talend\ws\helper\ServiceDiscoveryHelper.java
please @see org\talend\ws\helper\ServiceInvokerHelper.java
org\talend\ws\mapper\MapperFactory.java
10.(2019-01-18 modified by dchmyga) fixed TDI-41647
10.(2019-01-18 modified by dchmyga) fixed TDI-41647
11.(2020-08-24 modified by ozhelezniak) updated commons-codec to 1.14 in scope of TDI-44145

View File

@@ -1,6 +1,6 @@
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt

View File

@@ -3,29 +3,22 @@
*/
package org.talend.webservice.helper;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import com.ibm.wsdl.Constants;
import com.ibm.wsdl.extensions.schema.SchemaConstants;
import com.ibm.wsdl.util.xml.DOMUtils;
import com.ibm.wsdl.util.xml.QNameUtils;
import org.apache.ws.commons.schema.XmlSchema;
import org.apache.ws.commons.schema.XmlSchemaCollection;
import org.talend.webservice.helper.conf.ServiceHelperConfiguration;
import org.talend.webservice.helper.conf.WSDLLocatorImpl;
import org.w3c.dom.Element;
import javax.wsdl.Definition;
import javax.wsdl.Import;
import javax.wsdl.Types;
import javax.wsdl.WSDLException;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.UnknownExtensibilityElement;
import javax.wsdl.extensions.schema.Schema;
import javax.wsdl.extensions.schema.SchemaImport;
import javax.wsdl.extensions.schema.SchemaReference;
@@ -37,16 +30,15 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.ws.commons.schema.XmlSchemaCollection;
import org.talend.webservice.helper.conf.ServiceHelperConfiguration;
import org.talend.webservice.helper.conf.WSDLLocatorImpl;
import org.w3c.dom.Element;
import com.ibm.wsdl.Constants;
import com.ibm.wsdl.extensions.schema.SchemaConstants;
import com.ibm.wsdl.util.xml.DOMUtils;
import com.ibm.wsdl.util.xml.QNameUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
/**
* This helper allows easy discovery of services and types
@@ -70,26 +62,36 @@ public class ServiceDiscoveryHelper {
private Set<String> namespaces;
private final String LOCAL_WSDL_NAME = "mainWSDL.wsdl";
private boolean createTempFiles = true;
public ServiceDiscoveryHelper(String wsdlUri) throws WSDLException, IOException, TransformerException, URISyntaxException {
this(wsdlUri, null, null);
this(wsdlUri, null, null, true);
}
public ServiceDiscoveryHelper(String wsdlUri, String tempPath) throws WSDLException, IOException, TransformerException,
URISyntaxException {
this(wsdlUri, null, tempPath);
this(wsdlUri, null, tempPath, true);
}
public ServiceDiscoveryHelper(String wsdlUri, ServiceHelperConfiguration configuration) throws WSDLException, IOException,
TransformerException, URISyntaxException {
this(wsdlUri, configuration, null);
this(wsdlUri, configuration, null, true);
}
public ServiceDiscoveryHelper(String wsdlUri, ServiceHelperConfiguration configuration, String tempPath)
throws WSDLException, IOException, TransformerException, URISyntaxException {
this(wsdlUri, configuration, tempPath, true);
}
public ServiceDiscoveryHelper(String wsdlUri, ServiceHelperConfiguration configuration, String tempPath, boolean createTempFiles)
throws WSDLException, IOException, TransformerException, URISyntaxException {
this.wsdlUri = wsdlUri;
this.configuration = configuration;
this.wsdlTmpDir = createTempWsdlDir(tempPath);
this.createTempFiles = createTempFiles;
if(createTempFiles) {
this.wsdlTmpDir = createTempWsdlDir(tempPath);
}
init();
}
@@ -139,7 +141,9 @@ public class ServiceDiscoveryHelper {
namespaces = collectNamespaces();
generateTempWsdlFile();
if(this.createTempFiles) {
generateTempWsdlFile();
}
}
@@ -436,6 +440,10 @@ public class ServiceDiscoveryHelper {
return definitions.get(this.LOCAL_WSDL_NAME);
}
Collection<Definition> getDefinitions() {
return definitions.values();
}
/**
* Return the xml schema collection
*
@@ -450,7 +458,11 @@ public class ServiceDiscoveryHelper {
}
public String getLocalWsdlUri() {
return new File(wsdlTmpDir, this.LOCAL_WSDL_NAME).toURI().toString();
if(createTempFiles) {
return new File(wsdlTmpDir, this.LOCAL_WSDL_NAME).toURI().toString();
} else {
return this.wsdlUri;
}
}
public Set<String> getNamespaces() {
@@ -464,4 +476,33 @@ public class ServiceDiscoveryHelper {
return "NOLOCATION";
}
}
public static void main(String[] args) throws Exception {
System.setProperty("javax.xml.transform.TransformerFactory", "org.apache.xalan.processor.TransformerFactoryImpl");
System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.NoOpLog");
System.setProperty("javax.xml.accessExternalSchema", "all");
//shade the log level for DynamicClientFactory.class
java.util.logging.Logger LOG = org.apache.cxf.common.logging.LogUtils.getL7dLogger(org.apache.cxf.endpoint.dynamic.DynamicClientFactory.class);
LOG.setLevel(java.util.logging.Level.WARNING);
ServiceDiscoveryHelper helper = new ServiceDiscoveryHelper("http://gcomputer.net/webservices/knowledge.asmx?WSDL", null, null, false);
//ServiceDiscoveryHelper helper = new ServiceDiscoveryHelper("/Users/wangwei/Downloads/knowledge.wsdl", null, null, false);
/*
WSDLMetadataUtils utils = new WSDLMetadataUtils();
//WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(helper, "KnowledgeLeakSoap12", "Knowledge");
WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(helper, null, "Knowledge");
System.out.println(info.operationName);
System.out.println(info.port);
System.out.println(info.service);
System.out.println(info.inputParameters);
System.out.println(info.outputParameter);
System.out.println("done");
*/
org.talend.webservice.helper.ServiceInvokerHelper serviceInvokerHelper = new org.talend.webservice.helper.ServiceInvokerHelper(helper, null);
Map<String, Object> result = serviceInvokerHelper.invokeDynamic("Knowledge", Arrays.asList(1));
System.out.println(result);
}
}

View File

@@ -3,30 +3,6 @@
*/
package org.talend.webservice.helper;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.wsdl.Input;
import javax.wsdl.Message;
import javax.wsdl.Operation;
import javax.wsdl.Output;
import javax.wsdl.Port;
import javax.wsdl.Service;
import javax.wsdl.WSDLException;
import javax.xml.bind.annotation.XmlSchema;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import javax.xml.transform.TransformerException;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.cxf.endpoint.Client;
@@ -39,11 +15,18 @@ import org.talend.webservice.helper.conf.ServiceHelperConfiguration;
import org.talend.webservice.helper.map.MapConverter;
import org.talend.webservice.jaxb.JAXBUtils;
import org.talend.webservice.jaxb.JAXBUtils.IdentifierType;
import org.talend.webservice.mapper.AnyPropertyMapper;
import org.talend.webservice.mapper.ClassMapper;
import org.talend.webservice.mapper.EmptyMessageMapper;
import org.talend.webservice.mapper.MapperFactory;
import org.talend.webservice.mapper.MessageMapper;
import org.talend.webservice.mapper.*;
import javax.wsdl.*;
import javax.xml.bind.annotation.XmlSchema;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import javax.xml.transform.TransformerException;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
/**
*
@@ -261,6 +244,61 @@ public class ServiceInvokerHelper implements ClassMapper {
return MapConverter.deepMapToMap(result);
}
//automatically determines the service, port and operation; the operation name and parameter values are required
public Map<String, Object> invokeDynamic(String operationNameAndPortName, List<Object> param_values)
throws Exception, LocalizedException {
String portName = null;
String operationName = operationNameAndPortName;
try {
portName = operationName.substring(operationName.indexOf("(") + 1, operationName.indexOf(")"));
operationName = operationName.substring(0, operationName.indexOf("("));
} catch (Exception ignored) {
}
WSDLMetadataUtils utils = new WSDLMetadataUtils();
WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(this.serviceDiscoveryHelper, portName, operationName);
Map<String, Object> paramsMap = null;
if(param_values!=null && !param_values.isEmpty()) {
List<String> paths = new ArrayList<>();
flat(paths, info.inputParameters, null);
int size = Math.min(paths.size(), param_values.size());
paramsMap = new HashMap<>();
for(int i=0;i<size;i++) {
paramsMap.put(paths.get(i), param_values.get(i));
}
if (!paramsMap.isEmpty()) {
paramsMap = MapConverter.mapToDeepMap(paramsMap);
}
if (paramsMap.isEmpty()) {
paramsMap = null;
}
}
Map<String, Object> result = invoke(info.service, info.port, info.operationName, paramsMap);
if(result==null || result.isEmpty()) return null;
return MapConverter.deepMapToMap(result, true);
}
private void flat(List<String> paths, List<WSDLMetadataUtils.ParameterInfo> inputParameters, String path) {
if(inputParameters==null || inputParameters.isEmpty()) {
if(path!=null) {
paths.add(path);
}
return;
}
for(WSDLMetadataUtils.ParameterInfo info : inputParameters) {
flat(paths, info.childParameters, path!=null? path + "." + info.name : info.name);
}
}
protected String getClassNameForType(QName xmlSchemaTypeMapperQname) {
StringBuilder sb = new StringBuilder();
sb.append(getPackageForNamespaceURI(xmlSchemaTypeMapperQname.getNamespaceURI()));
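invokeDynamic above binds positional parameter values to the dotted input paths produced by flat(). A minimal standalone illustration of that flattening; the Node class is hypothetical, while the real code walks WSDLMetadataUtils.ParameterInfo:

    import java.util.*;

    class Node {
        String name;
        List<Node> children = new ArrayList<>();
        Node(String name, Node... kids) { this.name = name; children.addAll(Arrays.asList(kids)); }
    }

    public class FlattenSketch {
        // Leaf nodes contribute their full dotted path, exactly as in flat() above.
        static void flat(List<String> paths, List<Node> nodes, String path) {
            if (nodes == null || nodes.isEmpty()) {
                if (path != null) paths.add(path);
                return;
            }
            for (Node n : nodes) {
                flat(paths, n.children, path != null ? path + "." + n.name : n.name);
            }
        }

        public static void main(String[] args) {
            Node root = new Node("Knowledge", new Node("query"), new Node("options", new Node("limit")));
            List<String> paths = new ArrayList<>();
            flat(paths, Collections.singletonList(root), null);
            System.out.println(paths);                // [Knowledge.query, Knowledge.options.limit]
        }
    }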

View File

@@ -0,0 +1,640 @@
package org.talend.webservice.helper;
import org.apache.ws.commons.schema.*;
import org.w3c.dom.Element;
import javax.wsdl.*;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.UnknownExtensibilityElement;
import javax.wsdl.extensions.soap.SOAPBinding;
import javax.wsdl.extensions.soap.SOAPBody;
import javax.wsdl.extensions.soap.SOAPOperation;
import javax.wsdl.extensions.soap12.SOAP12Binding;
import javax.wsdl.extensions.soap12.SOAP12Body;
import javax.wsdl.extensions.soap12.SOAP12Operation;
import javax.wsdl.factory.WSDLFactory;
import javax.xml.namespace.QName;
import java.io.File;
import java.net.URL;
import java.util.*;
public class WSDLMetadataUtils {
WSDLFactory wsdlFactory = null;
private Vector<XmlSchema> wsdlTypes = new Vector<XmlSchema>();
private List<String> parametersName = new ArrayList<String>();
private List<String> schemaNames = new ArrayList<String>();
private List<String> documentBaseList = new ArrayList<String>();
private List<XmlSchemaElement> allXmlSchemaElement = new ArrayList<XmlSchemaElement>();
private List<XmlSchemaType> allXmlSchemaType = new ArrayList<XmlSchemaType>();
public final static String DEFAULT_SOAP_ENCODING_STYLE = "http://schemas.xmlsoap.org/soap/encoding/";
public WSDLMetadataUtils() throws WSDLException {
wsdlFactory = WSDLFactory.newInstance();
}
public static class OperationInfo {
QName port;
QName service;
String operationName;
List<ParameterInfo> inputParameters = new ArrayList<ParameterInfo>();
List<ParameterInfo> outputParameter = new ArrayList<ParameterInfo>();
}
public class ParameterInfo {
String name;
/* list of parameters, only filled if complex type */
List<ParameterInfo> childParameters = new ArrayList<ParameterInfo>();
}
//not thread safe
private List<OperationInfo> operations;
private String targetOperationName;
private String currentPort;
private QName currentService;
public OperationInfo parseOperationInfo(ServiceDiscoveryHelper sdh, String port, String operationName) throws Exception {
this.targetOperationName = operationName;
Collection<Definition> defs = sdh.getDefinitions();
wsdlTypes = createSchemaFromTypes(defs);
collectAllXmlSchemaElement();
collectAllXmlSchemaType();
//only fetch services from main wsdl/definition, others for elements and type definition
Map services = defs.iterator().next().getServices();
if (services != null) {
Iterator iter = services.values().iterator();
while (iter.hasNext()) {
List<OperationInfo> operations = getOperations((Service) iter.next());
for(OperationInfo info : operations) {
if(port==null) {
return info;
}
if(port.equals(info.port.getLocalPart())) {
return info;
}
}
}
}
throw new RuntimeException("can't find the operation : " + operationName + " with port : " + port);
}
private void collectAllXmlSchemaElement() {
for (int i = 0; i < wsdlTypes.size(); i++) {
XmlSchema xmlSchema = (wsdlTypes.elementAt(i));
if (xmlSchema == null) {
continue;
}
Map<QName, XmlSchemaElement> elements = xmlSchema.getElements();
Iterator elementsItr = elements.values().iterator();
while (elementsItr.hasNext()) {
XmlSchemaElement xmlSchemaElement = (XmlSchemaElement) elementsItr.next();
allXmlSchemaElement.add(xmlSchemaElement);
}
}
}
private void collectAllXmlSchemaType() {
for (int i = 0; i < wsdlTypes.size(); i++) {
XmlSchema xmlSchema = (wsdlTypes.elementAt(i));
if (xmlSchema == null) {
continue;
}
Map<QName, XmlSchemaType> xmlSchemaObjectTable = xmlSchema.getSchemaTypes();
Iterator typesItr = xmlSchemaObjectTable.values().iterator();
while (typesItr.hasNext()) {
XmlSchemaType xmlSchemaType = (XmlSchemaType) typesItr.next();
allXmlSchemaType.add(xmlSchemaType);
}
}
}
protected Vector<XmlSchema> createSchemaFromTypes(Collection<Definition> wsdlDefinitions) throws WSDLException {
Vector<XmlSchema> schemas = new Vector<XmlSchema>();
Set<String> imports = new HashSet<String>();
Element schemaElementt = null;
Map importElement = null;
List includeElement = null;
for (Definition def : wsdlDefinitions) {
if (def.getTypes() != null) {
List schemaExtElem = findExtensibilityElement(def.getTypes().getExtensibilityElements(), "schema");
for (int i = 0; i < schemaExtElem.size(); i++) {
ExtensibilityElement schemaElement = (ExtensibilityElement) schemaExtElem.get(i);
if (schemaElement != null && schemaElement instanceof UnknownExtensibilityElement) {
schemaElementt = ((UnknownExtensibilityElement) schemaElement).getElement();
String documentBase = ((javax.wsdl.extensions.schema.Schema) schemaElement).getDocumentBaseURI();
XmlSchema schema = createschemafromtype(schemaElementt, def, documentBase);
if (schema != null) {
schemas.add(schema);
if (schema.getTargetNamespace() != null) {
schemaNames.add(schema.getTargetNamespace());
}
}
importElement = ((javax.wsdl.extensions.schema.Schema) schemaElement).getImports();
if (importElement != null && importElement.size() > 0) {
findImportSchema(def, schemas, importElement, imports);
}
}
if (schemaElement != null && schemaElement instanceof javax.wsdl.extensions.schema.Schema) {
schemaElementt = ((javax.wsdl.extensions.schema.Schema) schemaElement).getElement();
String documentBase = ((javax.wsdl.extensions.schema.Schema) schemaElement).getDocumentBaseURI();
Boolean isHaveImport = false;
importElement = ((javax.wsdl.extensions.schema.Schema) schemaElement).getImports();
if (importElement != null && importElement.size() > 0) {
Iterator keyIterator = importElement.keySet().iterator();
if (importElement.size() > 0) {
isHaveImport = true;
}
}
XmlSchema schema = createschemafromtype(schemaElementt, def, documentBase);
if (schema != null) {
schemas.add(schema);
if (schema.getTargetNamespace() != null) {
schemaNames.add(schema.getTargetNamespace());
}
}
if (isHaveImport) {
findImportSchema(def, schemas, importElement, imports);
}
}
}
}
}
return schemas;
}
private void findIncludesSchema(Definition wsdlDefinition, Vector schemas, List includeElement) throws WSDLException {
Element schemaElementt;
for (int i = 0; i < includeElement.size(); i++) {
schemaElementt = ((com.ibm.wsdl.extensions.schema.SchemaReferenceImpl) includeElement.get(i)).getReferencedSchema()
.getElement();
String documentBase = ((com.ibm.wsdl.extensions.schema.SchemaReferenceImpl) includeElement.get(i))
.getReferencedSchema().getDocumentBaseURI();
XmlSchema schemaInclude = createschemafromtype(schemaElementt, wsdlDefinition, documentBase);
if (schemaInclude != null) {
schemas.add(schemaInclude);
if (schemaInclude.getTargetNamespace() != null) {
schemaNames.add(schemaInclude.getTargetNamespace());
}
}
}
}
private void findImportSchema(Definition wsdlDefinition, Vector schemas, Map importElement, Set<String> imports)
throws WSDLException {
Element schemaElementt;
List includeElement = null;
Iterator keyIterator = importElement.keySet().iterator();
Boolean isHaveImport = false;
while (keyIterator.hasNext()) {
Object object = keyIterator.next();
if (object != null) {
String key = object.toString();
Vector importEle = (Vector) importElement.get(key);
for (int i = 0; i < importEle.size(); i++) {
Map importChildElement = null;
com.ibm.wsdl.extensions.schema.SchemaImportImpl importImpl = (com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle
.elementAt(i);
// to avoid import cycle
String importLocation = importImpl.getSchemaLocationURI() + ":" + importImpl.getNamespaceURI();
if (imports.contains(importLocation)) {
continue;
} else {
imports.add(importLocation);
}
if (importImpl.getReferencedSchema() != null) {
schemaElementt = importImpl.getReferencedSchema().getElement();
String documentBase = importImpl.getReferencedSchema().getDocumentBaseURI();
if ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i) != null) {
if (((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i)).getReferencedSchema() != null) {
importChildElement = ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i))
.getReferencedSchema().getImports();
if (importChildElement != null && importChildElement.size() > 0 && !isIncludeSchema(documentBase)) {
isHaveImport = true;
documentBaseList.add(documentBase);
// validateImportUrlPath(importElement);
}
}
}
XmlSchema schemaImport = createschemafromtype(schemaElementt, wsdlDefinition, documentBase);
if (schemaImport != null) {
schemas.add(schemaImport);
if (schemaImport.getTargetNamespace() != null) {
schemaNames.add(schemaImport.getTargetNamespace());
}
}
}
if (isHaveImport) {
findImportSchema(wsdlDefinition, schemas, importChildElement, imports);
}
if ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i) != null) {
if (((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i)).getReferencedSchema() != null) {
includeElement = ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i))
.getReferencedSchema().getIncludes();
if (includeElement != null && includeElement.size() > 0) {
findIncludesSchema(wsdlDefinition, schemas, includeElement);
}
}
}
}
}
}
}
private List findExtensibilityElement(List extensibilityElements, String elementType) {
List elements = new ArrayList();
if (extensibilityElements != null) {
Iterator iter = extensibilityElements.iterator();
while (iter.hasNext()) {
ExtensibilityElement element = (ExtensibilityElement) iter.next();
if (element.getElementType().getLocalPart().equalsIgnoreCase(elementType)) {
elements.add(element);
}
}
}
return elements;
}
private XmlSchema createschemafromtype(Element schemaElement, Definition wsdlDefinition, String documentBase)
throws WSDLException {
if (schemaElement == null) {
throw new WSDLException(WSDLException.INVALID_WSDL, "Unable to find schema extensibility element in WSDL");
}
XmlSchema xmlSchema = null;
XmlSchemaCollection xmlSchemaCollection = new XmlSchemaCollection();
xmlSchemaCollection.setBaseUri(fixDocumentBase(documentBase));
xmlSchema = xmlSchemaCollection.read(schemaElement);
return xmlSchema;
}
private String fixDocumentBase(String documentBase) {
String fixedPath = documentBase;
try {
URL url = new URL(documentBase);
File file = new File(url.getFile());
fixedPath = file.toURI().toString();
} catch (Exception e) {
fixedPath = documentBase;
}
return fixedPath;
}
private Boolean isIncludeSchema(String documentBase) {
Boolean isHaveSchema = false;
for (int i = 0; i < documentBaseList.size(); i++) {
String documentBaseTem = documentBaseList.get(i);
if (documentBaseTem.equals(documentBase)) {
isHaveSchema = true;
}
}
return isHaveSchema;
}
private List<OperationInfo> getOperations(Service service) {
currentService = service.getQName();
List<OperationInfo> result = new ArrayList<>();
Map ports = service.getPorts();
Iterator portIter = ports.values().iterator();
while (portIter.hasNext()) {
Port port = (Port) portIter.next();
Binding binding = port.getBinding();
currentPort = port.getName();
result.addAll(buildOperations(binding));
}
return result;
}
private List<OperationInfo> buildOperations(Binding binding) {
List<OperationInfo> result = new ArrayList<>();
List operations = binding.getBindingOperations();
if (operations != null && !operations.isEmpty()) {
List soapBindingElems = findExtensibilityElement(binding.getExtensibilityElements(), "binding");
String style = "document"; // default
ExtensibilityElement soapBindingElem = (ExtensibilityElement) soapBindingElems.get(0);
if (soapBindingElem != null && soapBindingElem instanceof SOAPBinding) {
SOAPBinding soapBinding = (SOAPBinding) soapBindingElem;
style = soapBinding.getStyle();
} else if (soapBindingElem != null && soapBindingElem instanceof SOAP12Binding) {
SOAP12Binding soapBinding = (SOAP12Binding) soapBindingElem;
style = soapBinding.getStyle();
}
Iterator opIter = operations.iterator();
while (opIter.hasNext()) {
BindingOperation oper = (BindingOperation) opIter.next();
List operElems = findExtensibilityElement(oper.getExtensibilityElements(), "operation");
ExtensibilityElement operElem = (ExtensibilityElement) operElems.get(0);
if(!targetOperationName.equals(oper.getName())) {
continue;
}
OperationInfo operationInfo = new OperationInfo();
operationInfo.service = currentService;
operationInfo.port = new QName(currentService.getNamespaceURI(), currentPort);
operationInfo.operationName = oper.getName();
//TODO handle SOAP 1.1 and 1.2 operations differently?
if (operElem != null && operElem instanceof SOAPOperation) {
buildOperation(operationInfo, oper);
} else if (operElem != null && operElem instanceof SOAP12Operation) {
buildOperation(operationInfo, oper);
}
result.add(operationInfo);
}
}
return result;
}
private void buildOperation(OperationInfo operationInfo, BindingOperation bindingOper) {
Operation oper = bindingOper.getOperation();
List operElems = findExtensibilityElement(bindingOper.getExtensibilityElements(), "operation");
ExtensibilityElement operElem = (ExtensibilityElement) operElems.get(0);
if (operElem != null && operElem instanceof SOAPOperation) {//TODO do different?
SOAPOperation soapOperation = (SOAPOperation) operElem;
} else if (operElem != null && operElem instanceof SOAP12Operation) {
SOAP12Operation soapOperation = (SOAP12Operation) operElem;
}
BindingInput bindingInput = bindingOper.getBindingInput();
BindingOutput bindingOutput = bindingOper.getBindingOutput();
List bodyElems = findExtensibilityElement(bindingInput.getExtensibilityElements(), "body");
ExtensibilityElement bodyElem = (ExtensibilityElement) bodyElems.get(0);
if (bodyElem != null && bodyElem instanceof SOAPBody) {
SOAPBody soapBody = (SOAPBody) bodyElem;
List styles = soapBody.getEncodingStyles();
String encodingStyle = null;
if (styles != null) {
encodingStyle = styles.get(0).toString();
}
if (encodingStyle == null) {
encodingStyle = DEFAULT_SOAP_ENCODING_STYLE;
}
//TODO get namespace uri here?
//soapBody.getNamespaceURI();
} else if (bodyElem != null && bodyElem instanceof SOAP12Body) {
SOAP12Body soapBody = (SOAP12Body) bodyElem;
String encodingStyle = null;
if (soapBody.getEncodingStyle() != null) {
encodingStyle = soapBody.getEncodingStyle().toString();
}
if (encodingStyle == null) {
encodingStyle = DEFAULT_SOAP_ENCODING_STYLE;
}
//TODO get namespace uri here?
//soapBody.getNamespaceURI();
}
Input inDef = oper.getInput();
if (inDef != null) {
Message inMsg = inDef.getMessage();
if (inMsg != null) {
getParameterFromMessage(operationInfo, inMsg, 1);
}
}
//don't need output parameter struct now
/*
Output outDef = oper.getOutput();
if (outDef != null) {
Message outMsg = outDef.getMessage();
if (outMsg != null) {
getParameterFromMessage(operationInfo, outMsg, 2);
}
}
*/
}
private void getParameterFromMessage(OperationInfo operationInfo, Message msg, int manner) {
List msgParts = msg.getOrderedParts(null);
Iterator iter = msgParts.iterator();
while (iter.hasNext()) {
Part part = (Part) iter.next();
String partName = part.getName();
String partElement = null;
String namespace = null;
if (part.getElementName() != null) {
partElement = part.getElementName().getLocalPart();
namespace = part.getElementName().getNamespaceURI();
} else if (part.getTypeName() != null) {
partElement = part.getTypeName().getLocalPart();
namespace = part.getTypeName().getNamespaceURI();
}
// add root parameter from message.
ParameterInfo parameterRoot = new ParameterInfo();
parameterRoot.name = partName;
if (manner == 1) {
operationInfo.inputParameters.add(parameterRoot);
} else {
operationInfo.outputParameter.add(parameterRoot);
}
if (allXmlSchemaElement.size() > 0) {
buildParameterFromElements(partElement, parameterRoot, manner);
} else if (allXmlSchemaType.size() > 0) {
buileParameterFromTypes(namespace, partElement, parameterRoot, manner);
}
}
}
private void buildParameterFromElements(String partElement, ParameterInfo parameterRoot, int ioOrRecursion) {
if (ioOrRecursion < 3) {
parametersName.clear();
parametersName.add(parameterRoot.name);
} else if (ioOrRecursion == 3) {
parametersName.add(parameterRoot.name);
}
Iterator elementsItr = allXmlSchemaElement.iterator();
if (partElement != null) {
while (elementsItr.hasNext()) {
XmlSchemaElement xmlSchemaElement = (XmlSchemaElement) elementsItr.next();
if (partElement.equals(xmlSchemaElement.getName())) {
if (xmlSchemaElement.getSchemaType() != null) {
if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaComplexType) {
XmlSchemaComplexType xmlElementComplexType = (XmlSchemaComplexType) xmlSchemaElement.getSchemaType();
XmlSchemaParticle xmlSchemaParticle = xmlElementComplexType.getParticle();
if (xmlSchemaParticle instanceof XmlSchemaGroupParticle) {
XmlSchemaGroupParticle xmlSchemaGroupBase = (XmlSchemaGroupParticle) xmlSchemaParticle;
if (xmlSchemaGroupBase != null) {
buildParameterFromCollection(xmlSchemaGroupBase, parameterRoot, ioOrRecursion);
}
} else if (xmlSchemaElement.getSchemaTypeName() != null) {
String paraTypeName = xmlSchemaElement.getSchemaTypeName().getLocalPart();
String paraTypeNamespace = xmlSchemaElement.getSchemaTypeName().getNamespaceURI();
if (paraTypeName != null) {
buileParameterFromTypes(paraTypeNamespace, paraTypeName, parameterRoot, ioOrRecursion);
}
}
} else if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaSimpleType) {
XmlSchemaSimpleType xmlSchemaSimpleType = (XmlSchemaSimpleType) xmlSchemaElement.getSchemaType();
String typeName = xmlSchemaSimpleType.getName();
if (typeName != null && typeName.equals("anyType")) {
ParameterInfo parameterSon = new ParameterInfo();
parameterSon.name = "anyType";
parameterRoot.childParameters.add(parameterSon);
}
}
} else if (xmlSchemaElement.getSchemaTypeName() != null) {
String paraTypeName = xmlSchemaElement.getSchemaTypeName().getLocalPart();
String paraTypeNamespace = xmlSchemaElement.getSchemaTypeName().getNamespaceURI();
if (paraTypeName != null) {
buileParameterFromTypes(paraTypeNamespace, paraTypeName, parameterRoot, ioOrRecursion);
}
}
}
}
}
}
private void buileParameterFromTypes(String paraNamespace, String paraType, ParameterInfo parameter, int ioOrRecursion) {
if (ioOrRecursion < 3) {
parametersName.clear();
parametersName.add(parameter.name);
} else if (ioOrRecursion == 3) {
parametersName.add(parameter.name);
}
//tWebServiceInput needs automatic metadata fetching for input parameters, but it only supports simple cases, not custom-defined types used like this:
//<element type="tns="s:anyCustomDefinedSimpleOrComplexType"">
//so there is nothing to do here, TODO: confirm this
}
private void buildParameterFromCollection(XmlSchemaGroupParticle xmlSchemaGroupParticle, ParameterInfo parameter,
int ioOrRecursion) {
if (!(xmlSchemaGroupParticle instanceof XmlSchemaSequence)) {
throw new RuntimeException("don't support that complex parameter type, only support xsd:sequence");
}
XmlSchemaSequence xmlSchemaSequence = (XmlSchemaSequence) xmlSchemaGroupParticle;
List<XmlSchemaSequenceMember> sequences = xmlSchemaSequence.getItems();
for (XmlSchemaSequenceMember sequence : sequences) {
if (sequence instanceof XmlSchemaAny) {//TODO remove this, as it is not supported either
ParameterInfo parameterSon = new ParameterInfo();
parameterSon.name = "_content_";
parameter.childParameters.add(parameterSon);
} else if (sequence instanceof XmlSchemaElement) {//this is the major part we support
XmlSchemaElement xmlSchemaElement = (XmlSchemaElement) sequence;
String elementName = xmlSchemaElement.getName();
ParameterInfo parameterSon = new ParameterInfo();
parameterSon.name= elementName;
parameter.childParameters.add(parameterSon);
Boolean isHave = false;
if (!parametersName.isEmpty() && parameterSon.name != null) {
for (int p = 0; p < parametersName.size(); p++) {
if (parameterSon.name.equals(parametersName.get(p))) {
isHave = true;
}
}
}
if (xmlSchemaElement.getSchemaTypeName() != null) {
String elementTypeName = xmlSchemaElement.getSchemaTypeName().getLocalPart();
String elementTypeNamespace = xmlSchemaElement.getSchemaTypeName().getNamespaceURI();
if (elementTypeName != null && elementTypeName.equals("anyType")) {//TODO remove it
parameterSon.name = xmlSchemaElement.getName() + ":anyType";
}
if (!isHave && !WsdlTypeUtil.isJavaBasicType(elementTypeName)) {
buileParameterFromTypes(elementTypeNamespace, elementTypeName, parameterSon, ioOrRecursion);
}
} else if (xmlSchemaElement.getSchemaType() != null) {
if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaComplexType) {
throw new RuntimeException("don't support nested or ref complex type in xsd:sequence");
} else if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaSimpleType) {
XmlSchemaSimpleType xmlSchemaSimpleType = (XmlSchemaSimpleType) xmlSchemaElement.getSchemaType();
String typeName = xmlSchemaSimpleType.getName();
if (typeName != null && typeName.equals("anyType")) {
ParameterInfo pSon = new ParameterInfo();
pSon.name = "anyType";
parameter.childParameters.add(pSon);
}
}
} else if (xmlSchemaElement.getRef() != null) {//TODO did tWebServiceInput support this before?
String elementTypeName = xmlSchemaElement.getRef().getTargetQName().getLocalPart();
if (!isHave && !WsdlTypeUtil.isJavaBasicType(elementTypeName)) {
buildParameterFromElements(elementTypeName, parameterSon, ioOrRecursion);
}
}
} else if (sequence instanceof XmlSchemaAttribute) {
XmlSchemaAttribute xmlSchemaAttribute = (XmlSchemaAttribute) sequence;
String elementName = xmlSchemaAttribute.getName();
ParameterInfo parameterSon = new ParameterInfo();
parameterSon.name = elementName;
parameter.childParameters.add(parameterSon);
Boolean isHave = false;
if (!parametersName.isEmpty() && parameterSon.name != null) {
for (int p = 0; p < parametersName.size(); p++) {
if (parameterSon.name.equals(parametersName.get(p))) {
isHave = true;
}
}
}
if (xmlSchemaAttribute.getSchemaTypeName() != null) {
String elementTypeName = xmlSchemaAttribute.getSchemaTypeName().getLocalPart();
String elementTypeNamespace = xmlSchemaAttribute.getSchemaTypeName().getNamespaceURI();
if (!isHave && !WsdlTypeUtil.isJavaBasicType(elementTypeName)) {
buileParameterFromTypes(elementTypeNamespace, elementTypeName, parameterSon, ioOrRecursion);
}
} else if (xmlSchemaAttribute.getRef() != null) {//TODO did tWebServiceInput support this before?
String refName = xmlSchemaAttribute.getRef().getTargetQName().getLocalPart();
if (!isHave) {
buildParameterFromElements(refName, parameterSon, ioOrRecursion);
}
}
} else {
throw new RuntimeException("don't support the nest type in xsd:sequence");
}
}
}
}
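A hedged usage sketch tying this parser to the ServiceDiscoveryHelper changes above (the WSDL URL is hypothetical; field access assumes same-package visibility, as in the main() demo earlier):

    // Resolve an operation's metadata without writing temporary WSDL files.
    ServiceDiscoveryHelper helper =
        new ServiceDiscoveryHelper("http://example.com/service?WSDL", null, null, false);
    WSDLMetadataUtils utils = new WSDLMetadataUtils();
    // A null port selects the first port that exposes the operation.
    WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(helper, null, "Knowledge");
    System.out.println(info.service + " / " + info.port + " / " + info.operationName);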

View File

@@ -0,0 +1,39 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.webservice.helper;
public class WsdlTypeUtil {
public static Boolean isJavaBasicType(String typeName) {
if (typeName == null) {
return false;
}
return "String".equalsIgnoreCase(typeName) || "int".equalsIgnoreCase(typeName)
|| "long".equalsIgnoreCase(typeName) || "double".equalsIgnoreCase(typeName)
|| "float".equalsIgnoreCase(typeName) || "char".equalsIgnoreCase(typeName);
}
}
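A quick sanity check of the helper, assuming only the class above (run with -ea to enable assertions):
public class WsdlTypeUtilCheck {
    public static void main(String[] args) {
        // Matching is case-insensitive and null-safe.
        assert WsdlTypeUtil.isJavaBasicType("String");
        assert WsdlTypeUtil.isJavaBasicType("INT");
        assert !WsdlTypeUtil.isJavaBasicType(null);
        assert !WsdlTypeUtil.isJavaBasicType("anyType"); // non-basic types recurse in the caller
    }
}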

View File

@@ -15,12 +15,17 @@ import java.util.logging.Logger;
import javax.wsdl.xml.WSDLLocator;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.xml.sax.InputSource;
/**
@@ -49,10 +54,10 @@ public class WSDLLocatorImpl implements WSDLLocator {
}
public InputSource getBaseInputSource() {
GetMethod get = createGetMethod(wsdlUri);
HttpRequestBase get = createGetMethod(wsdlUri);
try {
httpClient.executeMethod(get);
InputStream is = get.getResponseBodyAsStream();
HttpResponse response = httpClient.execute(get);
InputStream is = response.getEntity().getContent();
inputStreams.add(is);
return new InputSource(is);
} catch (IOException ex) {
@@ -64,9 +69,9 @@ public class WSDLLocatorImpl implements WSDLLocator {
try {
URL url = getURL(parentLocation, importLocation);
latestImportUri = url.toExternalForm();
GetMethod get = createGetMethod(latestImportUri);
httpClient.executeMethod(get);
InputStream is = get.getResponseBodyAsStream();
HttpRequestBase get = createGetMethod(latestImportUri);
HttpResponse response = httpClient.execute(get);
InputStream is = response.getEntity().getContent();
inputStreams.add(is);
return new InputSource(is);
} catch (MalformedURLException ex) {
@@ -110,36 +115,44 @@ public class WSDLLocatorImpl implements WSDLLocator {
inputStreams.clear();
}
private GetMethod createGetMethod(String uri) {
GetMethod get = new GetMethod(uri);
private HttpRequestBase createGetMethod(String uri) {
HttpGet get = new HttpGet(uri);
if (configuration.getCookie() != null) {
get.setRequestHeader(HTTP_HEADER_COOKIE, configuration.getCookie());
get.setHeader(HTTP_HEADER_COOKIE, configuration.getCookie());
}
return get;
}
private HttpClient createHttpClient() {
HttpClient httpClient = new HttpClient();
HttpClientBuilder builder = HttpClients.custom();
CredentialsProvider credentialsProvider = null;
if (configuration.getProxyServer() != null) {
HostConfiguration hostConfiguration = new HostConfiguration();
hostConfiguration.setProxy(configuration.getProxyServer(), configuration.getProxyPort());
httpClient.setHostConfiguration(hostConfiguration);
builder.setProxy(new HttpHost(configuration.getProxyServer(), configuration.getProxyPort()));
}
if (configuration.getUsername() != null) {
Credentials credentials = new UsernamePasswordCredentials(configuration.getUsername(), configuration.getPassword());
httpClient.getState().setCredentials(AuthScope.ANY, credentials);
if (credentialsProvider == null) {
credentialsProvider = new BasicCredentialsProvider();
}
credentialsProvider
.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(configuration.getUsername(), configuration.getPassword()));
}
if (configuration.getProxyUsername() != null) {
Credentials credentials = new UsernamePasswordCredentials(configuration.getProxyUsername(),
configuration.getProxyPassword());
httpClient.getState().setProxyCredentials(AuthScope.ANY, credentials);
httpClient.getHostConfiguration().setProxy(configuration.getProxyServer(), configuration.getProxyPort());
if (credentialsProvider == null) {
credentialsProvider = new BasicCredentialsProvider();
}
credentialsProvider
.setCredentials(new AuthScope(configuration.getProxyServer(), configuration.getProxyPort()),
new UsernamePasswordCredentials(configuration.getProxyUsername(),
configuration.getProxyPassword()));
builder.setProxy(new HttpHost(configuration.getProxyServer(), configuration.getProxyPort()));
}
return httpClient;
if (credentialsProvider != null) {
builder.setDefaultCredentialsProvider(credentialsProvider);
}
return builder.build();
}
}
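The hunk above is the standard commons-httpclient 3.x to HttpClient 4.x migration: GetMethod becomes HttpGet, executeMethod becomes execute, and per-client state moves onto HttpClientBuilder. For reference, a self-contained sketch of the same pattern (host, port, and credentials invented):
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClients;

public class HttpClient4Sketch {
    public static HttpClient build() {
        // One provider carries both the target credentials (ANY scope) and
        // the proxy credentials (scoped to the proxy host/port).
        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials("user", "secret"));
        credentialsProvider.setCredentials(new AuthScope("proxy.example.com", 8080),
                new UsernamePasswordCredentials("proxyUser", "proxySecret"));
        return HttpClients.custom()
                .setProxy(new HttpHost("proxy.example.com", 8080))
                .setDefaultCredentialsProvider(credentialsProvider)
                .build();
    }
}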

View File

@@ -4,15 +4,13 @@
*/
package org.talend.webservice.helper.map;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import javax.xml.namespace.QName;
import org.talend.webservice.helper.PathUtil;
import org.talend.webservice.mapper.AnyTypeMapper;
import sun.awt.image.ImageWatched;
/**
*
@@ -24,35 +22,44 @@ public class MapConverter {
public static final String LEFT_SQUARE_BRACKET = "[";
public static final String RIGHT_SQUARE_BRACKET = "]";
private static Map<String, Object> newMap(boolean keepOrder) {
if(keepOrder) return new LinkedHashMap<>();
return new HashMap<>();
}
public static Map<String, Object> deepMapToMap(Map<String, Object> map) {
return deepMapToMap(map, null, SEPARATOR);
return deepMapToMap(map, null, SEPARATOR, false);
}
public static Map<String, Object> deepMapToMap(Map<String, Object> map, boolean keepOrder) {
return deepMapToMap(map, null, SEPARATOR, keepOrder);
}
public static Map<String, Object> mapToDeepMap(Map<String, Object> map) {
return mapToDeepMap(map, SEPARATOR);
}
private static Map<String, Object> deepMapToMap(Object value, String k, String sep) {
private static Map<String, Object> deepMapToMap(Object value, String k, String sep, boolean keepOrder) {
if (value instanceof Map) {
Map<String, Object> map = (Map<String, Object>) value;
Map<String, Object> out = new HashMap<String, Object>();
Map<String, Object> out = newMap(keepOrder);
for (Map.Entry<String, Object> entry : map.entrySet()) {
if (k == null) {
out.putAll(deepMapToMap(entry.getValue(), entry.getKey(), sep));
out.putAll(deepMapToMap(entry.getValue(), entry.getKey(), sep, keepOrder));
} else {
out.putAll(deepMapToMap(entry.getValue(), k + sep + entry.
getKey(), sep));
getKey(), sep, keepOrder));
}
}
return out;
} else if (value instanceof List) {
List<Object> list = (List<Object>) value;
Map<String, Object> out = new HashMap<String, Object>();
Map<String, Object> out = newMap(keepOrder);
int i = 0;
for (Object val : list) {
StringBuffer sb = new StringBuffer();
sb.append(k).append(LEFT_SQUARE_BRACKET).append(i).append(RIGHT_SQUARE_BRACKET);
out.putAll(deepMapToMap(val, sb.toString(), sep));
out.putAll(deepMapToMap(val, sb.toString(), sep, keepOrder));
i++;
}
out.put(k + ".size", list.size());
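A minimal illustration of the flattening convention, assuming SEPARATOR is the dot and using the new keepOrder overload:
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

public class MapConverterSketch {
    public static void main(String[] args) {
        Map<String, Object> address = new LinkedHashMap<>();
        address.put("city", "Nantes");
        Map<String, Object> person = new LinkedHashMap<>();
        person.put("name", "Ada");
        person.put("addresses", Arrays.asList(address));
        // With keepOrder=true the result is a LinkedHashMap, so entries come back
        // in insertion order: {name=Ada, addresses[0].city=Nantes, addresses.size=1}
        Map<String, Object> flat = MapConverter.deepMapToMap(person, true);
        System.out.println(flat);
    }
}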

View File

@@ -42,14 +42,18 @@ import javax.xml.transform.Result;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.stream.StreamResult;
import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.common.jaxb.JAXBBeanInfo;
import org.apache.cxf.common.jaxb.JAXBContextProxy;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.common.spi.ClassGeneratorClassLoader;
import org.apache.cxf.common.util.ASMHelper;
import org.apache.cxf.common.util.ASMHelper.ClassWriter;
import org.apache.cxf.common.util.ASMHelper.FieldVisitor;
import org.apache.cxf.common.util.ASMHelper.Label;
import org.apache.cxf.common.util.ASMHelper.MethodVisitor;
import org.apache.cxf.common.util.ASMHelperImpl;
import org.apache.cxf.common.util.CachedClass;
import org.apache.cxf.common.util.PackageUtils;
import org.apache.cxf.common.util.ReflectionInvokationHandler;
@@ -578,7 +582,7 @@ public final class JAXBUtils {
public static void setNamespaceWrapper(final Map<String, String> nspref, Marshaller marshaller) throws PropertyException {
Object mapper = null;
if (marshaller.getClass().getName().contains(".internal.")) {
mapper = createNamespaceWrapper(nspref);
mapper = createNamespaceWrapper(null, nspref);
if (mapper == null) {
LOG.log(Level.INFO, "Could not create namespace mapper for JDK internal" + " JAXB implementation.");
} else {
@@ -595,6 +599,31 @@ public final class JAXBUtils {
}
}
/*
* To avoid possible runtime collision.
*/
public static Object setNamespaceMapper(Bus bus, final Map<String, String> nspref,
Marshaller marshaller) throws PropertyException {
Object mapper = null;
if (marshaller.getClass().getName().contains(".internal.")) {
mapper = createNamespaceWrapper(bus, nspref);
if (mapper == null) {
LOG.log(Level.INFO, "Could not create namespace mapper for JDK internal" + " JAXB implementation.");
} else {
marshaller.setProperty("com.sun.xml.internal.bind.namespacePrefixMapper", mapper);
}
} else {
try {
Class<?> cls = Class.forName("org.apache.cxf.common.jaxb.NamespaceMapper");
mapper = cls.getConstructor(Map.class).newInstance(nspref);
} catch (Exception ex) {
LOG.log(Level.INFO, "Could not create NamespaceMapper", ex);
}
marshaller.setProperty("com.sun.xml.bind.namespacePrefixMapper", mapper);
}
return mapper;
}
public static BridgeWrapper createBridge(Set<Class<?>> ctxClasses, QName qname, Class<?> refcls, Annotation anns[])
throws JAXBException {
try {
@@ -1018,16 +1047,17 @@ public final class JAXBUtils {
return false;
}
private static synchronized Object createNamespaceWrapper(Map<String, String> map) {
ASMHelper helper = new ASMHelper();
private static synchronized Object createNamespaceWrapper(Bus bus, Map<String, String> map) {
ASMHelper helper = new ASMHelperImpl();
String className = "org.apache.cxf.jaxb.NamespaceMapperInternal";
Class<?> cls = helper.findClass(className, JAXBUtils.class);
NamespaceMapperClassGenerator nmcg = new NamespaceMapperClassGenerator(bus);
Class<?> cls = nmcg.findClass(className, JAXBUtils.class);
if (cls == null) {
ClassWriter cw = helper.createClassWriter();
if (cw == null) {
return null;
}
cls = createNamespaceWrapperInternal(helper, cw);
cls = createNamespaceWrapperInternal(helper, cw, nmcg);
}
try {
return cls.getConstructor(Map.class).newInstance(map);
@@ -1036,7 +1066,7 @@ public final class JAXBUtils {
}
}
private static Class<?> createNamespaceWrapperInternal(ASMHelper helper, ClassWriter cw) {
private static Class<?> createNamespaceWrapperInternal(ASMHelper helper, ClassWriter cw, NamespaceMapperClassGenerator nmcg) {
String className = "org.apache.cxf.jaxb.NamespaceMapperInternal";
FieldVisitor fv;
MethodVisitor mv;
@@ -1127,7 +1157,7 @@ public final class JAXBUtils {
}
}
return helper.loadClass(className, cls, bts);
return nmcg.loadClass(className, cls, bts);
}
public static JAXBBeanInfo getBeanInfo(JAXBContextProxy context, Class<?> cls) {
@@ -1138,4 +1168,20 @@ public final class JAXBUtils {
return ReflectionInvokationHandler.createProxyWrapper(o, JAXBBeanInfo.class);
}
private static class NamespaceMapperClassGenerator extends ClassGeneratorClassLoader {
private NamespaceMapperClassGenerator(Bus bus) {
super(bus == null ? BusFactory.getDefaultBus() : bus);
}
@Override
protected Class<?> findClass(String className, Class<?> cls) {
return super.findClass(className, cls);
}
@Override
protected Class<?> loadClass(String className, Class<?> cls, byte[] bytes) {
return super.loadClass(className, cls, bytes);
}
}
}
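For orientation, the two property keys branched on above are the usual JAXB namespace-prefix-mapper hooks (com.sun.xml.bind... for the standalone RI, com.sun.xml.internal.bind... for the JDK-bundled one). A hedged sketch of the reflective path used for the non-internal case (namespace and prefix invented):
import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.Marshaller;

public class PrefixMapperSketch {
    static void applyPrefixMapper(Marshaller marshaller) throws Exception {
        Map<String, String> nspref = new HashMap<>();
        nspref.put("http://example.com/ns", "ex"); // namespace URI -> preferred prefix
        // CXF ships a Map-backed mapper; it is loaded reflectively to avoid a
        // hard compile-time dependency on the JAXB implementation in use.
        Object mapper = Class.forName("org.apache.cxf.common.jaxb.NamespaceMapper")
                .getConstructor(Map.class).newInstance(nspref);
        marshaller.setProperty("com.sun.xml.bind.namespacePrefixMapper", mapper);
    }
}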

View File

@@ -3,13 +3,12 @@
*/
package org.talend.webservice.mapper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.webservice.exception.LocalizedException;
import javax.xml.namespace.QName;
import org.talend.webservice.exception.LocalizedException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
*
@@ -151,7 +150,7 @@ public class ComplexTypeMapper implements TypeMapper {
if (!clazz.getName().equals(beanName)) {
ComplexTypeMapper instanceComplexTypeMapper = findInstanceByClassName(beanName);
if (instanceComplexTypeMapper != null) {
Map<String, Object> values = new HashMap<String, Object>();
Map<String, Object> values = new LinkedHashMap<String, Object>();
values.put(ABSTRACT_TYPE_NAME, instanceComplexTypeMapper.typeName);
values.put(instanceComplexTypeMapper.typeName.getLocalPart(), instanceComplexTypeMapper.typeToValue(bean));
return values;
@@ -167,7 +166,7 @@ public class ComplexTypeMapper implements TypeMapper {
return null;
}
} else {
Map<String, Object> values = new HashMap<String, Object>(mappers.size());
Map<String, Object> values = new LinkedHashMap<String, Object>(mappers.size());
for (Map.Entry<String, PropertyMapper> entry : mappers.entrySet()) {
Object value = entry.getValue().getValueFrom(bean);
if (value != null) {
@@ -198,7 +197,7 @@ public class ComplexTypeMapper implements TypeMapper {
if (params == null) {
return null;
}
Map<String, Object> values = new HashMap<String, Object>(mappers.size());
Map<String, Object> values = new LinkedHashMap<String, Object>(mappers.size());
int i = 0;
for (Object param : params) {

View File

@@ -4,6 +4,7 @@
package org.talend.webservice.mapper;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -336,7 +337,8 @@ public class MapperFactory {
Class<?> clazz = classMapper.getClassForType(xmlSchemaComplexType.getQName(), orderedMap.keyList(), 1);
// 3.create propertyMapper (propertyName,class,schemaTypeMap,typeMapperQname)
Map<String, PropertyMapper> mappers = new HashMap<String, PropertyMapper>();
//need to use an ordered map, as the order matters for the index-based response auto-parser
Map<String, PropertyMapper> mappers = new LinkedHashMap<>();
for (String key : properties.keySet()) {
Object xmlSchemaObject = properties.get(key);
if (xmlSchemaObject == null) {
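The comment above is the motivation for every HashMap-to-LinkedHashMap swap in these mapper classes: the values are later consumed positionally, so iteration order has to match insertion order. A short demonstration of the guarantee being bought:
import java.util.LinkedHashMap;
import java.util.Map;

public class OrderSketch {
    public static void main(String[] args) {
        Map<String, Object> values = new LinkedHashMap<>();
        values.put("zeta", 1);
        values.put("alpha", 2);
        // Always prints [zeta, alpha]; a HashMap makes no such promise.
        System.out.println(values.keySet());
    }
}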

View File

@@ -5,6 +5,7 @@
package org.talend.webservice.mapper;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.wsdl.Message;
@@ -152,7 +153,7 @@ public class MessageMapperImpl implements MessageMapper {
return wrappedValue;
} else {
List<Part> orderedParts = message.getOrderedParts(null);
Map<String, Object> values = new HashMap<String, Object>(params.length);
Map<String, Object> values = new LinkedHashMap<String, Object>(params.length);
int i = 0;
for (Object param : params) {
Part part = orderedParts.get(i);

View File

@@ -1,7 +0,0 @@
org.talend.ws.exception.IllegalPropertyAccessException=\u0391\u03B4\u03C5\u03BD\u03B1\u03BC\u03AF\u03B1 \u03C0\u03C1\u03CC\u03C3\u03B2\u03B1\u03C3\u03B7\u03C2 \u03C3\u03C4\u03B7\u03BD \u03B9\u03B4\u03B9\u03CC\u03C4\u03B7\u03C4\u03B1 {0} \u03C0\u03BF\u03C5 \u03B4\u03B5\u03BD \u03B5\u03AF\u03BD\u03B1\u03B9 \u03C0\u03C1\u03BF\u03C3\u03B2\u03AC\u03C3\u03B9\u03BC\u03B7 \u03B3\u03B9\u03B1 \u03C4\u03CD\u03C0\u03BF {1}
org.talend.ws.exception.NoSuchPropertyException=\u0397 \u03B9\u03B4\u03B9\u03CC\u03C4\u03B7\u03C4\u03B1 {0} \u03B4\u03B5\u03BD \u03C5\u03C0\u03AC\u03C1\u03C7\u03B5\u03B9 \u03B3\u03B9\u03B1 \u03C4\u03BF\u03BD \u03C4\u03CD\u03C0\u03BF {1}
org.talend.ws.exception.Instantiation=\u0391\u03B4\u03C5\u03BD\u03B1\u03BC\u03AF\u03B1 \u03BD\u03B1 \u03B4\u03BF\u03B8\u03B5\u03AF \u03C5\u03C0\u03CC\u03C3\u03C4\u03B1\u03C3\u03B7 \u03C3\u03B5 \u03BA\u03BB\u03AC\u03C3\u03B7 \u03C4\u03CD\u03C0\u03BF\u03C5 {0}
org.talend.ws.exception.illegalAccessValueOf=\u0391\u03B4\u03C5\u03BD\u03B1\u03BC\u03AF\u03B1 \u03BA\u03BB\u03AE\u03C3\u03B7\u03C2 \u03BC\u03B5\u03B8\u03CC\u03B4\u03BF\u03C5 valueOf \u03B3\u03B9\u03B1 \u03C4\u03CD\u03C0\u03BF enum {0}
org.talend.ws.exception.Unknown=\u039C\u03B9\u03B1 \u03AC\u03B3\u03BD\u03C9\u03C3\u03C4\u03B7 \u03B5\u03BE\u03B1\u03AF\u03C1\u03B5\u03C3\u03B7 \u03C0\u03C1\u03BF\u03AD\u03BA\u03C5\u03C8\u03B5
org.talend.ws.exception.InvalidEnumValueException=\u0397 \u03C4\u03B9\u03BC\u03AE {0} \u03B4\u03B5\u03BD \u03B5\u03AF\u03BD\u03B1\u03B9 \u03AD\u03B3\u03BA\u03C5\u03C1\u03B7 \u03B3\u03B9\u03B1 enum {1}
org.talend.ws.exception.InvalidParameterAnyType=\u03A0\u03C1\u03AD\u03C0\u03B5\u03B9 \u03BD\u03B1 \u03C0\u03C1\u03BF\u03C3\u03B4\u03B9\u03BF\u03C1\u03AF\u03C3\u03B5\u03C4\u03B5 \u03AD\u03BD\u03B1\u03BD \u03C7\u03AC\u03C1\u03C4\u03B7 \u03C3\u03B1\u03BD \u03B1\u03C5\u03C4\u03CC\u03BD \u03B3\u03B9\u03B1 anyTypes : {anyType: value, anyType_type: qname}

View File

@@ -1,8 +0,0 @@
org.talend.ws.exception.IllegalPropertyAccessException=Unable to access property {0} which is not accessible for type {1}
org.talend.ws.exception.NoSuchPropertyException=Property {0} does not exist for type {1}
org.talend.ws.exception.Instantiation=Unable to instantiate class of type {0}
org.talend.ws.exception.InvocationTargetPropertyAccessor=A property accessor has thrown an exception : property {0} of class {1}
org.talend.ws.exception.illegalAccessValueOf=Unable to call method valueOf for enum type {0}
org.talend.ws.exception.Unknown=An unknown exception has been thrown
org.talend.ws.exception.InvalidEnumValueException=Value {0} is not valid for enum {1}
org.talend.ws.exception.InvalidParameterAnyType=You must specify a map like this for anyTypes : {anyType: value, anyType_type: qname} pour les anyType

View File

@@ -1,6 +1,8 @@
org.talend.ws.exception.IllegalPropertyAccessException=Impossible d'acc\u00e9der \u00e0 la propri\u00e9t\u00e9 {0} qui est inaccessible pour le type {1}
org.talend.ws.exception.NoSuchPropertyException=La propri\u00e9t\u00e9 {0} n'existe pas pour le type {1}
org.talend.ws.exception.Instantiation=Impossible d'instancier la classe de type {0}
org.talend.ws.exception.illegalAccessValueOf=Impossible d'appeler une m\u00e9thode valueOf pour le type enum {0}
org.talend.ws.exception.Unknown=Une exception inconnue a \u00e9t\u00e9 rencontr\u00e9e
org.talend.ws.exception.IllegalPropertyAccessException=Impossible d'acc\u00E9der \u00E0 la propri\u00E9t\u00E9 {0} qui est inaccessible pour le type {1}
org.talend.ws.exception.NoSuchPropertyException=La propri\u00E9t\u00E9 {0} n'existe pas pour le type {1}
org.talend.ws.exception.Instantiation=Impossible d'instancier le type de classe {0}
org.talend.ws.exception.InvocationTargetPropertyAccessor=Un accesseur de propri\u00E9t\u00E9 a retourner une exception : propri\u00E9t\u00E9 {0} de classe {1}
org.talend.ws.exception.illegalAccessValueOf=Impossible d'appeler une m\u00E9thode valueOf pour le type enum {0}
org.talend.ws.exception.Unknown=Une exception inconnue a \u00E9t\u00E9 d\u00E9clench\u00E9e
org.talend.ws.exception.InvalidEnumValueException=La valeur {0} n'est pas valide pour enum {1}
org.talend.ws.exception.InvalidParameterAnyType=Vous devez sp\u00E9cifier une Map comme celle-ci pour anyTypes : {anyType: value, anyType_type: qname} pour les anyType

View File

@@ -1,8 +1,8 @@
org.talend.ws.exception.IllegalPropertyAccessException=\u30BF\u30A4\u30D7{1}\u306B\u30A2\u30AF\u30BB\u30B9\u3067\u304D\u306A\u3044\u30D7\u30ED\u30D1\u30C6\u30A3{0}\u306B\u30A2\u30AF\u30BB\u30B9\u3059\u308B\u3053\u3068\u306F\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.IllegalPropertyAccessException={0} \u30D7\u30ED\u30D1\u30C6\u30A3\u306B\u30A2\u30AF\u30BB\u30B9\u3067\u304D\u307E\u305B\u3093\u3002\u3053\u308C\u306F {1} \u578B\u306E\u305F\u3081\u30A2\u30AF\u30BB\u30B9\u3067\u304D\u307E\u305B\u3093\u3002
org.talend.ws.exception.NoSuchPropertyException=\u30BF\u30A4\u30D7{1}\u306E\u30D7\u30ED\u30D1\u30C6\u30A3{0}\u306F\u5B58\u5728\u3057\u307E\u305B\u3093
org.talend.ws.exception.Instantiation=\u30BF\u30A4\u30D7{0}\u306E\u30AF\u30E9\u30B9\u3092\u30A4\u30F3\u30B9\u30BF\u30F3\u30B9\u5316\u3059\u308B\u3053\u3068\u306F\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.InvocationTargetPropertyAccessor=\u30D7\u30ED\u30D1\u30C6\u30A3\u30A2\u30AF\u30BB\u30C3\u30B5\u306F\u4F8B\u5916\u3092\u767A\u751F\u3055\u305B\u307E\u3057\u305F: \u30AF\u30E9\u30B9{1}\u306E\u30D7\u30ED\u30D1\u30C6\u30A3{0}
org.talend.ws.exception.illegalAccessValueOf=enum\u578B{0}\u306E\u30E1\u30BD\u30C3\u30C9valueOf\u3092\u547C\u3073\u51FA\u3059\u3053\u3068\u304C\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.Unknown=\u4E0D\u660E\u306A\u4F8B\u5916\u304C\u767A\u751F\u3057\u307E\u3057\u305F
org.talend.ws.exception.InvalidEnumValueException=\u5024{0}\u306F\u5217\u6319\u578B{1}\u3068\u3057\u3066\u6709\u52B9\u3067\u306F\u3042\u308A\u307E\u305B\u3093
org.talend.ws.exception.InvalidParameterAnyType=\u3053\u306E\u3088\u3046\u306AanyTypes\u306E\u30DE\u30C3\u30D7\u3092\u6307\u5B9A\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059: {anyType: value, anyType_type: qname}
org.talend.ws.exception.Instantiation={0} \u578B\u306E\u30AF\u30E9\u30B9\u306E\u30A4\u30F3\u30B9\u30BF\u30F3\u30B9\u3092\u4F5C\u6210\u3067\u304D\u307E\u305B\u3093\u3002
org.talend.ws.exception.InvocationTargetPropertyAccessor=\u30D7\u30ED\u30D1\u30C6\u30A3\u30A2\u30AF\u30BB\u30B9\u306B\u3088\u308A\u3001\u4F8B\u5916\u304C\u30B9\u30ED\u30FC\u3055\u308C\u307E\u3057\u305F\uFF1A\u30AF\u30E9\u30B9 {1} \u306E\u30D7\u30ED\u30D1\u30C6\u30A3 {0}
org.talend.ws.exception.illegalAccessValueOf=\u5217\u6319\u578B {0} \u306EvalueOf\u30E1\u30BD\u30C3\u30C9\u3092\u547C\u3073\u51FA\u3059\u3053\u3068\u306F\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.Unknown=\u4E0D\u660E\u306A\u4F8B\u5916\u304C\u30B9\u30ED\u30FC\u3055\u308C\u307E\u3057\u305F
org.talend.ws.exception.InvalidEnumValueException=\u5024 {0} \u306Fenum\u578B {1} \u3067\u306F\u3042\u308A\u307E\u305B\u3093\u3002
org.talend.ws.exception.InvalidParameterAnyType=\u30DE\u30C3\u30D7\u306F\u3069\u306E\u30BF\u30A4\u30D7\u306B\u3064\u3044\u3066\u3082\u4EE5\u4E0B\u306E\u3088\u3046\u306B\u6307\u5B9A\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059: {anyType: value, anyType_type: qname}

View File

@@ -3,6 +3,6 @@ org.talend.ws.exception.NoSuchPropertyException=\u5C5E\u6027 {0} \u5BF9\u4E8E\u7
org.talend.ws.exception.Instantiation=\u65E0\u6CD5\u5B9E\u4F8B\u5316\u7C7B\u578B {0} \u7684\u7C7B
org.talend.ws.exception.InvocationTargetPropertyAccessor=\u5C5E\u6027\u8BBF\u95EE\u5668\u629B\u51FA\u4E86\u4E00\u4E2A\u5F02\u5E38\uFF1A\u7C7B {1} \u7684\u5C5E\u6027 {0}
org.talend.ws.exception.illegalAccessValueOf=\u65E0\u6CD5\u4E3A\u679A\u4E3E\u7C7B\u578B {0} \u8C03\u7528\u65B9\u6CD5 valueOf
org.talend.ws.exception.Unknown=\u629B\u51FA\u4E86\u4E00\u4E2A\u672A\u77E5\u7684\u5F02\u5E38
org.talend.ws.exception.Unknown=\u53D1\u751F\u4E00\u4E2A\u672A\u77E5\u5F02\u5E38
org.talend.ws.exception.InvalidEnumValueException=\u503C {0} \u5BF9\u4E8E\u679A\u4E3E {1} \u65E0\u6548
org.talend.ws.exception.InvalidParameterAnyType=\u60A8\u5FC5\u987B\u6309\u5982\u4E0B\u6240\u793A\u4E3A anyType \u6307\u5B9A\u6620\u5C04\uFF1A{anyType: value, anyType_type: qname}

View File

@@ -63,9 +63,9 @@
<version>4.1.2</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
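reload4j is a binary-compatible fork of log4j 1.2.17 that fixes its known CVEs while keeping the org.apache.log4j packages, so existing logging code compiles and runs unchanged against the swapped dependency:
import org.apache.log4j.Logger;

public class Reload4jCheck {
    private static final Logger LOG = Logger.getLogger(Reload4jCheck.class);

    public static void main(String[] args) {
        LOG.info("same log4j 1.x API, now backed by ch.qos.reload4j:reload4j");
    }
}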

View File

@@ -5,7 +5,7 @@
<groupId>org.talend</groupId>
<artifactId>talendMQConnectionUtil</artifactId>
<version>1.0.1-20190215</version>
<version>1.1.0-20220307</version>
<packaging>jar</packaging>
<name>talendMQConnectionUtil</name>
@@ -55,13 +55,8 @@
</dependency>
<dependency>
<groupId>com.ibm.mq</groupId>
<artifactId>com.ibm.mq</artifactId>
<version>8.0.0.9</version>
</dependency>
<dependency>
<groupId>com.ibm.mq</groupId>
<artifactId>com.ibm.mqjms</artifactId>
<version>8.0.0.9</version>
<artifactId>com.ibm.mq.allclient</artifactId>
<version>9.2.4.0</version>
</dependency>
<dependency>
<groupId>javax.resource</groupId>
@@ -71,7 +66,7 @@
<dependency>
<groupId>org.talend</groupId>
<artifactId>talendMQRFH2</artifactId>
<version>1.0.1-20190206</version>
<version>1.1.0-20220307</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
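com.ibm.mq.allclient is IBM's consolidated client jar that supersedes the separate com.ibm.mq and com.ibm.mqjms artifacts. A hedged JMS connection sketch (host, port, and queue manager invented) using classes shipped in allclient:
import javax.jms.Connection;
import com.ibm.mq.jms.MQConnectionFactory;
import com.ibm.msg.client.wmq.WMQConstants;

public class MqAllclientSketch {
    public static Connection connect() throws Exception {
        MQConnectionFactory factory = new MQConnectionFactory();
        factory.setHostName("mq.example.com"); // invented host
        factory.setPort(1414);
        factory.setQueueManager("QM1");        // invented queue manager
        factory.setTransportType(WMQConstants.WMQ_CM_CLIENT);
        return factory.createConnection();
    }
}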

View File

@@ -53,9 +53,9 @@
<version>4.1.2</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
</dependencies>
<build>

View File

@@ -4,7 +4,16 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>talendzip</artifactId>
<version>1.1-20201120</version>
<version>1.3</version>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>https://www.talendforge.org/modules/licenses/APACHE_v2.txt</url>
<distribution>may be downloaded from the Maven repository</distribution>
</license>
</licenses>
<build>
<plugins>
<plugin>
@@ -52,19 +61,14 @@
<dependency>
<groupId>net.lingala.zip4j</groupId>
<artifactId>zip4j</artifactId>
<version>1.3.3</version>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>org.talend.libraries</groupId>
<artifactId>checkArchive-1.1-20190917</artifactId>
<version>6.0.0</version>
<version>1.21</version>
</dependency>
</dependencies>
</project>
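Note that zip4j 2.x is a breaking upgrade from 1.3.3: ZipFile moved from net.lingala.zip4j.core to net.lingala.zip4j and passwords are now passed as char[]. A minimal sketch (archive path and password invented):
import net.lingala.zip4j.ZipFile;

public class Zip4j2Sketch {
    public static void main(String[] args) throws Exception {
        ZipFile zip = new ZipFile("archive.zip", "secret".toCharArray());
        zip.extractAll("/tmp/out"); // throws ZipException if the archive is bad
    }
}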

View File

@@ -1,184 +1,183 @@
package org.talend.archive;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipFile;
// import javax.crypto.Cipher;
public class IntegrityUtil {
/**
* Is used to check if the zip file is corrupted/destroyed
*
* @param file
* @return
*/
public static boolean isZipValid(final File file) {
ZipFile zipFile = null;
try {
zipFile = new ZipFile(file);
return true;
} catch (IOException e) {
return false;
} finally {
try {
if (zipFile != null) {
zipFile.close();
zipFile = null;
}
} catch (IOException e) {
}
}
}
public static void validate(final File file) {
ZipFile zipFile = null;
try {
zipFile = new ZipFile(file);
} catch (IOException e) {
} finally {
try {
if (zipFile != null) {
zipFile.close();
zipFile = null;
}
} catch (IOException e) {
}
}
}
/**
* Checks whether the encrypted zip file is corrupted
*
* @param file
* @param password
* @return
*/
public static boolean isEncryptedZipValid(final File file, String password) {
ZipArchiveInputStream input = null;
InputStream target = null;
try {
target = new FileInputStream(file);
target = new CipherInputStream(target, createCipher(Cipher.DECRYPT_MODE, password));
input = new ZipArchiveInputStream(target);
ArchiveEntry entry = input.getNextEntry();
return true;
} catch (IOException e) {
return false;
} catch (Exception e) {
return false;
} finally {
try {
if (input != null) {
input.close();
input = null;
}
if (target != null) {
target.close();
target = null;
}
} catch (IOException e) {
}
}
}
/**
* Used to check whether a tar.gz/.tgz/.gz file is corrupted/destroyed
*
* @param fileName
* @return
*/
public static boolean isGZIPValid(final String fileName) {
GZIPInputStream inputStream = null;
InputStream is = null;
try {
is = new FileInputStream(new File(fileName));
inputStream = new GZIPInputStream(is);
return true;
} catch (IOException e) {
return false;
} finally {
try {
if (inputStream != null) {
inputStream.close();
inputStream = null;
} else if(is != null) {
is.close();
is = null;
}
} catch (IOException e) {
}
}
}
/**
* Used to check whether a .tar file is corrupted/destroyed
*
* @param fileName
* @return
*/
public static boolean isTarValid(final String fileName) {
TarArchiveInputStream inputStream = null;
InputStream is = null;
try {
is = new FileInputStream(new File(fileName));
inputStream = new TarArchiveInputStream(is);
return inputStream.canReadEntryData(inputStream.getNextEntry());
} catch (IOException e) {
return false;
} finally {
try {
if (inputStream != null) {
inputStream.close();
inputStream = null;
} else if(is != null) {
is.close();
is = null;
}
} catch (IOException e) {
}
}
}
/**
*
* @param mode
* @param password
* @return
* @throws Exception
*/
public static Cipher createCipher(int mode, String password) throws Exception {
String alg = "PBEWithSHA1AndDESede"; // BouncyCastle has better algorithms
PBEKeySpec keySpec = new PBEKeySpec(password.toCharArray());
SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(alg);
SecretKey secretKey = keyFactory.generateSecret(keySpec);
Cipher cipher = Cipher.getInstance("PBEWithSHA1AndDESede");
cipher.init(mode, secretKey, new PBEParameterSpec("saltsalt".getBytes(), 2000));
return cipher;
}
}
package com.talend.compress.zip;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipFile;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
// import javax.crypto.Cipher;
public class IntegrityUtil {
/**
* Is used to check if the zip file is corrupted/destroyed
*
* @param file
* @return
*/
public static boolean isZipValid(final File file) {
ZipFile zipFile = null;
try {
zipFile = new ZipFile(file);
return true;
} catch (IOException e) {
return false;
} finally {
try {
if (zipFile != null) {
zipFile.close();
zipFile = null;
}
} catch (IOException e) {
}
}
}
public static void validate(final File file) {
ZipFile zipFile = null;
try {
zipFile = new ZipFile(file);
} catch (IOException e) {
} finally {
try {
if (zipFile != null) {
zipFile.close();
zipFile = null;
}
} catch (IOException e) {
}
}
}
/**
* Checks whether the encrypted zip file is corrupted
*
* @param file
* @param password
* @return
*/
public static boolean isEncryptedZipValid(final File file, String password) {
ZipArchiveInputStream input = null;
InputStream target = null;
try {
target = new FileInputStream(file);
target = new CipherInputStream(target, createCipher(Cipher.DECRYPT_MODE, password));
input = new ZipArchiveInputStream(target);
ArchiveEntry entry = input.getNextEntry();
return true;
} catch (IOException e) {
return false;
} catch (Exception e) {
return false;
} finally {
try {
if (input != null) {
input.close();
input = null;
}
if (target != null) {
target.close();
target = null;
}
} catch (IOException e) {
}
}
}
/**
* Used to check whether a tar.gz/.tgz/.gz file is corrupted/destroyed
*
* @param fileName
* @return
*/
public static boolean isGZIPValid(final String fileName) {
GZIPInputStream inputStream = null;
InputStream is = null;
try {
is = new FileInputStream(new File(fileName));
inputStream = new GZIPInputStream(is);
return true;
} catch (IOException e) {
return false;
} finally {
try {
if (inputStream != null) {
inputStream.close();
inputStream = null;
} else if(is != null) {
is.close();
is = null;
}
} catch (IOException e) {
}
}
}
/**
* Used to check whether a .tar file is corrupted/destroyed
*
* @param fileName
* @return
*/
public static boolean isTarValid(final String fileName) {
TarArchiveInputStream inputStream = null;
InputStream is = null;
try {
is = new FileInputStream(new File(fileName));
inputStream = new TarArchiveInputStream(is);
return inputStream.canReadEntryData(inputStream.getNextEntry());
} catch (IOException e) {
return false;
} finally {
try {
if (inputStream != null) {
inputStream.close();
inputStream = null;
} else if(is != null) {
is.close();
is = null;
}
} catch (IOException e) {
}
}
}
/**
*
* @param mode
* @param password
* @return
* @throws Exception
*/
public static Cipher createCipher(int mode, String password) throws Exception {
String alg = "PBEWithSHA1AndDESede"; // BouncyCastle has better algorithms
PBEKeySpec keySpec = new PBEKeySpec(password.toCharArray());
SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(alg);
SecretKey secretKey = keyFactory.generateSecret(keySpec);
Cipher cipher = Cipher.getInstance("PBEWithSHA1AndDESede");
cipher.init(mode, secretKey, new PBEParameterSpec("saltsalt".getBytes(), 2000));
return cipher;
}
}
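Callers are unaffected by anything except the package move; assuming the relocated class above, typical checks look like this (file names invented):
import java.io.File;
import com.talend.compress.zip.IntegrityUtil;

public class IntegrityChecks {
    public static void main(String[] args) {
        File archive = new File("export.zip");
        boolean plainOk = IntegrityUtil.isZipValid(archive);
        boolean encryptedOk = IntegrityUtil.isEncryptedZipValid(archive, "secret");
        boolean tarballOk = IntegrityUtil.isGZIPValid("export.tar.gz");
        System.out.println(plainOk + " " + encryptedOk + " " + tarballOk);
    }
}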

Some files were not shown because too many files have changed in this diff.