Compare commits

446 commits: patchrelea... → release/7.

| Author | SHA1 | Date |
|---|---|---|

(Commit table omitted — only the SHA1 column survived extraction; the Author and Date cells were empty for all 446 rows, so the bare hashes are not reproduced here.)
@@ -584,13 +584,11 @@ EParameterName.jdbcURL=JDBC URL
EParameterName.driverJar=Driver jar
EParameterName.className=Class name
EParameterName.mappingFile=Mapping file
SetupProcessDependenciesRoutinesAction.title=Setup routine dependencies
SetupProcessDependenciesRoutinesAction.title=Setup Codes Dependencies
SetupProcessDependenciesRoutinesDialog.systemRoutineLabel=System routines
SetupProcessDependenciesRoutinesDialog.userRoutineLabel=User routines
PerformancePreferencePage.addAllSystemRoutines=Add all system routines to job dependencies, when creating a new job
PerformancePreferencePage.addAllUserRoutines=Add all user routines to job dependencies, when creating a new job
ShowRoutineItemsDialog.systemTitle=Select Sytem Routines
ShowRoutineItemsDialog.title=Select Routines
AbstractMultiPageTalendEditor_pleaseWait=Saving Please Wait....
DocumentationPreferencePage.use_css_template=Use CSS file as a template when export to HTML
DocumentationPreferencePage.css_file=CSS File

@@ -1,5 +1,5 @@
NavigatorContent.contexts=Contexts
NavigatorContent.routines=Routines
NavigatorContent.routines=Global Routines
NavigatorContent.sqlTemplates=SQL Templates
NavigatorContent.documentation=Documentation
NavigatorContent.activation=di.fake.for.activation

@@ -6,7 +6,7 @@
<license url="http://www.example.com/license">[Enter License Description here.]</license>
<requires>
<import feature="org.eclipse.test" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.junit" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.junit" version="4.13.2" match="greaterOrEqual"/>
<import plugin="org.talend.commons.runtime" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.talend.commons.ui" version="0.0.0" match="greaterOrEqual"/>
<import plugin="org.talend.core" version="0.0.0" match="greaterOrEqual"/>

@@ -16,7 +16,6 @@
</requires>
<plugin id="org.talend.libraries.apache" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.axis2" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.batik" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.chemistry" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.common" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.apache.cxf" download-size="0" install-size="0" version="0.0.0"/>

@@ -51,5 +50,4 @@
<plugin id="org.talend.libraries.slf4j" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.xml" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.zmq" download-size="0" install-size="0" version="0.0.0"/>
<plugin id="org.talend.libraries.zookeeper" download-size="0" install-size="0" version="0.0.0"/>
</feature>

@@ -412,7 +412,7 @@

if(ignoredParamsNames.contains(name)) {
//do nothing
} else if(org.talend.core.model.process.EParameterFieldType.PASSWORD.equals(ep.getFieldType())){
} else if(org.talend.core.model.process.EParameterFieldType.PASSWORD.equals(ep.getFieldType()) || org.talend.core.model.process.EParameterFieldType.HIDDEN_TEXT.equals(ep.getFieldType())){
//not log password
}else{
String value = org.talend.core.model.utils.NodeUtil.getRuntimeParameterValue(node, ep);

@@ -126,8 +126,16 @@

boolean exist_tSCP = false;
List<INode> scpComponentsList = (List<INode>)process.getNodesOfType("tSCPConnection");
if (scpComponentsList.size() > 0) {
String parameterNames = "";
int scpsize = scpComponentsList.size();
if (scpsize > 0) {
exist_tSCP = true;
for (int i = 0; i < scpsize; i++) {
parameterNames += "\"conn_" + scpComponentsList.get(i).getUniqueName() + "\"";
if(i < scpsize-1){
parameterNames += ",";
}
}
}

boolean exist_tCassandra = false;

@@ -506,17 +514,35 @@
}
org.apache.logging.log4j.core.config.Configurator.setLevel(org.apache.logging.log4j.LogManager.getRootLogger().getName(), log.getLevel());
<%}%>
}
log.info("TalendJob: '<%=codeGenArgument.getJobName()%>' - Start.");
<%}%>

<%
INode jobCatcherNode = null;
}
log.info("TalendJob: '<%=codeGenArgument.getJobName()%>' - Start.");
<%}%>

<%
INode jobCatcherNode = null;

int threadPoolSize = 0;
boolean tRESTRequestLoopExists = false;
for (INode nodeInProcess : process.getGeneratingNodes()) {
String componentName = nodeInProcess.getComponent().getName();
if("tJobStructureCatcher".equals(componentName)) {

if(jobCatcherNode==null && "tJobStructureCatcher".equals(componentName)) {
jobCatcherNode = nodeInProcess;
break;
continue;
}

if(!nodeInProcess.isActivate()) continue;

if("tRESTRequestLoop".equals(componentName)) {
tRESTRequestLoopExists = true;
continue;
}

if("tWriteXMLFieldOut".equals(componentName)) {
IConnection nextMergeConn = NodeUtil.getNextMergeConnection(nodeInProcess);
if(nextMergeConn == null || nextMergeConn.getInputId()==1){
threadPoolSize++;
}
}
}

@@ -644,15 +670,15 @@

boolean inOSGi = routines.system.BundleUtils.inOSGi();

if (inOSGi) {
java.util.Dictionary<String, Object> jobProperties = routines.system.BundleUtils.getJobProperties(jobName);

if (jobProperties != null) {
contextStr = (String)jobProperties.get("context");
}
}

try {
java.util.Dictionary<String, Object> jobProperties = null;
if (inOSGi) {
jobProperties = routines.system.BundleUtils.getJobProperties(jobName);

if (jobProperties != null && jobProperties.get("context") != null) {
contextStr = (String)jobProperties.get("context");
}
}
//call job/subjob with an existing context, like: --context=production. if without this parameter, there will use the default context instead.
java.io.InputStream inContext = <%=className%>.class.getClassLoader().getResourceAsStream("<%=jobClassPackageFolder%>/contexts/" + contextStr + ".properties");
if (inContext == null) {

@@ -662,8 +688,17 @@
try {
//defaultProps is in order to keep the original context value
if(context != null && context.isEmpty()) {
defaultProps.load(inContext);
context = new ContextProperties(defaultProps);
defaultProps.load(inContext);
if (inOSGi && jobProperties != null) {
java.util.Enumeration<String> keys = jobProperties.keys();
while (keys.hasMoreElements()) {
String propKey = keys.nextElement();
if (defaultProps.containsKey(propKey)) {
defaultProps.put(propKey, (String) jobProperties.get(propKey));
}
}
}
context = new ContextProperties(defaultProps);
}
} finally {
inContext.close();
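The reworked hunk above loads the context file into defaultProps first and only then overlays OSGi-provided job properties, restricted to keys the context file already defines. A minimal standalone sketch of that overlay step (class and variable names are illustrative, not the generated code):

```java
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Properties;

public class ContextOverlaySketch {
    // Overwrite loaded defaults with OSGi-provided values, but only for
    // keys that already exist in the context file.
    static void overlay(Properties defaultProps, Dictionary<String, Object> jobProperties) {
        Enumeration<String> keys = jobProperties.keys();
        while (keys.hasMoreElements()) {
            String propKey = keys.nextElement();
            if (defaultProps.containsKey(propKey)) {
                defaultProps.put(propKey, (String) jobProperties.get(propKey));
            }
        }
    }

    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("host", "localhost");
        Dictionary<String, Object> osgi = new Hashtable<>();
        osgi.put("host", "prod-server");
        osgi.put("unknown", "ignored"); // not in defaults, so it is skipped
        overlay(defaults, osgi);
        System.out.println(defaults.getProperty("host")); // prod-server
    }
}
```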
@@ -733,34 +768,39 @@
<%
} else if(typeToGenerate.equals("java.util.Date")) {
%>
try{
String context_<%=ctxParam.getName()%>_value = context.getProperty("<%=ctxParam.getName()%>");
if (context_<%=ctxParam.getName()%>_value == null){
context_<%=ctxParam.getName()%>_value = "";
}
int context_<%=ctxParam.getName()%>_pos = context_<%=ctxParam.getName()%>_value.indexOf(";");
String context_<%=ctxParam.getName()%>_pattern = "yyyy-MM-dd HH:mm:ss";
if(context_<%=ctxParam.getName()%>_pos > -1){
context_<%=ctxParam.getName()%>_pattern = context_<%=ctxParam.getName()%>_value.substring(0, context_<%=ctxParam.getName()%>_pos);
context_<%=ctxParam.getName()%>_value = context_<%=ctxParam.getName()%>_value.substring(context_<%=ctxParam.getName()%>_pos + 1);
}
try{
if (context_<%=ctxParam.getName()%>_value == null){
context_<%=ctxParam.getName()%>_value = "";
}
int context_<%=ctxParam.getName()%>_pos = context_<%=ctxParam.getName()%>_value.indexOf(";");
String context_<%=ctxParam.getName()%>_pattern = "yyyy-MM-dd HH:mm:ss";
if(context_<%=ctxParam.getName()%>_pos > -1){
context_<%=ctxParam.getName()%>_pattern = context_<%=ctxParam.getName()%>_value.substring(0, context_<%=ctxParam.getName()%>_pos);
context_<%=ctxParam.getName()%>_value = context_<%=ctxParam.getName()%>_value.substring(context_<%=ctxParam.getName()%>_pos + 1);
}

context.<%=ctxParam.getName()%>=(java.util.Date)(new java.text.SimpleDateFormat(context_<%=ctxParam.getName()%>_pattern).parse(context_<%=ctxParam.getName()%>_value));
context.<%=ctxParam.getName()%>=(java.util.Date)(new java.text.SimpleDateFormat(context_<%=ctxParam.getName()%>_pattern).parse(context_<%=ctxParam.getName()%>_value));

} catch(ParseException e) {
} catch(ParseException e) {
try { <% /*try to check if date passed as long also*/ %>
long context_<%=ctxParam.getName()%>_longValue = Long.valueOf(context_<%=ctxParam.getName()%>_value);
context.<%=ctxParam.getName()%> = new java.util.Date(context_<%=ctxParam.getName()%>_longValue);
} catch (NumberFormatException cantParseToLongException) {
<%
if (isLog4jEnabled) {
if (isLog4jEnabled) {
%>
log.warn(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", e.getMessage()));
log.warn(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", "Can't parse date string: " + e.getMessage() + " and long: " + cantParseToLongException.getMessage()));
<%
} else {
} else {
%>
System.err.println(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", e.getMessage()));
System.err.println(String.format("<%=warningMessageFormat %>", "<%=ctxParam.getName() %>", "Can't parse date string: " + e.getMessage() + " and long: " + cantParseToLongException.getMessage()));
<%
}
%>
context.<%=ctxParam.getName()%>=null;
}
}
%>
context.<%=ctxParam.getName()%>=null;
}
<%
} else if(typeToGenerate.equals("Object")||typeToGenerate.equals("String")||typeToGenerate.equals("java.lang.String")) {
%>
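The new template keeps the whole "pattern;value" parse inside the try block and adds an epoch-milliseconds fallback when SimpleDateFormat fails. A runnable sketch of the same logic in plain Java (names are illustrative):

```java
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class DateContextParamSketch {
    // Accepts either "pattern;value" (e.g. "yyyy-MM-dd;2024-01-31"),
    // a bare value in the default pattern, or epoch milliseconds.
    static Date parse(String raw) {
        String value = raw == null ? "" : raw;
        String pattern = "yyyy-MM-dd HH:mm:ss";
        int pos = value.indexOf(';');
        if (pos > -1) {
            pattern = value.substring(0, pos);
            value = value.substring(pos + 1);
        }
        try {
            return new SimpleDateFormat(pattern).parse(value);
        } catch (ParseException e) {
            try {
                return new Date(Long.valueOf(value)); // maybe passed as long
            } catch (NumberFormatException cantParseToLong) {
                return null; // the template logs a warning here
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(parse("yyyy-MM-dd;2024-01-31"));
        System.out.println(parse("1706659200000")); // epoch millis fallback
        System.out.println(parse("not a date"));    // null
    }
}
```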
@@ -1177,6 +1217,26 @@ this.globalResumeTicket = true;//to run tPostJob
e.printStackTrace();
}
}
<%
}

//tRESTRequest may appear in microservice, the code may be called before call submit(task) method, so can't shutdown it here
if(!tRESTRequestLoopExists && threadPoolSize>0) {
%>
es.shutdown();
<%//shutdownNow should never be executed, only for safe%>
try {
if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {
es.shutdownNow();
if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {

}
}
} catch (java.lang.InterruptedException ie) {
es.shutdownNow();
} catch (java.lang.Exception e) {

}
<%
}
%>
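The generated cleanup follows the standard two-phase ExecutorService shutdown idiom: an orderly shutdown(), a bounded wait, then shutdownNow() as a safety net. For reference, the idiom in plain Java:

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownSketch {
    public static void main(String[] args) {
        ExecutorService es = Executors.newFixedThreadPool(2);
        es.submit(() -> System.out.println("task done"));
        es.shutdown(); // stop accepting new tasks, let queued tasks finish
        try {
            if (!es.awaitTermination(60, TimeUnit.SECONDS)) {
                es.shutdownNow(); // safety net; should rarely be reached
                es.awaitTermination(60, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ie) {
            es.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }
}
```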
@@ -1185,9 +1245,12 @@ this.globalResumeTicket = true;//to run tPostJob
closeJmsConnections();
<% } %>

<% if (exist_tSCP) { %>
closeScpConnections();
<% } %>
<% if (exist_tSCP) {
%>
closeCloseableConnections(<%=parameterNames%>);
<%
}
%>

<%
if (stats) {

@@ -1241,7 +1304,7 @@ if (execStat) {
closeJmsConnections();
<% } %>
<% if(exist_tSCP) { %>
closeScpConnections();
closeCloseableConnections(<%=parameterNames%>);
<% } %>
<% if (exist_tSQLDB) { %>
closeSqlDbConnections();

@@ -1309,22 +1372,17 @@ if (execStat) {
<%
if(exist_tSCP) {
%>
private void closeScpConnections() {
try {
Object obj_conn;
<%
for (INode scpNode : scpComponentsList) {
%>
obj_conn = globalMap.remove("conn_<%=scpNode.getUniqueName() %>");
if (null != obj_conn) {
((ch.ethz.ssh2.Connection) obj_conn).close();
private void closeCloseableConnections(String... names) {
java.util.Arrays.stream(names).forEach(name-> {
try {
Object obj_conn = globalMap.remove(name);
if(obj_conn != null){
((java.io.Closeable)obj_conn).close();
}
} catch (IOException ioException) {
}
<%
}
%>
} catch (java.lang.Exception e) {
}
}
});
}
<%
}
%>
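The old closeScpConnections() hard-cast each entry to ch.ethz.ssh2.Connection; the replacement treats every stored connection as a java.io.Closeable, so any connection type that implements Closeable can be cleaned up by name. A minimal sketch of the generated helper (globalMap here is a stand-in for the job's global map):

```java
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class CloseableConnectionsSketch {
    // Stand-in for the job's globalMap (assumption for this sketch).
    static final Map<String, Object> globalMap = new HashMap<>();

    static void closeCloseableConnections(String... names) {
        Arrays.stream(names).forEach(name -> {
            try {
                Object conn = globalMap.remove(name);
                if (conn != null) {
                    ((Closeable) conn).close(); // works for any Closeable connection
                }
            } catch (IOException ignored) {
                // best-effort cleanup, mirroring the template's empty catch
            }
        });
    }

    public static void main(String[] args) {
        globalMap.put("conn_tSCPConnection_1", (Closeable) () -> System.out.println("closed"));
        closeCloseableConnections("conn_tSCPConnection_1", "conn_missing");
    }
}
```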
@@ -1434,6 +1492,7 @@ if (execStat) {
if ("sftp".equals(type)) { %>
((com.jcraft.jsch.ChannelSftp) obj_conn).quit();
<%} else { %>
((org.apache.commons.net.ftp.FTPClient) obj_conn).logout();
((org.apache.commons.net.ftp.FTPClient) obj_conn).disconnect();
<%}%>
}

@@ -56,11 +56,25 @@ if ((metadatas != null) && (metadatas.size() > 0)) { // metadata
// Set up the component definition, and the properties for all types of
// components.

List<? extends IConnection> allInLineJobConns = NodeUtil.getFirstIncomingLineConnectionsOfType(node, "tRESTRequestIn");
%>
boolean doesNodeBelongToRequest_<%=cid%> = <%= allInLineJobConns.size() %> == 0;
@SuppressWarnings("unchecked")
java.util.Map<String, Object> restRequest_<%=cid%> = (java.util.Map<String, Object>)globalMap.get("restRequest");
String currentTRestRequestOperation_<%=cid%> = (String)(restRequest_<%=cid%> != null ? restRequest_<%=cid%>.get("OPERATION") : null);
<%
for (IConnection inLineConn : allInLineJobConns) {
%>
if("<%= inLineConn.getName() %>".equals(currentTRestRequestOperation_<%=cid%>)) {
doesNodeBelongToRequest_<%=cid%> = true;
}
<%
}
%>

org.talend.components.api.component.ComponentDefinition def_<%=cid %> =
new <%= def.getClass().getName()%>();

org.talend.components.api.component.runtime.Writer writer_<%=cid%> = null;
org.talend.components.api.component.runtime.Reader reader_<%=cid%> = null;
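The new code decides per component whether it belongs to the current tRESTRequest operation: a node with no incoming tRESTRequestIn line always runs, otherwise it runs only when one of its line names matches the OPERATION of the current request. A plain-Java sketch of that guard (names are illustrative):

```java
import java.util.List;
import java.util.Map;

public class RestOperationGuardSketch {
    // connNames stands for the incoming tRESTRequestIn line names that the
    // template bakes in at generation time (assumption for this sketch).
    static boolean belongsToRequest(List<String> connNames, Map<String, Object> restRequest) {
        if (connNames.isEmpty()) {
            return true; // node is not behind a tRESTRequest at all
        }
        String operation = restRequest != null ? (String) restRequest.get("OPERATION") : null;
        return connNames.contains(operation);
    }

    public static void main(String[] args) {
        System.out.println(belongsToRequest(List.of("getCustomer"), Map.of("OPERATION", "getCustomer"))); // true
        System.out.println(belongsToRequest(List.of("getCustomer"), Map.of("OPERATION", "putOrder")));    // false
    }
}
```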
@@ -149,7 +163,7 @@ globalMap.put("TALEND_COMPONENTS_VERSION", "<%=component.getVersion()%>");
boolean isParallelize ="true".equalsIgnoreCase(ElementParameterParser.getValue(node, "__PARALLELIZE__"));
if (isParallelize) {
%>
final String buffersSizeKey_<%=cid%> = "buffersSizeKey_<%=cid%>_" + Thread.currentThread().getId();
final String buffersSizeKey_<%=cid%> = "buffersSizeKey_<%=cid%>_" + Thread.currentThread().getId();
<%
}
%>

@@ -219,9 +233,11 @@ if(componentRuntime_<%=cid%> instanceof org.talend.components.api.component.runt
org.talend.components.api.component.runtime.SourceOrSink sourceOrSink_<%=cid%> = null;
if(componentRuntime_<%=cid%> instanceof org.talend.components.api.component.runtime.SourceOrSink) {
sourceOrSink_<%=cid%> = (org.talend.components.api.component.runtime.SourceOrSink)componentRuntime_<%=cid%>;
org.talend.daikon.properties.ValidationResult vr_<%=cid%> = sourceOrSink_<%=cid%>.validate(container_<%=cid%>);
if (vr_<%=cid%>.getStatus() == org.talend.daikon.properties.ValidationResult.Result.ERROR ) {
throw new RuntimeException(vr_<%=cid%>.getMessage());
if (doesNodeBelongToRequest_<%=cid%>) {
org.talend.daikon.properties.ValidationResult vr_<%=cid%> = sourceOrSink_<%=cid%>.validate(container_<%=cid%>);
if (vr_<%=cid%>.getStatus() == org.talend.daikon.properties.ValidationResult.Result.ERROR ) {
throw new RuntimeException(vr_<%=cid%>.getMessage());
}
}
}

@@ -297,13 +313,13 @@ if (hasOutputOnly || asInputComponent) {

for (; available_<%=cid%>; available_<%=cid%> = reader_<%=cid%>.advance()) {
nb_line_<%=cid %>++;

<%if(hasDataOutput) {%>
if (multi_output_is_allowed_<%=cid%>) {
<%if(main!=null){%>
<%=main.getName()%> = null;
<%}%>

<%if(reject!=null){%>
<%=reject.getName()%> = null;
<%}%>

@@ -315,11 +331,11 @@ if (hasOutputOnly || asInputComponent) {
<%
if (main != null) {
%>

if(multi_output_is_allowed_<%=cid%>) {
<%=main.getName()%> = new <%=main.getName() %>Struct();
}

<%
irToRow.generateConvertRecord("data_" + cid, main.getName(), main.getMetadataTable().getListColumns());
}

@@ -330,7 +346,7 @@ if (hasOutputOnly || asInputComponent) {
if (reject!=null) {
%>
Object data_<%=cid%> = info_<%=cid%>.get("talend_record");

if (multi_output_is_allowed_<%=cid%>) {
<%=reject.getName()%> = new <%=reject.getName() %>Struct();
}

@@ -343,19 +359,19 @@ if (hasOutputOnly || asInputComponent) {
}
<%
Set<String> commonColumns = new HashSet<String>();

for (IMetadataColumn column : columnList) {
commonColumns.add(column.getLabel());
}

//pass error columns
List<IMetadataColumn> rejectColumns = reject.getMetadataTable().getListColumns();
for(IMetadataColumn column : rejectColumns) {
String columnName = column.getLabel();

// JavaType javaType = JavaTypesManager.getJavaTypeFromId(column.getTalendType());
String typeToGenerate = JavaTypesManager.getTypeToGenerate(column.getTalendType(), column.isNullable());

//error columns
if(!commonColumns.contains(columnName)) {
%>

@@ -385,7 +401,7 @@ if (hasOutputOnly || asInputComponent) {
<%
}
%>
} // end of catch
} // end of catch
<%
// The for loop around the incoming records from the reader is left open.

@@ -397,9 +413,13 @@ if (hasOutputOnly || asInputComponent) {
org.talend.components.api.component.runtime.Sink sink_<%=cid%> =
(org.talend.components.api.component.runtime.Sink)sourceOrSink_<%=cid%>;
org.talend.components.api.component.runtime.WriteOperation writeOperation_<%=cid%> = sink_<%=cid%>.createWriteOperation();
writeOperation_<%=cid%>.initialize(container_<%=cid%>);
if (doesNodeBelongToRequest_<%=cid%>) {
writeOperation_<%=cid%>.initialize(container_<%=cid%>);
}
writer_<%=cid%> = writeOperation_<%=cid%>.createWriter(container_<%=cid%>);
writer_<%=cid%>.open("<%=cid%>");
if (doesNodeBelongToRequest_<%=cid%>) {
writer_<%=cid%>.open("<%=cid%>");
}

resourceMap.put("writer_<%=cid%>", writer_<%=cid%>);
} // end of "sourceOrSink_<%=cid%> instanceof ...Sink"

@@ -448,7 +468,7 @@ if (hasOutputOnly || asInputComponent) {
}
}
%>

java.lang.Iterable<?> outgoingMainRecordsList_<%=cid%> = new java.util.ArrayList<Object>();
java.util.Iterator outgoingMainRecordsIt_<%=cid%> = null;

@@ -120,7 +120,8 @@ if(hasInput){
dm_<%=cid%>.getLogicalType(),
dm_<%=cid%>.getFormat(),
dm_<%=cid%>.getDescription(),
dm_<%=cid%>.isNullable());
dm_<%=cid%>.isNullable(),
dm_<%=cid%>.isKey());
}
incomingEnforcer_<%=cid%>.createRuntimeSchema();
}

@@ -170,16 +170,8 @@ class IndexedRecordToRowStructGenerator {
if (columnName.equals(dynamicColName)) {
%>
java.util.Map<String, Object> dynamicValue_<%=cid%> = (java.util.Map<String, Object>) <%=codeVarSchemaEnforcer%>.get(<%=i%>);
org.apache.avro.Schema dynSchema_<%=cid%> = ((org.talend.codegen.enforcer.OutgoingDynamicSchemaEnforcer) <%=codeVarSchemaEnforcer%>).getDynamicFieldsSchema();
for (org.apache.avro.Schema.Field dynamicField_<%=cid%> : dynSchema_<%=cid%>.getFields()){
String name = dynamicField_<%=cid%>.name();
if("true".equals(dynamicField_<%=cid%>.getProp("ENABLE_SPECIAL_TABLENAME"))){
dynamicValue_<%=cid%>.put(dynamicField_<%=cid%>.getProp("talend.field.dbColumnName"), dynamicValue_<%=cid%>.get(name));
dynamicValue_<%=cid%>.remove(name);
}
}
for (java.util.Map.Entry<String, Object> dynamicValueEntry_<%=cid%> : dynamicValue_<%=cid%>.entrySet()) {
<%=codeVarDynamic%>.setColumnValue(<%=codeVarDynamic%>.getIndex(dynamicValueEntry_<%=cid%>.getKey()), dynamicValueEntry_<%=cid%>.getValue());
<%=codeVarDynamic%>.addColumnValue(dynamicValueEntry_<%=cid%>.getValue());
}
<%=codeVarRowStruct%>.<%=dynamicColName%> = <%=codeVarDynamic%>;
<%

@@ -73,6 +73,9 @@ import pigudf.<%=routine%>;
import routines.<%=routine%>;
<% }
}%>
<%for (String codesJar : CodeGeneratorRoutine.getRequiredCodesJarName(process)) {%>
import <%=codesJar%>;
<%}%>
import routines.system.*;
import routines.system.api.*;
import java.text.ParseException;

@@ -382,11 +385,101 @@ public <%=JavaTypesManager.getTypeToGenerate(ctxParam.getType(),true)%> get<%=Ch

<%
INode jobCatcherNode = null;

//one matched component or part, one thread
//why not computed by cpu or resource : please image this case :
//loop==>(input==>(twritexmlfield A)==>(twritexmlfield B)==>(twritexmlfield C)==>output), dead lock as cycle dependency and only one thead in thread pool
//maybe newCachedThreadPool is a better idea, but that have risk for creating more threads, then more memory for TDI-47230
//why not generate thread pool object in subprocess scope :
// 1: major reason : difficult to control the var scope, somewhere can't access it, then compiler issue
// 2: we may need this thread pool for bigger scope, not only for twritexmlfield/twritejsonfield in future
// 3: we don't suppose this thread pool cost big resource after all tasks done, so we can shutdown it more later,
//    for example, most time, user will use less than 3 twritexmlfield in one job, then 3 threads thread pool, we can close them in job finish code part,
//    not a big cost to keep that. And of course, we best to start&clean it in subprocess finish, but that's risk of 1 above.
int threadPoolSize = 0;
boolean tHMapExists = false;
boolean tHMapOutExists = false;
boolean tRESTRequestLoopExists = false;
for (INode nodeInProcess : processNodes) {
String componentName = nodeInProcess.getComponent().getName();
if("tJobStructureCatcher".equals(componentName)) {

if(jobCatcherNode==null && "tJobStructureCatcher".equals(componentName)) {
jobCatcherNode = nodeInProcess;
break;
continue;
}

if(!nodeInProcess.isActivate()) continue;

if("tHMap".equals(componentName)) {
tHMapExists = true;
continue;
}

if("tHMapOut".equals(componentName)) {
tHMapOutExists = true;
continue;
}

if("tRESTRequestLoop".equals(componentName)) {
tRESTRequestLoopExists = true;
continue;
}

if("tWriteXMLFieldOut".equals(componentName)) {
IConnection nextMergeConn = NodeUtil.getNextMergeConnection(nodeInProcess);
if(nextMergeConn == null || nextMergeConn.getInputId()==1){
threadPoolSize++;
}
}
}

if(threadPoolSize>0) {
if(tRESTRequestLoopExists) {//miscroservice
%>
private class DaemonThreadFactory implements java.util.concurrent.ThreadFactory {
java.util.concurrent.ThreadFactory factory = java.util.concurrent.Executors.defaultThreadFactory();

public java.lang.Thread newThread(java.lang.Runnable r) {
java.lang.Thread t = factory.newThread(r);
t.setDaemon(true);
return t;
}
}
<%
}

if(tHMapExists || tHMapOutExists) {
%>
private final java.util.concurrent.ExecutorService es = java.util.concurrent.Executors.newFixedThreadPool(<%=threadPoolSize%> <%if(tRESTRequestLoopExists) {%>,new DaemonThreadFactory()<%}%>);
<%
} else {
%>
private final java.util.concurrent.ExecutorService es = java.util.concurrent.Executors.newCachedThreadPool(<%if(tRESTRequestLoopExists) {%>new DaemonThreadFactory()<%}%>);
<%
}

if(tRESTRequestLoopExists) {//miscroservice
%>
{
java.lang.Runtime.getRuntime().addShutdownHook(new java.lang.Thread() {
public void run() {
es.shutdown();
try {
if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {
es.shutdownNow();
if(!es.awaitTermination(60, java.util.concurrent.TimeUnit.SECONDS)) {

}
}
} catch (java.lang.InterruptedException ie) {
es.shutdownNow();
} catch (java.lang.Exception e) {

}
}
});
}
<%
}
}
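The DaemonThreadFactory above wraps the default factory and marks every thread as a daemon, so in the microservice case the pool cannot keep the JVM alive; the registered shutdown hook then drains it with the same two-phase idiom. A compact usage sketch:

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

public class DaemonFactorySketch {
    static class DaemonThreadFactory implements ThreadFactory {
        private final ThreadFactory factory = Executors.defaultThreadFactory();

        @Override
        public Thread newThread(Runnable r) {
            Thread t = factory.newThread(r);
            t.setDaemon(true); // daemon threads do not keep the JVM alive
            return t;
        }
    }

    public static void main(String[] args) {
        ExecutorService es = Executors.newCachedThreadPool(new DaemonThreadFactory());
        es.submit(() -> System.out.println("work on a daemon thread"));
        // Without daemon threads (or an explicit shutdown), a microservice
        // container could hang on exit; the shutdown hook is the backstop.
    }
}
```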
@@ -20,6 +20,7 @@
org.talend.core.model.process.IHashConfiguration
org.talend.core.model.process.IHashableColumn
org.talend.core.model.utils.NodeUtil
org.talend.core.model.utils.TalendTextUtils
"
class="SubProcessHeader"
skeleton="subprocess_header_java.skeleton"

@@ -183,6 +184,48 @@ public static class <%=conn.getName() %>Struct<%=templateOrigin %> implements ro
public <%= typeToGenerate %> get<%=column.getLabel().substring(0, 1).toUpperCase()%><%=column.getLabel().substring(1)%> () {
return this.<%=column.getLabel()%>;
}

public Boolean <%=column.getLabel()%>IsNullable(){
return <%=column.isNullable()%>;
}
public Boolean <%=column.getLabel()%>IsKey(){
return <%=column.isKey()%>;
}
public Integer <%=column.getLabel()%>Length(){
return <%=column.getLength()%>;
}
public Integer <%=column.getLabel()%>Precision(){
return <%=column.getPrecision()%>;
}
public String <%=column.getLabel()%>Default(){
<% if (column.getDefault() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getDefault()))%>";
<% } %>
}
public String <%=column.getLabel()%>Comment(){
<% if (column.getComment() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getComment()))%>";
<% } %>
}
public String <%=column.getLabel()%>Pattern(){
<% if (column.getPattern() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getPattern()))%>";
<% } %>
}
public String <%=column.getLabel()%>OriginalDbColumnName(){
<% if (column.getOriginalDbColumnName() == null) { %>
return null;
<% } else { %>
return "<%=TalendTextUtils.escapeJavaText(TalendTextUtils.removeQuotes(column.getOriginalDbColumnName()))%>";
<% } %>
}

<%
if((conn.getLineStyle() == EConnectionType.FLOW_REF) && conn.getTarget().getUniqueName().startsWith("tXMLMap") && "id_Document".equals(javaType.getId())) {
%>
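This hunk adds per-column metadata accessors (nullability, key flag, length, precision, default, comment, pattern, original DB column name) to every generated row struct. A hypothetical expansion for a single column labelled name, to show the shape of the generated code (the concrete values are assumed for illustration):

```java
// Hypothetical expansion for a column labelled "name" (String, nullable,
// length 255, not a key) — illustrating the accessors the template generates.
public class NameColumnStructSketch {
    public String name;

    public String getName() { return this.name; }

    public Boolean nameIsNullable() { return true; }
    public Boolean nameIsKey() { return false; }
    public Integer nameLength() { return 255; }
    public Integer namePrecision() { return 0; }
    public String nameDefault() { return null; }            // no default value
    public String nameComment() { return null; }            // no comment set
    public String namePattern() { return null; }            // no date pattern
    public String nameOriginalDbColumnName() { return "NAME"; }
}
```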
@@ -10,6 +10,7 @@ CodeGenerator.getGraphicalNode2=------process.getGeneratingNodes()------
CodeGenerator.JET.TimeOut=JET initialisation Time Out
CodeGenerator.newLine=\n\n\n\n
CodeGenerator.Node.NotFound=Node not found in current process
CodeGenerator.Components.NotFound={0}: Component is missing: {1}; use -D{2}=false in your studio or commandline to skip this check, and a warning message will be logged.
JavaRoutineSynchronizer.UnsupportedOperation.Exception1=method not implemented: org.talend.designer.codegen.JavaRoutineSynchronizer line:49
JavaRoutineSynchronizer.UnsupportedOperation.Exception2=method not implemented: org.talend.designer.codegen.JavaRoutineSynchronizer line:58
JetSkeletonManager.unableLoad=unable to load skeleton update cache file

@@ -66,6 +66,7 @@ import org.talend.designer.codegen.model.CodeGeneratorEmittersPoolFactory;
import org.talend.designer.codegen.model.CodeGeneratorInternalTemplatesFactoryProvider;
import org.talend.designer.codegen.proxy.JetProxy;
import org.talend.designer.core.generic.model.Component;
import org.talend.designer.core.model.components.DummyComponent;
import org.talend.designer.runprocess.ProcessorUtilities;

/**

@@ -845,6 +846,25 @@ public class CodeGenerator implements ICodeGenerator {
IComponentFileNaming componentFileNaming = ComponentsFactoryProvider.getFileNamingInstance();

IComponent component = node.getComponent();
if (component instanceof DummyComponent) {
if (((DummyComponent) component).isMissingComponent()) {
String processName = "";
try {
IProcess proc = node.getProcess();
processName = proc.getName() + " " + proc.getVersion();
} catch (Exception e) {
ExceptionHandler.process(e);
}
if (IProcess.ERR_ON_COMPONENT_MISSING) {
throw new CodeGeneratorException(Messages.getString("CodeGenerator.Components.NotFound", processName,
component.getName(), IProcess.PROP_ERR_ON_COMPONENT_MISSING));
}
if (ECodePart.BEGIN.equals(part)) {
log.warn(Messages.getString("CodeGenerator.Components.NotFound", processName, component.getName(),
IProcess.PROP_ERR_ON_COMPONENT_MISSING));
}
}
}
// some code unification to handle all component types the same way.
String templateURI = component.getTemplateFolder() + TemplateUtil.DIR_SEP
+ componentFileNaming.getJetFileName(component.getTemplateNamePrefix(), language.getExtension(), part);
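The new guard either fails code generation or merely logs a warning when a job references a missing component, controlled by a flag (see the CodeGenerator.Components.NotFound message above). A speculative sketch of that fail-or-warn pattern; the property name here is invented for illustration:

```java
public class MissingComponentCheckSketch {
    // Hypothetical property name for illustration; the real flag is read
    // through IProcess.ERR_ON_COMPONENT_MISSING in the change above.
    static final boolean ERR_ON_MISSING =
            Boolean.parseBoolean(System.getProperty("talend.component.errorOnMissing", "true"));

    static void checkComponent(String processName, String componentName) {
        String msg = processName + ": Component is missing: " + componentName;
        if (ERR_ON_MISSING) {
            throw new IllegalStateException(msg); // fail the generation outright
        }
        System.err.println("WARN " + msg); // otherwise warn and continue
    }

    public static void main(String[] args) {
        try {
            checkComponent("demo_job 0.1", "tMissingComponent_1");
        } catch (IllegalStateException e) {
            System.out.println("caught: " + e.getMessage());
        }
    }
}
```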
@@ -69,6 +69,15 @@ public class JavaRoutineSynchronizer extends AbstractRoutineSynchronizer {
syncRoutineItems(getRoutines(true), true);
}

@Override
public void syncAllInnerCodes() throws SystemException {
syncInnerCodeItems(false);
}

@Override
public void syncAllInnerCodesForLogOn() throws SystemException {
syncInnerCodeItems(true);
}

private void syncRoutineItems(Collection<RoutineItem> routineObjects, boolean forceUpdate) throws SystemException {
for (RoutineItem routineItem : routineObjects) {

@@ -8,6 +8,7 @@ Require-Bundle: org.eclipse.core.runtime,
org.eclipse.ui,
org.apache.log4j,
org.apache.commons.collections,
org.apache.commons.discovery,
org.apache.commons.logging,
org.apache.commons.beanutils,
org.apache.commons.io,

@@ -25,7 +26,6 @@ Require-Bundle: org.eclipse.core.runtime,
org.talend.repository,
org.talend.core.repository,
org.talend.updates.runtime,
org.apache.axis,
org.eclipse.ui.intro,
org.eclipse.ui.forms,
org.eclipse.jface.text

@@ -1,66 +0,0 @@
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.components.exchange.proxy;

import org.apache.commons.lang.StringUtils;

/**
 *
 * DOC hcyi class global comment. Detailled comment
 */
public class DefaultHTTPSTransportClientProperties extends DefaultHTTPTransportClientProperties {

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getProxyHost()
 */
@Override
public String getProxyHost() {
return StringUtils.trimToEmpty(System.getProperty("https.proxyHost")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getNonProxyHosts()
 */
@Override
public String getNonProxyHosts() {

return StringUtils.trimToEmpty(System.getProperty("https.nonProxyHosts")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getPort()
 */
@Override
public String getProxyPort() {

return StringUtils.trimToEmpty(System.getProperty("https.proxyPort")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getUser()
 */
@Override
public String getProxyUser() {

return StringUtils.trimToEmpty(System.getProperty("https.proxyUser")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getPassword()
 */
@Override
public String getProxyPassword() {

return StringUtils.trimToEmpty(System.getProperty("https.proxyPassword")); //$NON-NLS-1$
}
}

@@ -1,58 +0,0 @@
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.components.exchange.proxy;

import org.apache.axis.components.net.TransportClientProperties;
import org.apache.commons.lang.StringUtils;

/**
 *
 * DOC hcyi class global comment. Detailled comment
 */
public class DefaultHTTPTransportClientProperties implements TransportClientProperties {

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getProxyHost()
 */
public String getProxyHost() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyHost")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getNonProxyHosts()
 */
public String getNonProxyHosts() {
return StringUtils.trimToEmpty(System.getProperty("http.nonProxyHosts")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getPort()
 */
public String getProxyPort() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyPort")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getProxyUser()
 */
public String getProxyUser() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyUser")); //$NON-NLS-1$
}

/**
 * @see org.apache.axis.components.net.TransportClientProperties#getProxyPassword()
 */
public String getProxyPassword() {
return StringUtils.trimToEmpty(System.getProperty("http.proxyPassword")); //$NON-NLS-1$
}
}

@@ -25,11 +25,9 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.axis.components.net.TransportClientProperties;
import org.apache.axis.components.net.TransportClientPropertiesFactory;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.collections.map.MultiValueMap;
import org.apache.commons.discovery.tools.ManagedProperties;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.NameValuePair;

@@ -151,14 +149,17 @@ public class ExchangeUtils {
public static String sendGetRequest(String urlAddress) throws Exception {
HttpClient httpclient = new HttpClient();
GetMethod getMethod = new GetMethod(urlAddress);
TransportClientProperties tcp = TransportClientPropertiesFactory.create("http");
if (tcp.getProxyHost().length() != 0) {
String proxyUser = ManagedProperties.getProperty("http.proxyUser");
String proxyPassword = ManagedProperties.getProperty("http.proxyPassword");
String proxyHost = ManagedProperties.getProperty("http.proxyHost");
proxyHost = proxyHost != null ? proxyHost : "";
String proxyPort = ManagedProperties.getProperty("http.proxyPort");
if (proxyHost.length() != 0) {
UsernamePasswordCredentials creds = new UsernamePasswordCredentials(
tcp.getProxyUser() != null ? tcp.getProxyUser() : "",
tcp.getProxyPassword() != null ? tcp.getProxyUser() : "");
proxyUser != null ? proxyUser : "", proxyPassword != null ? proxyPassword : "");
httpclient.getState().setProxyCredentials(AuthScope.ANY, creds);
HostConfiguration hcf = new HostConfiguration();
hcf.setProxy(tcp.getProxyHost(), Integer.parseInt(tcp.getProxyPort()));
hcf.setProxy(proxyHost, Integer.parseInt(proxyPort));
httpclient.executeMethod(hcf, getMethod);
} else {
httpclient.executeMethod(getMethod);
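The change drops the axis TransportClientProperties wrapper (both proxy helper classes above were deleted) and reads the proxy settings through ManagedProperties instead; it also fixes the old bug where getProxyUser() was passed as the password. A sketch of the same lookup done with plain JVM system properties:

```java
public class ProxySettingsSketch {
    // Reads the standard JVM proxy properties directly — the same values
    // the removed axis helper classes used to wrap.
    public static void main(String[] args) {
        String proxyHost = System.getProperty("http.proxyHost", "");
        String proxyPort = System.getProperty("http.proxyPort", "");
        String proxyUser = System.getProperty("http.proxyUser", "");
        String proxyPassword = System.getProperty("http.proxyPassword", "");

        if (!proxyHost.isEmpty()) {
            // Credentials would be built from proxyUser/proxyPassword here,
            // never from proxyUser twice (the bug the hunk above fixes).
            System.out.println("using proxy " + proxyHost + ":" + proxyPort
                    + (proxyUser.isEmpty() ? "" : " as " + proxyUser));
        } else {
            System.out.println("direct connection");
        }
    }
}
```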
@@ -42,7 +42,18 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.19</version>
</dependency>
<!-- Spring 3 dependencies -->
<dependency>
<groupId>org.springframework</groupId>

@@ -1,69 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>checkArchive-1.1-20190917</artifactId>
<version>6.0.0</version>
<name>checkArchive</name>
<description>Dependence for tFileArchive and tFileUnAchive</description>
<url>http://maven.apache.org</url>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<java.source.version>1.7</java.source.version>
</properties>

<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>

<dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-compress -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.19</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${java.source.version}</source>
<target>${java.source.version}</target>
<showDeprecation>true</showDeprecation>
<showWarnings>true</showWarnings>
<compilerArgument>-XDignore.symbol.file</compilerArgument>
<fork>true</fork>
</configuration>
</plugin>
</plugins>
</build>
</project>

@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>filecopy</artifactId>
<version>2.0.1</version>
<version>2.0.3</version>
<packaging>jar</packaging>

<name>talend-copy</name>

@@ -14,6 +14,7 @@
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<java.source.version>1.8</java.source.version>
<junit5.version>5.4.2</junit5.version>
<slf4j.version>1.7.28</slf4j.version>
</properties>

<distributionManagement>

@@ -52,7 +53,12 @@
<version>${junit5.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>

@@ -20,11 +20,16 @@ import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * DOC Administrator class global comment. Detailled comment
 */
public class FileCopy {

static Logger logger = LoggerFactory.getLogger(Object.class);

/** Private constructor, only static methods */
private FileCopy() {
}

@@ -37,19 +42,57 @@ public class FileCopy {
 * @param delSrc : true if delete source.
 * @throws IOException : if IO pb.
 */
public static void copyFile(String srcFileName, String desFileName, boolean delSrc) throws IOException {
public static void copyFile(String srcFileName, String desFileName, boolean delSrc, boolean keepModified)
throws IOException {
final Path source = Paths.get(srcFileName);
final Path destination = Paths.get(desFileName);

FileTime lastModifiedTime = null;
try {
lastModifiedTime = Files.getLastModifiedTime(source);
} catch (IOException e) {
logger.warn(e.getLocalizedMessage());
}
if (delSrc) {
// move : more efficient if in same FS and mustr delete existing file.
FileTime lastModifiedTime = Files.getLastModifiedTime(source);
Files.move(source, destination, StandardCopyOption.REPLACE_EXISTING);
Files.setLastModifiedTime(destination,lastModifiedTime);
} else {
Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);
Files.setLastModifiedTime(destination,Files.getLastModifiedTime(source));
}
if(keepModified){
try {
Files.setLastModifiedTime(destination,lastModifiedTime);
} catch (IOException e) {
logger.warn(e.getLocalizedMessage());
}
}
}

public static void copyFile(String srcFileName, String desFileName, boolean delSrc ) throws IOException {
copyFile(srcFileName,desFileName,delSrc,true);
}

/**
 * Force Copy and Delete files.
 *
 * @param srcFileName : file name for source file.
 * @param desFileName : file name for destination file.
 * @throws IOException : if IO pb.
 */
public static void forceCopyAndDelete(String srcFileName, String desFileName, boolean keepModified) throws IOException {
final Path source = Paths.get(srcFileName);
final Path destination = Paths.get(desFileName);
final long lastModifiedTime = new File(srcFileName).lastModified();

Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);
Files.delete(source);
if(keepModified){
destination.toFile().setLastModified(lastModifiedTime);
}

}

public static void forceCopyAndDelete(String srcFileName, String desFileName) throws IOException {
forceCopyAndDelete(srcFileName,desFileName,true);
}

}
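copyFile() gained a keepModified flag, and the original three-argument overload now delegates with keepModified=true, so existing generated code keeps its behavior. A usage sketch (the file paths are placeholders):

```java
import java.io.IOException;

public class FileCopyUsageSketch {
    public static void main(String[] args) throws IOException {
        // Move (delSrc=true) and preserve the source's last-modified time.
        FileCopy.copyFile("/tmp/in.txt", "/tmp/out.txt", true, true);

        // The old three-argument signature still compiles and now defaults
        // to keepModified=true via the delegating overload.
        FileCopy.copyFile("/tmp/in2.txt", "/tmp/out2.txt", false);
    }
}
```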
@@ -100,6 +100,26 @@ class FileCopyTest {
|
||||
Assertions.assertEquals(referenceSize, copy.length(), "Size error");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testForceCopyWithDelete() throws Exception {
|
||||
final URL repCopy = Thread.currentThread().getContextClassLoader().getResource("copy");
|
||||
|
||||
File file = this.buildFile("fileToDelete.txt", 10L * 1024L);
|
||||
file.deleteOnExit();
|
||||
File copy = new File(repCopy.getPath(), "fileToDelete.txt");
|
||||
long referenceSize = file.length();
|
||||
if (!copy.exists()) {
|
||||
copy.createNewFile();
|
||||
}
|
||||
copy.deleteOnExit();
|
||||
|
||||
FileCopy.forceCopyAndDelete(file.getPath(), copy.getPath());
|
||||
|
||||
Assertions.assertFalse(file.exists(), "file not delete");
|
||||
Assertions.assertTrue(copy.exists(), "small file : original file deleted");
|
||||
Assertions.assertEquals(referenceSize, copy.length(), "Size error");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testLastModifiedTime() throws Exception {
|
||||
final URL repCopy = Thread.currentThread().getContextClassLoader().getResource("copy");
|
||||
@@ -143,4 +163,22 @@ class FileCopyTest {
|
||||
|
||||
return generatedFile;
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKeepLastModifiedTime() throws Exception {
|
||||
final URL repCopy = Thread.currentThread().getContextClassLoader().getResource("copy");
|
||||
|
||||
File file = this.buildFile("fileLMT.txt", 10L * 1024L);
|
||||
file.deleteOnExit();
|
||||
long referencceTime = 324723894L;
|
||||
file.setLastModified(referencceTime);
|
||||
|
||||
File copy = new File(repCopy.getPath(), "fileLMTDestination.txt");
|
||||
if (copy.exists()) {
|
||||
copy.delete();
|
||||
}
|
||||
copy.deleteOnExit();
|
||||
FileCopy.copyFile(file.getPath(), copy.getPath(), true,true);
|
||||
Assertions.assertEquals(referencceTime, copy.lastModified(), "modified time is not idential");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -230,9 +230,9 @@
      <scope>compile</scope>
    </dependency>
    <dependency>
-     <groupId>log4j</groupId>
-     <artifactId>log4j</artifactId>
-     <version>1.2.14</version>
+     <groupId>ch.qos.reload4j</groupId>
+     <artifactId>reload4j</artifactId>
+     <version>1.2.19</version>
      <scope>runtime</scope>
      <optional>true</optional>
    </dependency>
@@ -11,7 +11,7 @@

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-   <cxf.version>3.1.2</cxf.version>
+   <cxf.version>3.4.7</cxf.version>
  </properties>

  <build>
@@ -77,8 +77,8 @@
        <artifactId>maven-compiler-plugin</artifactId>
        <version>2.5.1</version>
        <configuration>
-         <source>1.7</source>
-         <target>1.7</target>
+         <source>1.8</source>
+         <target>1.8</target>
          <encoding>UTF-8</encoding>
        </configuration>
      </plugin>
File diff suppressed because it is too large
@@ -22,6 +22,7 @@ import javax.xml.bind.annotation.XmlType;
 * <element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 * <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 * <element name="processSteps" type="{http://www.talend.com/mdm}WSTransformerProcessStep" maxOccurs="unbounded" minOccurs="0"/>
+* <element name="withAdminPermissions" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
 * </sequence>
 * </restriction>
 * </complexContent>
@@ -34,7 +35,8 @@ import javax.xml.bind.annotation.XmlType;
@XmlType(name = "WSTransformerV2", propOrder = {
    "description",
    "name",
-   "processSteps"
+   "processSteps",
+   "withAdminPermissions"
})
public class WSTransformerV2 {

@@ -42,6 +44,7 @@ public class WSTransformerV2 {
    protected String name;
    @XmlElement(nillable = true)
    protected List<WSTransformerProcessStep> processSteps;
+   protected Boolean withAdminPermissions;

    /**
     * Default no-arg constructor
@@ -55,10 +58,11 @@ public class WSTransformerV2 {
     * Fully-initialising value constructor
     *
     */
-   public WSTransformerV2(final String description, final String name, final List<WSTransformerProcessStep> processSteps) {
+   public WSTransformerV2(final String description, final String name, final List<WSTransformerProcessStep> processSteps, final Boolean withAdminPermissions) {
        this.description = description;
        this.name = name;
        this.processSteps = processSteps;
+       this.withAdminPermissions = withAdminPermissions;
    }

    /**
@@ -138,4 +142,28 @@ public class WSTransformerV2 {
        return this.processSteps;
    }

+   /**
+    * Gets the value of the withAdminPermissions property.
+    *
+    * @return
+    *     possible object is
+    *     {@link Boolean }
+    *
+    */
+   public Boolean isWithAdminPermissions() {
+       return withAdminPermissions;
+   }
+
+   /**
+    * Sets the value of the withAdminPermissions property.
+    *
+    * @param value
+    *     allowed object is
+    *     {@link Boolean }
+    *
+    */
+   public void setWithAdminPermissions(Boolean value) {
+       this.withAdminPermissions = value;
+   }

}
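A short sketch of how the extended constructor and the new JAXB-style accessors added above might be used. The transformer name and the (empty) steps list are made up for illustration.

    import java.util.ArrayList;
    import java.util.List;

    public class TransformerDemo {
        public static void main(String[] args) {
            List<WSTransformerProcessStep> steps = new ArrayList<>();
            // The new fourth constructor argument carries the withAdminPermissions flag.
            WSTransformerV2 transformer =
                    new WSTransformerV2("demo transformer", "demo", steps, Boolean.TRUE);
            // Generated accessors follow the JAXB Boolean convention: isX()/setX().
            transformer.setWithAdminPermissions(Boolean.FALSE);
            System.out.println(transformer.isWithAdminPermissions()); // false
        }
    }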
@@ -4,7 +4,7 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>org.talend</groupId>
  <artifactId>talendMQRFH2</artifactId>
- <version>1.0.1-20190206</version>
+ <version>1.1.0-20220307</version>
  <packaging>jar</packaging>

  <properties>
@@ -36,20 +36,10 @@
  </distributionManagement>

  <dependencies>
-   <dependency>
-     <groupId>com.ibm.mq</groupId>
-     <artifactId>com.ibm.mq</artifactId>
-     <version>8.0.0.9</version>
-   </dependency>
-   <dependency>
-     <groupId>com.ibm.mq</groupId>
-     <artifactId>com.ibm.mqjms</artifactId>
-     <version>8.0.0.9</version>
-   </dependency>
    <dependency>
      <groupId>com.ibm.mq</groupId>
      <artifactId>com.ibm.mq.allclient</artifactId>
-     <version>8.0.0.9</version>
+     <version>9.2.4.0</version>
    </dependency>
    <dependency>
      <groupId>org.talend.libraries</groupId>
@@ -67,9 +57,9 @@
      <version>6.0.0</version>
    </dependency>
    <dependency>
-     <groupId>log4j</groupId>
-     <artifactId>log4j</artifactId>
-     <version>1.2.17</version>
+     <groupId>ch.qos.reload4j</groupId>
+     <artifactId>reload4j</artifactId>
+     <version>1.2.19</version>
    </dependency>
  </dependencies>

@@ -113,9 +113,9 @@
      <scope>test</scope>
    </dependency>
    <dependency>
-     <groupId>log4j</groupId>
-     <artifactId>log4j</artifactId>
-     <version>1.2.17</version>
+     <groupId>ch.qos.reload4j</groupId>
+     <artifactId>reload4j</artifactId>
+     <version>1.2.19</version>
      <scope>test</scope>
    </dependency>
    <!-- JUnit -->
@@ -63,9 +63,9 @@
      <version>4.1.2</version>
    </dependency>
    <dependency>
-     <groupId>log4j</groupId>
-     <artifactId>log4j</artifactId>
-     <version>1.2.17</version>
+     <groupId>ch.qos.reload4j</groupId>
+     <artifactId>reload4j</artifactId>
+     <version>1.2.19</version>
    </dependency>
  </dependencies>
  <build>
@@ -7,21 +7,21 @@
  <groupId>org.talend.libraries</groupId>
  <artifactId>talend-codegen-utils</artifactId>
  <!-- release for revert version of library -->
- <version>0.28.0</version>
+ <version>0.31.0</version>
  <packaging>jar</packaging>

  <properties>
    <avro.version>1.8.0</avro.version>
-   <components.version>0.25.0-SNAPSHOT</components.version>
-   <daikon.version>0.26.0-SNAPSHOT</daikon.version>
+   <components.version>0.30.0</components.version>
+   <daikon.version>0.31.11</daikon.version>
    <hamcrest.version>1.3</hamcrest.version>
    <junit.version>4.12</junit.version>
    <java-formatter.plugin.version>0.1.0</java-formatter.plugin.version>
    <formatter.plugin.version>1.6.0-SNAPSHOT</formatter.plugin.version>
    <mockito.version>2.2.15</mockito.version>
    <jacoco.plugin.version>0.7.8</jacoco.plugin.version>
-   <maven.compiler.source>1.7</maven.compiler.source>
-   <maven.compiler.target>1.7</maven.compiler.target>
+   <maven.compiler.source>1.8</maven.compiler.source>
+   <maven.compiler.target>1.8</maven.compiler.target>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
  </properties>
@@ -24,8 +24,10 @@ import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.TimeZone;

import org.apache.avro.Schema;
@@ -33,9 +35,11 @@ import org.apache.avro.Schema.Field;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.SchemaParseException;
import org.talend.codegen.DiSchemaConstants;
import org.talend.daikon.avro.AvroUtils;
import org.talend.daikon.avro.LogicalTypeUtils;
+import org.talend.daikon.avro.NameUtil;
import org.talend.daikon.avro.SchemaConstants;

/**
@@ -133,6 +137,7 @@ public class IncomingSchemaEnforcer {
        }
    }

+   // TODO: remove this method; it is no longer used anywhere in the javajet templates
    /**
     * Take all of the parameters from the dynamic metadata and adapt it to a field for the runtime Schema.
     *
@@ -144,6 +149,62 @@ public class IncomingSchemaEnforcer {
        addDynamicField(name, type, null, format, description, isNullable);
    }

+   private Set<String> existNames;
+
+   private Map<String, String> unvalidName2ValidName;
+
+   private int index = 0;
+
+   /**
+    * Recreates dynamic field from parameters retrieved from DI dynamic metadata
+    *
+    * @param name dynamic field name
+    * @param diType di column type
+    * @param logicalType dynamic field logical type; could be null
+    * @param fieldPattern dynamic field date format
+    * @param description dynamic field description
+    * @param isNullable defines whether dynamic field may contain <code>null</code> value
+    * @param isKey defines whether dynamic field is key field
+    */
+   public void addDynamicField(String name, String diType, String logicalType, String fieldPattern, String description,
+           boolean isNullable, boolean isKey) {
+       if (!needsInitDynamicColumns())
+           return;
+       Schema fieldSchema = diToAvro(diType, logicalType);
+
+       if (isNullable) {
+           fieldSchema = SchemaBuilder.nullable().type(fieldSchema);
+       }
+
+       Schema.Field field;
+       try {
+           field = new Schema.Field(name, fieldSchema, description, (Object) null);
+       } catch (SchemaParseException e) {
+           // The name contains special characters (like $ and #) that fail the Avro name check.
+           // Note that unicode names such as Japanese can pass the check, even though that is not expected.
+           if (existNames == null) {
+               existNames = new HashSet<>();
+               unvalidName2ValidName = new HashMap<>();
+           }
+
+           String validName = NameUtil.correct(name, index++, existNames);
+           existNames.add(validName);
+           unvalidName2ValidName.put(name, validName);
+
+           field = new Schema.Field(validName, fieldSchema, description, (Object) null);
+           field.addProp(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME, name);
+       }
+
+       // Set pattern for date type
+       if ("id_Date".equals(diType) && fieldPattern != null) {
+           field.addProp(SchemaConstants.TALEND_COLUMN_PATTERN, fieldPattern);
+       }
+       if (isKey) {
+           field.addProp(SchemaConstants.TALEND_COLUMN_IS_KEY, "true");
+       }
+       dynamicFields.add(field);
+   }

    /**
     * Recreates dynamic field from parameters retrieved from DI dynamic metadata
     *
@@ -154,21 +215,10 @@ public class IncomingSchemaEnforcer {
     * @param description dynamic field description
     * @param isNullable defines whether dynamic field may contain <code>null</code> value
     */
+   @Deprecated
    public void addDynamicField(String name, String diType, String logicalType, String fieldPattern, String description,
            boolean isNullable) {
-       if (!needsInitDynamicColumns())
-           return;
-       Schema fieldSchema = diToAvro(diType, logicalType);
-
-       if (isNullable) {
-           fieldSchema = SchemaBuilder.nullable().type(fieldSchema);
-       }
-       Schema.Field field = new Schema.Field(name, fieldSchema, description, (Object) null);
-       // Set pattern for date type
-       if ("id_Date".equals(diType) && fieldPattern != null) {
-           field.addProp(SchemaConstants.TALEND_COLUMN_PATTERN, fieldPattern);
-       }
-       dynamicFields.add(field);
+       addDynamicField(name, diType, logicalType, fieldPattern, description, isNullable, false);
    }

    public void addIncomingNodeField(String name, String className) {
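The fallback added above can be illustrated with plain Apache Avro, no Talend classes needed: a field name that fails Avro's name check throws SchemaParseException at Schema.Field construction, which is exactly what triggers the NameUtil.correct(...) path. A minimal sketch (the substitute name "address_" is hypothetical; the real code derives it from NameUtil):

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.SchemaParseException;

    public class InvalidNameDemo {
        public static void main(String[] args) {
            Schema stringSchema = SchemaBuilder.builder().stringType();
            try {
                // '#' is not allowed in an Avro name, so this throws.
                new Schema.Field("address#", stringSchema, null, (Object) null);
            } catch (SchemaParseException e) {
                // IncomingSchemaEnforcer reacts by generating a valid substitute
                // name and recording the original one in the
                // TALEND_COLUMN_DB_COLUMN_NAME property, as the hunk above shows.
                Schema.Field field = new Schema.Field("address_", stringSchema, null, (Object) null);
                System.out.println("substituted: " + field.name());
            }
        }
    }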
@@ -250,6 +300,8 @@ public class IncomingSchemaEnforcer {
            fieldSchema = AvroUtils._decimal();
        } else if ("id_Date".equals(diType)) {
            fieldSchema = AvroUtils._date();
+       } else if ("id_byte[]".equals(diType)) {
+           fieldSchema = AvroUtils._bytes();
        } else {
            throw new UnsupportedOperationException("Unrecognized type " + diType);
        }
@@ -369,6 +421,9 @@ public class IncomingSchemaEnforcer {
        return designSchema;
    }

+   // Here we do special processing for dynamic column names. In fact the same issue
+   // (Japanese or other special characters used as a column label) also affects basic
+   // Talend columns, not only dynamic ones, but that case is not handled here; that's a TODO.
    /**
     * Converts DI data value to Avro format and put it into record by field name
     *
@@ -376,9 +431,16 @@ public class IncomingSchemaEnforcer {
     * @param diValue data value
     */
    public void put(String name, Object diValue) {
+       if (unvalidName2ValidName != null) {
+           String validName = unvalidName2ValidName.get(name);
+           if (validName != null) {
+               name = validName;
+           }
+       }
        put(columnToFieldIndex.get(name), diValue);
    }

+   // TODO: make this private; it is only called from within this class
    /**
     * Converts DI data value to Avro format and put it into record by field index
     *
@@ -52,6 +52,8 @@ public class IncomingSchemaEnforcerTest {
     */
    private IndexedRecord componentRecord;

+   private IndexedRecord componentRecordWithSpecialName;
+
    @Rule
    public ExpectedException thrown = ExpectedException.none();

@@ -72,9 +74,29 @@ public class IncomingSchemaEnforcerTest {
        componentRecord.put(3, true);
        componentRecord.put(4, "Main Street");
        componentRecord.put(5, "This is a record with six columns.");

+       Schema componentSchemaWithSpecialName = SchemaBuilder.builder().record("Record").fields() //
+               .name("id").type().intType().noDefault() //
+               .name("name").type().stringType().noDefault() //
+               .name("age").type().intType().noDefault() //
+               .name("性别").type().booleanType().noDefault() // this does not store the original name because it passes the Avro name check; arguably an Avro bug
+               .name("address_").prop(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME, "address#").type().stringType().noDefault() //
+               .name("comment_").prop(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME, "comment$").type().stringType().noDefault() //
+               .endRecord();
+       componentRecordWithSpecialName = new GenericData.Record(componentSchemaWithSpecialName);
+       componentRecordWithSpecialName.put(0, 1);
+       componentRecordWithSpecialName.put(1, "User");
+       componentRecordWithSpecialName.put(2, 100);
+       componentRecordWithSpecialName.put(3, true);
+       componentRecordWithSpecialName.put(4, "Main Street");
+       componentRecordWithSpecialName.put(5, "This is a record with six columns.");
    }

+   private void checkEnforcerWithComponentRecordData(IncomingSchemaEnforcer enforcer) {
+       checkEnforcerWithComponentRecordData(enforcer, false);
+   }
+
    private void checkEnforcerWithComponentRecordData(IncomingSchemaEnforcer enforcer, boolean specialName) {
        // The enforcer must be ready to receive values.
        assertThat(enforcer.needsInitDynamicColumns(), is(false));

@@ -88,15 +110,25 @@ public class IncomingSchemaEnforcerTest {
        IndexedRecord adapted = enforcer.createIndexedRecord();

        // Ensure that the result is the same as the expected component record.
-       assertThat(adapted, is(componentRecord));
+       if (specialName) {
+           assertThat(adapted, is(componentRecordWithSpecialName));
+       } else {
+           assertThat(adapted, is(componentRecord));
+       }

        // Ensure that we create a new instance when we give it another value.
        enforcer.put("id", 2);
        enforcer.put("name", "User2");
        enforcer.put("age", 200);
-       enforcer.put("valid", false);
-       enforcer.put("address", "2 Main Street");
-       enforcer.put("comment", "2 This is a record with six columns.");
+       if (specialName) {
+           enforcer.put("性别", false);
+           enforcer.put("address#", "2 Main Street");
+           enforcer.put("comment$", "2 This is a record with six columns.");
+       } else {
+           enforcer.put("valid", false);
+           enforcer.put("address", "2 Main Street");
+           enforcer.put("comment", "2 This is a record with six columns.");
+       }
        IndexedRecord adapted2 = enforcer.createIndexedRecord();

        // It should have the same schema, but not be the same instance.
@@ -392,6 +424,39 @@ public class IncomingSchemaEnforcerTest {
        checkEnforcerWithComponentRecordData(enforcer);
    }

+   @Test
+   public void testDynamicColumnWithSpecialName() {
+       Schema designSchema = SchemaBuilder.builder().record("Record") //
+               .prop(DiSchemaConstants.TALEND6_DYNAMIC_COLUMN_POSITION, "3") //
+               .prop(SchemaConstants.INCLUDE_ALL_FIELDS, "true") //
+               .fields() //
+               .name("id").type().intType().noDefault() //
+               .name("name").type().stringType().noDefault() //
+               .name("age").type().intType().noDefault() //
+               .endRecord();
+
+       IncomingSchemaEnforcer enforcer = new IncomingSchemaEnforcer(designSchema);
+
+       // The enforcer isn't usable yet.
+       assertThat(enforcer.getDesignSchema(), is(designSchema));
+       assertFalse(enforcer.areDynamicFieldsInitialized());
+       assertThat(enforcer.getRuntimeSchema(), nullValue());
+
+       enforcer.addDynamicField("性别", "id_Boolean", null, null, null, false, false);
+       enforcer.addDynamicField("address#", "id_String", null, null, null, false, false);
+       enforcer.addDynamicField("comment$", "id_String", null, null, null, false, false);
+       assertFalse(enforcer.areDynamicFieldsInitialized());
+       enforcer.createRuntimeSchema();
+       assertTrue(enforcer.areDynamicFieldsInitialized());
+
+       // Check the run-time schema was created.
+       assertThat(enforcer.getDesignSchema(), is(designSchema));
+       assertThat(enforcer.getRuntimeSchema(), not(nullValue()));
+
+       // Put values into the enforcer and get them as an IndexedRecord.
+       checkEnforcerWithComponentRecordData(enforcer, true);
+   }
+
    @Test
    public void testTypeConversion_toDate() {
        // The expected schema after enforcement.
@@ -699,6 +764,28 @@ public class IncomingSchemaEnforcerTest {
        assertThat(record.get(1), is((Object) new Date(1234567891011L)));
    }

+   /**
+    * Checks key field setting
+    */
+   @Test
+   public void testAddDynamicFieldKey() {
+       Schema expectedRuntimeSchema = SchemaBuilder.builder().record("Record").fields().name("id")
+               .prop(SchemaConstants.TALEND_COLUMN_IS_KEY, "true").type().intType().noDefault().endRecord();
+
+       Schema designSchema = SchemaBuilder.builder().record("Record").prop(SchemaConstants.INCLUDE_ALL_FIELDS, "true")
+               .prop(DiSchemaConstants.TALEND6_DYNAMIC_COLUMN_POSITION, "0").fields().endRecord();
+
+       IncomingSchemaEnforcer enforcer = new IncomingSchemaEnforcer(designSchema);
+
+       enforcer.addDynamicField("id", "id_Integer", null, null, null, false, true);
+
+       enforcer.createRuntimeSchema();
+       assertTrue(enforcer.areDynamicFieldsInitialized());
+
+       Schema actualRuntimeSchema = enforcer.getRuntimeSchema();
+       assertEquals(expectedRuntimeSchema, actualRuntimeSchema);
+   }
+
    /**
     * Checks {@link IncomingSchemaEnforcer#put()} converts string value to date according to the pattern specified in the dynamic field
     * TODO (iv.gonchar): this is incorrect behavior, because avro record should not contain java.util.Date value. It should store
@@ -45,9 +45,9 @@
      <scope>test</scope>
    </dependency>
    <dependency>
-     <groupId>log4j</groupId>
-     <artifactId>log4j</artifactId>
-     <version>1.2.17</version>
+     <groupId>ch.qos.reload4j</groupId>
+     <artifactId>reload4j</artifactId>
+     <version>1.2.19</version>
      <scope>test</scope>
    </dependency>
    <dependency>

@@ -4,7 +4,7 @@
  <groupId>org.talend</groupId>
  <artifactId>talend-httputil</artifactId>
  <name>talend-httputil</name>
- <version>1.0.5</version>
+ <version>1.0.6</version>

  <properties>
    <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -20,7 +20,7 @@
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
-     <version>2.10.1</version>
+     <version>2.11.4</version>
    </dependency>

    <dependency>

@@ -69,13 +69,13 @@
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-api</artifactId>
-     <version>2.12.1</version>
+     <version>2.17.1</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
-     <version>2.12.1</version>
+     <version>2.17.1</version>
      <optional>true</optional>
    </dependency>
  </dependencies>
@@ -97,4 +97,4 @@
    </pluginManagement>
  </build>

-</project>
+</project>

@@ -10,7 +10,6 @@

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-   <cxf.version>3.1.1</cxf.version>
    <odata.version>4.3.0</odata.version>
    <slf4j.version>1.7.12</slf4j.version>
    <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -46,9 +45,9 @@
      <version>1.7.4</version>
    </dependency>
    <dependency>
-     <groupId>log4j</groupId>
-     <artifactId>log4j</artifactId>
-     <version>1.2.5</version>
+     <groupId>ch.qos.reload4j</groupId>
+     <artifactId>reload4j</artifactId>
+     <version>1.2.19</version>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>

@@ -0,0 +1,191 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>org.talend.components</groupId>
  <artifactId>talend-orc</artifactId>
  <version>1.0-20211008</version>
  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <hadoop.version>3.2.2</hadoop.version>
    <apache.orc.version>1.7.0</apache.orc.version>
    <junit.jupiter.version>5.7.2</junit.jupiter.version>
    <hamcrest.version>1.3</hamcrest.version>
    <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
  </properties>

  <distributionManagement>
    <snapshotRepository>
      <id>talend_nexus_deployment</id>
      <url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
      <snapshots>
        <enabled>true</enabled>
      </snapshots>
      <releases>
        <enabled>false</enabled>
      </releases>
    </snapshotRepository>
    <repository>
      <id>talend_nexus_deployment</id>
      <url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
      <releases>
        <enabled>true</enabled>
      </releases>
    </repository>
  </distributionManagement>

  <dependencies>
    <dependency>
      <groupId>org.apache.orc</groupId>
      <artifactId>orc-core</artifactId>
      <version>${apache.orc.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <groupId>org.eclipse.jetty</groupId>
          <artifactId>jetty-server</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.eclipse.jetty</groupId>
          <artifactId>jetty-servlet</artifactId>
        </exclusion>
        <exclusion>
          <artifactId>jersey-server</artifactId>
          <groupId>com.sun.jersey</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jersey-servlet</artifactId>
          <groupId>com.sun.jersey</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-log4j12</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>log4j</artifactId>
          <groupId>log4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>zookeeper</artifactId>
          <groupId>org.apache.zookeeper</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jetty-webapp</artifactId>
          <groupId>org.eclipse.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>javax.servlet-api</artifactId>
          <groupId>javax.servlet</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-client</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <groupId>org.eclipse.jetty</groupId>
          <artifactId>jetty-server</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.eclipse.jetty</groupId>
          <artifactId>jetty-servlet</artifactId>
        </exclusion>
        <exclusion>
          <artifactId>jersey-server</artifactId>
          <groupId>com.sun.jersey</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jersey-servlet</artifactId>
          <groupId>com.sun.jersey</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-log4j12</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>log4j</artifactId>
          <groupId>log4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>zookeeper</artifactId>
          <groupId>org.apache.zookeeper</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jetty-webapp</artifactId>
          <groupId>org.eclipse.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>javax.servlet-api</artifactId>
          <groupId>javax.servlet</groupId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
      <version>${junit.jupiter.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-api</artifactId>
      <version>${junit.jupiter.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-engine</artifactId>
      <version>${junit.jupiter.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-params</artifactId>
      <version>${junit.jupiter.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.platform</groupId>
      <artifactId>junit-platform-launcher</artifactId>
      <version>1.7.2</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.hamcrest</groupId>
      <artifactId>hamcrest-library</artifactId>
      <version>${hamcrest.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.hamcrest</groupId>
      <artifactId>hamcrest-library</artifactId>
      <version>${hamcrest.version}</version>
      <scope>test</scope>
    </dependency>
  </dependencies>


  <build>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.8.0</version>
        <configuration>
          <source>1.8</source>
          <target>1.8</target>
        </configuration>
      </plugin>

    </plugins>
  </build>
</project>
@@ -0,0 +1,24 @@
package org.talend.orc;

import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;

abstract class ORCCommonUtils {
    /**
     * Check that the map key type is BYTES, LONG or DOUBLE and that the value
     * type is LONG, DOUBLE, BYTES, DECIMAL or TIMESTAMP.
     *
     * @param mapVector a MapColumnVector
     * @return true if the key and value types conform to the limits described
     *         above.
     */
    public static boolean checkMapColumnVectorTypes(MapColumnVector mapVector) {
        ColumnVector.Type keyType = mapVector.keys.type;
        ColumnVector.Type valueType = mapVector.values.type;
        return (keyType == ColumnVector.Type.BYTES || keyType == ColumnVector.Type.LONG
                || keyType == ColumnVector.Type.DOUBLE)
                && (valueType == ColumnVector.Type.LONG || valueType == ColumnVector.Type.DOUBLE
                        || valueType == ColumnVector.Type.BYTES || valueType == ColumnVector.Type.DECIMAL
                        || valueType == ColumnVector.Type.TIMESTAMP);
    }
}
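A small sketch of what this guard accepts, assuming the standard ORC TypeDescription API: a map<string,bigint> column yields BYTES keys and LONG values, which pass the check. The demo lives in org.talend.orc because ORCCommonUtils is package-private.

    package org.talend.orc;

    import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.orc.TypeDescription;

    public class MapCheckDemo {
        public static void main(String[] args) {
            TypeDescription schema = TypeDescription.fromString("struct<tags:map<string,bigint>>");
            VectorizedRowBatch batch = schema.createRowBatch();
            MapColumnVector mapVector = (MapColumnVector) batch.cols[0];
            // string keys -> BYTES vector, bigint values -> LONG vector: supported.
            System.out.println(ORCCommonUtils.checkMapColumnVectorTypes(mapVector)); // true
        }
    }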
@@ -0,0 +1,442 @@
package org.talend.orc;

import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.TypeDescription;

public class ORCReadUtils {
    public static Object readColumnByName(VectorizedRowBatch batch, String columnName, TypeDescription schema,
            int rowNum) {
        List<String> allColumnNames = schema.getFieldNames();
        int colIndex = allColumnNames.indexOf(columnName);
        if (colIndex < 0 || colIndex > batch.cols.length - 1) {
            return null;
        } else {
            org.apache.hadoop.hive.ql.exec.vector.ColumnVector colVector = batch.cols[colIndex];
            TypeDescription fieldType = schema.getChildren().get(colIndex);
            int colRow = colVector.isRepeating ? 0 : rowNum;
            Object value = readColumn(colVector, fieldType, colRow);
            return value;
        }
    }

    public static Object readColumn(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object columnObj = null;
        if (!colVec.isNull[rowNum]) {
            switch (colVec.type) {
            case BYTES:
                columnObj = readBytesVal(colVec, colType, rowNum);
                break;
            case DECIMAL:
                columnObj = readDecimalVal(colVec, rowNum);
                break;
            case DOUBLE:
                columnObj = readDoubleVal(colVec, colType, rowNum);
                break;
            case LIST:
                columnObj = readListVal(colVec, colType, rowNum);
                break;
            case LONG:
                columnObj = readLongVal(colVec, colType, rowNum);
                break;
            case MAP:
                columnObj = readMapVal(colVec, colType, rowNum);
                break;
            case STRUCT:
                columnObj = readStructVal(colVec, colType, rowNum);
                break;
            case TIMESTAMP:
                columnObj = readTimestampVal(colVec, colType, rowNum);
                break;
            case UNION:
                columnObj = readUnionVal(colVec, colType, rowNum);
                break;
            default:
                throw new RuntimeException("readColumn: unsupported ORC file column type: " + colVec.type.name());
            }
        }
        return columnObj;
    }

    private static Object readListVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object listValues = null;
        if (!colVec.isNull[rowNum]) {
            ListColumnVector listVector = (ListColumnVector) colVec;
            ColumnVector listChildVector = listVector.child;
            TypeDescription childType = colType.getChildren().get(0);
            switch (listChildVector.type) {
            case BYTES:
                listValues = readBytesListValues(listVector, childType, rowNum);
                break;
            case DECIMAL:
                listValues = readDecimalListValues(listVector, rowNum);
                break;
            case DOUBLE:
                listValues = readDoubleListValues(listVector, rowNum);
                break;
            case LONG:
                listValues = readLongListValues(listVector, childType, rowNum);
                break;
            case TIMESTAMP:
                listValues = readTimestampListValues(listVector, childType, rowNum);
                break;
            default:
                throw new RuntimeException(listVector.type.name() + " is not supported for ListColumnVectors");
            }
        }
        return listValues;
    }

    private static List<Object> readLongListVector(LongColumnVector longVector, TypeDescription childType, int offset,
            int numValues) {
        List<Object> longList = new ArrayList<>();
        for (int i = 0; i < numValues; i++) {
            if (!longVector.isNull[offset + i]) {
                long longVal = longVector.vector[offset + i];
                if (childType.getCategory() == TypeDescription.Category.BOOLEAN) {
                    Boolean boolVal = longVal == 0 ? Boolean.valueOf(false) : Boolean.valueOf(true);
                    longList.add(boolVal);
                } else if (childType.getCategory() == TypeDescription.Category.INT) {
                    Integer intObj = (int) longVal;
                    longList.add(intObj);
                } else {
                    longList.add(longVal);
                }
            } else {
                longList.add(null);
            }
        }
        return longList;
    }

    private static Object readLongListValues(ListColumnVector listVector, TypeDescription childType, int rowNum) {
        int offset = (int) listVector.offsets[rowNum];
        int numValues = (int) listVector.lengths[rowNum];
        LongColumnVector longVector = (LongColumnVector) listVector.child;
        return readLongListVector(longVector, childType, offset, numValues);
    }

    private static Object readTimestampListVector(TimestampColumnVector timestampVector, TypeDescription childType,
            int offset, int numValues) {
        List<Object> timestampList = new ArrayList<>();
        for (int i = 0; i < numValues; i++) {
            if (!timestampVector.isNull[offset + i]) {
                int nanos = timestampVector.nanos[offset + i];
                long millisec = timestampVector.time[offset + i];
                Timestamp timestamp = new Timestamp(millisec);
                timestamp.setNanos(nanos);
                if (childType.getCategory() == TypeDescription.Category.DATE) {
                    Date date = new Date(timestamp.getTime());
                    timestampList.add(date);
                } else {
                    timestampList.add(timestamp);
                }
            } else {
                timestampList.add(null);
            }
        }
        return timestampList;
    }

    /**
     * Read either Timestamp or Date values, depending on the definition in the
     * schema.
     */
    private static Object readTimestampListValues(ListColumnVector listVector, TypeDescription childType, int rowNum) {
        int offset = (int) listVector.offsets[rowNum];
        int numValues = (int) listVector.lengths[rowNum];
        TimestampColumnVector timestampVec = (TimestampColumnVector) listVector.child;
        return readTimestampListVector(timestampVec, childType, offset, numValues);
    }

    private static Object readDecimalListVector(DecimalColumnVector decimalVector, int offset, int numValues,
            int batchRowNum) {
        List<Object> decimalList = new ArrayList<>();
        for (int i = 0; i < numValues; i++) {
            if (!decimalVector.isNull[offset + i]) {
                BigDecimal bigDecimal = decimalVector.vector[batchRowNum].getHiveDecimal().bigDecimalValue();
                decimalList.add(bigDecimal);
            } else {
                decimalList.add(null);
            }
        }
        return decimalList;
    }

    private static Object readDecimalListValues(ListColumnVector listVector, int rowNum) {
        int offset = (int) listVector.offsets[rowNum];
        int numValues = (int) listVector.lengths[rowNum];
        DecimalColumnVector decimalVec = (DecimalColumnVector) listVector.child;
        return readDecimalListVector(decimalVec, offset, numValues, rowNum);
    }

    private static Object readBytesListVector(BytesColumnVector bytesVec, TypeDescription childType, int offset,
            int numValues) {
        List<Object> bytesValList = new ArrayList<>();
        for (int i = 0; i < numValues; i++) {
            if (!bytesVec.isNull[offset + i]) {
                byte[] byteArray = bytesVec.vector[offset + i];
                int vecLen = bytesVec.length[offset + i];
                int vecStart = bytesVec.start[offset + i];
                byte[] vecCopy = Arrays.copyOfRange(byteArray, vecStart, vecStart + vecLen);
                if (childType.getCategory() == TypeDescription.Category.STRING) {
                    String str = new String(vecCopy);
                    bytesValList.add(str);
                } else {
                    bytesValList.add(vecCopy);
                }
            } else {
                bytesValList.add(null);
            }
        }
        return bytesValList;
    }

    private static Object readBytesListValues(ListColumnVector listVector, TypeDescription childType, int rowNum) {
        int offset = (int) listVector.offsets[rowNum];
        int numValues = (int) listVector.lengths[rowNum];
        BytesColumnVector bytesVec = (BytesColumnVector) listVector.child;
        return readBytesListVector(bytesVec, childType, offset, numValues);
    }

    private static Object readDoubleListVector(DoubleColumnVector doubleVec, int offset, int numValues) {
        List<Object> doubleList = new ArrayList<>();
        for (int i = 0; i < numValues; i++) {
            if (!doubleVec.isNull[offset + i]) {
                Double doubleVal = doubleVec.vector[offset + i];
                doubleList.add(doubleVal);
            } else {
                doubleList.add(null);
            }
        }
        return doubleList;
    }

    private static Object readDoubleListValues(ListColumnVector listVector, int rowNum) {
        int offset = (int) listVector.offsets[rowNum];
        int numValues = (int) listVector.lengths[rowNum];
        DoubleColumnVector doubleVec = (DoubleColumnVector) listVector.child;
        return readDoubleListVector(doubleVec, offset, numValues);
    }

    @SuppressWarnings("unchecked")
    private static List<Object> readMapVector(ColumnVector mapVector, TypeDescription childType, int offset,
            int numValues, int rowNum) {
        List<Object> mapList = null;
        switch (mapVector.type) {
        case BYTES:
            mapList = (List<Object>) readBytesListVector((BytesColumnVector) mapVector, childType, offset, numValues);
            break;
        case DECIMAL:
            mapList = (List<Object>) readDecimalListVector((DecimalColumnVector) mapVector, offset, numValues, rowNum);
            break;
        case DOUBLE:
            mapList = (List<Object>) readDoubleListVector((DoubleColumnVector) mapVector, offset, numValues);
            break;
        case LONG:
            mapList = readLongListVector((LongColumnVector) mapVector, childType, offset, numValues);
            break;
        case TIMESTAMP:
            mapList = (List<Object>) readTimestampListVector((TimestampColumnVector) mapVector, childType, offset,
                    numValues);
            break;
        default:
            throw new RuntimeException(mapVector.type.name() + " is not supported for MapColumnVectors");
        }
        return mapList;
    }

    /**
     * <p>
     * Read a Map column value (e.g., a set of keys and their associated values).
     * </p>
     * <p>
     * The Map key and value types are the first and second children in the children
     * TypeDescription List. From the TypeDescription source:
     * </p>
     *
     * <pre>
     * result.children.add(keyType);
     * result.children.add(valueType);
     * </pre>
     */
    private static Object readMapVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Map<Object, Object> objMap = new HashMap<>();
        MapColumnVector mapVector = (MapColumnVector) colVec;
        if (ORCCommonUtils.checkMapColumnVectorTypes(mapVector)) {
            int mapSize = (int) mapVector.lengths[rowNum];
            int offset = (int) mapVector.offsets[rowNum];
            List<TypeDescription> mapTypes = colType.getChildren();
            TypeDescription keyType = mapTypes.get(0);
            TypeDescription valueType = mapTypes.get(1);
            ColumnVector keyChild = mapVector.keys;
            ColumnVector valueChild = mapVector.values;
            List<Object> keyList = readMapVector(keyChild, keyType, offset, mapSize, rowNum);
            List<Object> valueList = readMapVector(valueChild, valueType, offset, mapSize, rowNum);
            assert (keyList.size() == valueList.size());
            for (int i = 0; i < keyList.size(); i++) {
                objMap.put(keyList.get(i), valueList.get(i));
            }
        } else {
            throw new RuntimeException("readMapVal: unsupported key or value types");
        }
        return objMap;
    }

    private static Object readUnionVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Pair<TypeDescription, Object> columnValuePair;
        UnionColumnVector unionVector = (UnionColumnVector) colVec;
        int tagVal = unionVector.tags[rowNum];
        List<TypeDescription> unionFieldTypes = colType.getChildren();
        if (tagVal < unionFieldTypes.size()) {
            TypeDescription fieldType = unionFieldTypes.get(tagVal);
            if (tagVal < unionVector.fields.length) {
                ColumnVector fieldVector = unionVector.fields[tagVal];
                int colRow = fieldVector.isRepeating ? 0 : rowNum;
                Object unionValue = readColumn(fieldVector, fieldType, colRow);
                columnValuePair = new ImmutablePair<>(fieldType, unionValue);
            } else {
                throw new RuntimeException("readUnionVal: union tag value out of range for union column vectors");
            }
        } else {
            throw new RuntimeException("readUnionVal: union tag value out of range for union types");
        }
        return columnValuePair;
    }

    private static Object readStructVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object structObj = null;
        if (!colVec.isNull[rowNum]) {
            List<Object> fieldValList = new ArrayList<>();
            StructColumnVector structVector = (StructColumnVector) colVec;
            ColumnVector[] fieldVec = structVector.fields;
            List<TypeDescription> fieldTypes = colType.getChildren();
            assert (fieldVec.length == fieldTypes.size());
            for (int i = 0; i < fieldVec.length; i++) {
                int colRow = fieldVec[i].isRepeating ? 0 : rowNum;
                Object fieldObj = readColumn(fieldVec[i], fieldTypes.get(i), colRow);
                fieldValList.add(fieldObj);
            }
            structObj = fieldValList;
        }
        return structObj;
    }

    private static Object readTimestampVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object timestampVal = null;
        if (!colVec.isNull[rowNum]) {
            TimestampColumnVector timestampVec = (TimestampColumnVector) colVec;
            int nanos = timestampVec.nanos[rowNum];
            long millisec = timestampVec.time[rowNum];
            Timestamp timestamp = new Timestamp(millisec);
            timestamp.setNanos(nanos);
            timestampVal = timestamp;
            if (colType.getCategory() == TypeDescription.Category.DATE) {
                timestampVal = new Date(timestamp.getTime());
            }
        }
        return timestampVal;
    }

    private static Object readDecimalVal(ColumnVector colVec, int rowNum) {
        Object decimalObj = null;
        if (!colVec.isNull[rowNum]) {
            DecimalColumnVector decimalVec = (DecimalColumnVector) colVec;
            decimalObj = decimalVec.vector[rowNum].getHiveDecimal().bigDecimalValue().setScale(decimalVec.scale);
        }
        return decimalObj;
    }

    /**
     * Read a Long or Boolean value
     *
     * @param colVec the column vector
     * @param colType the type of the column
     * @param rowNum the ORC file row number.
     * @return a Boolean or Long object
     */
    private static Object readLongVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object colObj = null;
        if (!colVec.isNull[rowNum]) {
            LongColumnVector longVec = (LongColumnVector) colVec;
            Long longVal = longVec.vector[rowNum];
            colObj = longVal;
            if (colType.getCategory() == TypeDescription.Category.INT) {
                colObj = longVal.intValue();
            } else if (colType.getCategory() == TypeDescription.Category.BOOLEAN) {
                colObj = longVal == 1 ? Boolean.TRUE : Boolean.FALSE;
            } else if (colType.getCategory() == TypeDescription.Category.DATE) {
                colObj = new Date(longVal * 86400000);
            } else if (colType.getCategory() == TypeDescription.Category.BYTE) {
                colObj = longVal.byteValue();
            } else if (colType.getCategory() == TypeDescription.Category.SHORT) {
                colObj = longVal.shortValue();
            }
        }
        return colObj;
    }

    /**
     * Read a Double or Float value
     *
     * @param colVec the column vector
     * @param colType the type of the column
     * @param rowNum the ORC file row number.
     * @return a Double or Float object
     */
    private static Object readDoubleVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object colObj = null;
        if (!colVec.isNull[rowNum]) {
            DoubleColumnVector longVec = (DoubleColumnVector) colVec;
            Double doubleVal = longVec.vector[rowNum];
            colObj = doubleVal;
            if (colType.getCategory() == TypeDescription.Category.FLOAT) {
                colObj = doubleVal.floatValue();
            }
        }
        return colObj;
    }

    private static Object readBytesVal(ColumnVector colVec, TypeDescription colType, int rowNum) {
        Object bytesObj = null;
        if (!colVec.isNull[rowNum]) {
            BytesColumnVector bytesVector = (BytesColumnVector) colVec;
            byte[] columnBytes = bytesVector.vector[rowNum];
            int vecLen = bytesVector.length[rowNum];
            int vecStart = bytesVector.start[rowNum];
            byte[] vecCopy = Arrays.copyOfRange(columnBytes, vecStart, vecStart + vecLen);
            if (colType.getCategory() == TypeDescription.Category.STRING || colType.getCategory() == TypeDescription.Category.VARCHAR) {
                bytesObj = new String(vecCopy);
            } else if (colType.getCategory() == TypeDescription.Category.CHAR) {
                String charStr = new String(vecCopy);
                bytesObj = charStr;
            } else {
                bytesObj = vecCopy;
            }
        }
        return bytesObj;
    }
}
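For orientation, a hedged read-side sketch combining these helpers with the core ORC reader API; "data.orc" and the "age" column are hypothetical, and the loop mirrors the standard ORC core example (nextBatch fills the reusable batch, and isRepeating rows are resolved inside readColumnByName).

    package org.talend.orc;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.orc.OrcFile;
    import org.apache.orc.Reader;
    import org.apache.orc.RecordReader;
    import org.apache.orc.TypeDescription;

    public class OrcReadDemo {
        public static void main(String[] args) throws Exception {
            Reader reader = OrcFile.createReader(new Path("data.orc"),
                    OrcFile.readerOptions(new Configuration()));
            TypeDescription schema = reader.getSchema();
            VectorizedRowBatch batch = schema.createRowBatch();
            RecordReader rows = reader.rows();
            while (rows.nextBatch(batch)) {
                for (int r = 0; r < batch.size; r++) {
                    // Delegates to the type-specific readers defined above.
                    Object age = ORCReadUtils.readColumnByName(batch, "age", schema, r);
                    System.out.println(age);
                }
            }
            rows.close();
        }
    }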
@@ -0,0 +1,884 @@
|
||||
package org.talend.orc;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.Timestamp;
|
||||
import java.time.LocalDate;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TimeZone;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.apache.hadoop.hive.common.type.HiveDecimal;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.MultiValuedColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
|
||||
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
|
||||
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
|
||||
import org.apache.orc.TypeDescription;
|
||||
|
||||
public class ORCWriteUtils {
|
||||
private static final Predicate<Object> isInteger = Integer.class::isInstance;
|
||||
private static final Predicate<Object> isLong = Long.class::isInstance;
|
||||
private static final Predicate<Object> isDouble = Double.class::isInstance;
|
||||
private static final Predicate<Object> isString = String.class::isInstance;
|
||||
private static final Predicate<Object> isBigDecimal = BigDecimal.class::isInstance;
|
||||
private static final Predicate<Object> isDate = Date.class::isInstance;
|
||||
|
||||
public static void setColumnByName(VectorizedRowBatch batch, String columnName, TypeDescription schema,
|
||||
Object colVal, int rowNum) {
|
||||
List<String> allColumnNames = schema.getFieldNames();
|
||||
int colIndex = allColumnNames.indexOf(columnName);
|
||||
if (colIndex < 0 || colIndex > batch.cols.length - 1) {
|
||||
return;
|
||||
} else {
|
||||
org.apache.hadoop.hive.ql.exec.vector.ColumnVector colVector = batch.cols[colIndex];
|
||||
TypeDescription fieldType = schema.getChildren().get(colIndex);
|
||||
setColumn(colVal, fieldType, columnName, colVector, rowNum);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a column value that is a String or a byte[] array.
|
||||
*
|
||||
* @param colVal the column value object
|
||||
* @param fieldName the name of the field (for error reporting)
|
||||
* @param bytesColVector the BytesColumnVector that the byte array will be added
|
||||
* to.
|
||||
* @param rowNum the ORC file row number
|
||||
*/
|
||||
private static void setByteColumnVector(Object colVal, String fieldName, BytesColumnVector bytesColVector,
|
||||
int rowNum) {
|
||||
if (colVal instanceof byte[] || colVal instanceof String || colVal instanceof Character) {
|
||||
byte[] byteVec;
|
||||
if (colVal instanceof String) {
|
||||
String strVal = (String) colVal;
|
||||
byteVec = strVal.getBytes(StandardCharsets.UTF_8);
|
||||
} else if (colVal instanceof Character) {
|
||||
String strVal = String.valueOf((char) colVal);
|
||||
byteVec = strVal.getBytes(StandardCharsets.UTF_8);
|
||||
} else {
|
||||
byteVec = (byte[]) colVal;
|
||||
}
|
||||
bytesColVector.setRef(rowNum, byteVec, 0, byteVec.length);
|
||||
} else {
|
||||
throw new RuntimeException(orcExceptionMsg("byte[] or String type expected for field ", fieldName, rowNum));
|
||||
}
|
||||
}
|
||||
|
||||
private static void setDecimalVector(Object colVal, String fieldName, DecimalColumnVector decimalColVector,
|
||||
int rowNum) {
|
||||
if (colVal instanceof BigDecimal) {
|
||||
BigDecimal bigDecimal = (BigDecimal) colVal;
|
||||
decimalColVector.precision = (short) bigDecimal.precision();
|
||||
decimalColVector.scale = (short) bigDecimal.scale();
|
||||
HiveDecimal hiveDecimal = HiveDecimal.create(bigDecimal);
|
||||
HiveDecimalWritable writeableDecimal = new HiveDecimalWritable(hiveDecimal);
|
||||
decimalColVector.vector[rowNum] = writeableDecimal;
|
||||
} else {
|
||||
throw new RuntimeException(orcExceptionMsg("BigDecimal type expected for field ", fieldName, rowNum));
|
||||
}
|
||||
}
|
||||
|
||||
private static void setDoubleVector(Object colVal, String fieldName, DoubleColumnVector doubleVector, int rowNum) {
|
||||
if (colVal instanceof Double) {
|
||||
doubleVector.vector[rowNum] = (Double) colVal;
|
||||
} else if (colVal instanceof Float) {
|
||||
Float fltVal = (Float) colVal;
|
||||
doubleVector.vector[rowNum] = fltVal.doubleValue();
|
||||
} else if (colVal instanceof BigDecimal) {
|
||||
doubleVector.vector[rowNum] = ((BigDecimal) colVal).doubleValue();
|
||||
} else {
|
||||
throw new RuntimeException(orcExceptionMsg("Double/Float/BigDecimal type expected for field ", fieldName, rowNum));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize a LongColumnVector value.
|
||||
*
|
||||
* @param colVal an object of type Boolean, Integer, Long or BigInteger.
|
||||
* @param fieldName the field name in the schema
|
||||
* @param longVector the LongColumnVector
|
||||
* @param rowNum the row number
|
||||
*/
|
||||
private static void setLongColumnVector(Object colVal, String fieldName, LongColumnVector longVector, int rowNum) {
|
||||
if (colVal instanceof Boolean) {
|
||||
Boolean bool = (Boolean) colVal;
|
||||
longVector.vector[rowNum] = (bool.equals(Boolean.TRUE)) ? Long.valueOf(1) : Long.valueOf(0);
|
||||
} else if (colVal instanceof Byte) {
|
||||
longVector.vector[rowNum] = (Byte) colVal;
|
||||
} else if (colVal instanceof Short) {
|
||||
longVector.vector[rowNum] = (Short) colVal;
|
||||
} else if (colVal instanceof Integer) {
|
||||
longVector.vector[rowNum] = (Integer) colVal;
|
||||
} else if (colVal instanceof Long) {
|
||||
longVector.vector[rowNum] = (Long) colVal;
|
||||
} else if (colVal instanceof BigInteger) {
|
||||
BigInteger bigInt = (BigInteger) colVal;
|
||||
longVector.vector[rowNum] = bigInt.longValue();
|
||||
} else {
|
||||
throw new RuntimeException(orcExceptionMsg("Long or Integer type expected for field ", fieldName, rowNum));
|
||||
}
|
||||
}
|
||||
|
||||
private static void setDateColumnVector(Object colVal, String fieldName, DateColumnVector dateVector, int rowNum) {
|
||||
if (colVal instanceof Date) {
|
||||
Date dateVal = (Date) colVal;
|
||||
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
|
||||
cal.setTime(dateVal);
|
||||
long epochDay = LocalDate.of(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH)+1, cal.get(Calendar.DAY_OF_MONTH)).toEpochDay();
|
||||
dateVector.vector[rowNum] = epochDay;
|
||||
} else {
|
||||
throw new RuntimeException(orcExceptionMsg("Date type expected for field ", fieldName, rowNum));
|
||||
}
|
||||
}
|
||||
|
||||
private static void setTimestampVector(Object colVal, String fieldName, TimestampColumnVector timestampVector,
|
||||
int rowNum) {
|
||||
if (colVal instanceof Timestamp) {
|
||||
timestampVector.set(rowNum, (Timestamp) colVal);
|
||||
} else if (colVal instanceof Date) {
|
||||
Date date = (Date) colVal;
|
||||
Timestamp ts = new Timestamp(date.getTime());
|
||||
|
||||
timestampVector.set(rowNum, ts);
|
||||
} else {
|
||||
throw new RuntimeException(
|
||||
orcExceptionMsg("Date or Timestamp type expected for field ", fieldName, rowNum));
|
||||
}
|
||||
}

    /**
     * <p>
     * A union column can contain column vectors of more than one type.
     * TypeDescription.createUnion() is called to create a TypeDescription for a
     * union column, and the union children are added by calling the
     * addUnionChild() method on this TypeDescription object.
     * </p>
     * <p>
     * The class fields in the UnionColumnVector are shown below:
     * </p>
     *
     * <pre>
     * public class UnionColumnVector extends ColumnVector {
     *     public int[] tags;
     *     public ColumnVector[] fields;
     * }
     * </pre>
     * <p>
     * A tag value ({@code tags[rowNum]}) is associated with each field value
     * ({@code fields[rowNum]}). The tag value serves as an index for the field
     * type. For example, if there are three field types defined:
     * </p>
     * <ol>
     * <li>Long</li>
     * <li>Double</li>
     * <li>String</li>
     * </ol>
     * <p>
     * the tag will have a value in the range [0..2].
     * </p>
     * <p>
     * The tag value is needed to initialize the ColumnVector since without the tag
     * there is no way to know which union child should be initialized.
     * </p>
     *
     * @param colVal        a {@code Pair<TypeDescription, Object>} object with the
     *                      union child type and the object that will be used to
     *                      initialize the union child ColumnVector.
     * @param unionTypeDesc the TypeDescription for the union column
     * @param fieldName     the name of the union field
     * @param unionVector   the UnionColumnVector to be initialized
     * @param rowNum        the ORC file row number.
     */
    private static void setUnionColumnVector(Object colVal, TypeDescription unionTypeDesc, String fieldName,
            UnionColumnVector unionVector, int rowNum) {
        @SuppressWarnings("unchecked")
        Pair<TypeDescription, Object> unionValuePair = (Pair<TypeDescription, Object>) colVal;
        TypeDescription unionValType = unionValuePair.getLeft();
        List<TypeDescription> unionChildrenTypes = unionTypeDesc.getChildren();
        Object unionColVal = unionValuePair.getRight();
        boolean found = false;
        for (int i = 0; i < unionChildrenTypes.size(); i++) {
            if (unionChildrenTypes.get(i).getCategory() == unionValType.getCategory()) {
                unionVector.tags[rowNum] = i;
                ColumnVector unionFieldVec = unionVector.fields[i];
                setColumn(unionColVal, unionChildrenTypes.get(i), fieldName, unionFieldVec, rowNum);
                found = true;
                break;
            }
        }
        if (!found) {
            throw new RuntimeException("setUnionColumnVector: Bad type enumeration "
                    + unionValType.getCategory().getName() + " passed for field " + fieldName);
        }
    }
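
    // Illustrative call (not from the original source): judging by the
    // getLeft()/getRight() calls above, Pair is assumed to be
    // org.apache.commons.lang3.tuple.Pair. A union value for a
    // union<bigint,string> column would then be supplied as:
    //
    //   Pair<TypeDescription, Object> unionVal = Pair.of(TypeDescription.createLong(), 42L);
    //   setColumn(unionVal, unionTypeDesc, "u_field", unionVector, rowNum);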

    private static void setLongListVector(List<Object> longValList, LongColumnVector longVector, int offset,
            String fieldName) {
        for (int i = 0; i < longValList.size(); i++) {
            Object objVal = longValList.get(i);
            if (objVal != null) {
                if (objVal instanceof Integer) {
                    longVector.vector[offset + i] = (Integer) objVal;
                } else if (objVal instanceof Long) {
                    longVector.vector[offset + i] = (Long) objVal;
                } else {
                    throw new RuntimeException("List<Integer> or List<Long> expected for field " + fieldName);
                }
            } else {
                longVector.isNull[offset + i] = true;
                longVector.noNulls = false;
            }
        }
    }

    private static void setLongList(List<Object> colValList, ListColumnVector listVector, String fieldName,
            int rowNum) {
        LongColumnVector longVector = (LongColumnVector) listVector.child;
        int offset = (int) listVector.offsets[rowNum];
        setLongListVector(colValList, longVector, offset, fieldName);
    }

    private static void setDoubleListVector(List<Object> doubleValList, DoubleColumnVector doubleVector, int offset,
            String fieldName) {
        for (int i = 0; i < doubleValList.size(); i++) {
            Object objVal = doubleValList.get(i);
            if (objVal != null) {
                if (objVal instanceof Double) {
                    doubleVector.vector[offset + i] = (Double) objVal;
                } else if (objVal instanceof Float) {
                    doubleVector.vector[offset + i] = ((Float) objVal).doubleValue();
                } else {
                    throw new RuntimeException("List<Double> or List<Float> expected for field " + fieldName);
                }
            } else {
                doubleVector.isNull[offset + i] = true;
                doubleVector.noNulls = false;
            }
        }
    }

    private static void setDoubleList(List<Object> doubleValList, ListColumnVector listVector, String fieldName,
            int rowNum) {
        DoubleColumnVector vecChild = (DoubleColumnVector) listVector.child;
        int offset = (int) listVector.offsets[rowNum];
        setDoubleListVector(doubleValList, vecChild, offset, fieldName);
    }

    private static void setTimestampListVector(List<Object> valueList, TimestampColumnVector timestampVector,
            int offset, String fieldName) {
        for (int i = 0; i < valueList.size(); i++) {
            Object objVal = valueList.get(i);
            if (objVal != null) {
                // java.sql.Timestamp extends java.util.Date, so this branch covers both
                if (objVal instanceof Date) {
                    Timestamp ts = (objVal instanceof Timestamp) ? (Timestamp) objVal
                            : new Timestamp(((Date) objVal).getTime());
                    timestampVector.time[offset + i] = ts.getTime();
                    timestampVector.nanos[offset + i] = ts.getNanos();
                } else {
                    throw new RuntimeException("List<Date> or List<Timestamp> expected for field " + fieldName);
                }
            } else {
                timestampVector.isNull[offset + i] = true;
                timestampVector.noNulls = false;
            }
        }
    }

    /**
     * Initialize the vector values for a ListColumnVector of Date or Timestamp
     * values.
     *
     * @param colValList a list of Timestamp or java.util.Date objects
     * @param listVector a ListColumnVector with a child that will contain the
     *                   vector values.
     * @param fieldName  the field name in the schema for this ORC element
     * @param rowNum     the ORC file row number
     */
    private static void setTimestampList(List<Object> colValList, ListColumnVector listVector, String fieldName,
            int rowNum) {
        TimestampColumnVector timestampVector = (TimestampColumnVector) listVector.child;
        int offset = (int) listVector.offsets[rowNum];
        setTimestampListVector(colValList, timestampVector, offset, fieldName);
    }

    private static void setDecimalListVector(List<Object> decimalValList, DecimalColumnVector decimalVector, int offset,
            String fieldName) {
        for (int i = 0; i < decimalValList.size(); i++) {
            Object objVal = decimalValList.get(i);
            if (objVal != null) {
                if (objVal instanceof BigDecimal) {
                    BigDecimal bigDecimal = (BigDecimal) objVal;
                    decimalVector.precision = (short) bigDecimal.precision();
                    decimalVector.scale = (short) bigDecimal.scale();
                    HiveDecimal hiveDecimal = HiveDecimal.create(bigDecimal);
                    HiveDecimalWritable writeableDecimal = new HiveDecimalWritable(hiveDecimal);
                    decimalVector.vector[offset + i] = writeableDecimal;
                } else {
                    throw new RuntimeException("BigDecimal value expected for field " + fieldName);
                }
            } else {
                decimalVector.isNull[offset + i] = true;
                decimalVector.noNulls = false;
            }
        }
    }

    /**
     * Initialize a ListColumnVector with a DecimalColumnVector child.
     *
     * @param colValList a list of BigDecimal values to initialize the
     *                   ListColumnVector child
     * @param listVector the ListColumnVector with the DecimalColumnVector child
     * @param fieldName  the field name for the ListColumnVector/DecimalColumnVector
     *                   column
     * @param rowNum     the ORC file row number
     */
    private static void setDecimalList(List<Object> colValList, ListColumnVector listVector, String fieldName,
            int rowNum) {
        DecimalColumnVector decimalVector = (DecimalColumnVector) listVector.child;
        int offset = (int) listVector.offsets[rowNum];
        setDecimalListVector(colValList, decimalVector, offset, fieldName);
    }

    private static void setBytesListVector(List<Object> valueList, BytesColumnVector bytesVector, int offset,
            String fieldName) {
        for (int i = 0; i < valueList.size(); i++) {
            Object objVal = valueList.get(i);
            if (objVal != null) {
                if (objVal instanceof byte[] || objVal instanceof String) {
                    byte[] byteVec = (objVal instanceof byte[]) ? (byte[]) objVal
                            : ((String) objVal).getBytes(StandardCharsets.UTF_8);
                    bytesVector.vector[offset + i] = byteVec;
                    bytesVector.length[offset + i] = byteVec.length;
                } else {
                    throw new RuntimeException("String or byte[] value expected for field " + fieldName);
                }
            } else {
                bytesVector.isNull[offset + i] = true;
                bytesVector.length[offset + i] = 0;
                bytesVector.noNulls = false;
            }
        }
    }

    /**
     * Initialize a ListColumnVector with a BytesColumnVector child with byte[]
     * values.
     *
     * @param colValList a list of byte[] or String values
     * @param listVector the parent ListColumnVector
     * @param fieldName  the field name for the ORC column that contains the
     *                   ListColumnVector
     * @param rowNum     the ORC file row number
     */
    private static void setBytesList(List<Object> colValList, ListColumnVector listVector, String fieldName,
            int rowNum) {
        BytesColumnVector bytesVector = (BytesColumnVector) listVector.child;
        int offset = (int) listVector.offsets[rowNum];
        setBytesListVector(colValList, bytesVector, offset, fieldName);
    }

    private static void setMultiValuedVectorParameters(MultiValuedColumnVector multiVector, int vecLength, int rowNum) {
        multiVector.lengths[rowNum] = vecLength;
        if (rowNum > 0) {
            multiVector.offsets[rowNum] = multiVector.lengths[rowNum - 1] + multiVector.offsets[rowNum - 1];
        }
    }

    private static void setListVectorParameters(ListColumnVector listVec, int maxBatchSize, int vecLength, int rowNum) {
        setMultiValuedVectorParameters(listVec, vecLength, rowNum);
        listVec.child.ensureSize(maxBatchSize * vecLength, true);
    }
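
    // Sketch of the offset/length bookkeeping above (comment added for
    // illustration, not in the original source): if the first rows of a batch
    // hold lists of lengths 2, 3 and 1, the child vector is filled contiguously
    // and the parent vector records
    //   lengths = [2, 3, 1, ...]    offsets = [0, 2, 5, ...]
    // so row r owns child elements [offsets[r], offsets[r] + lengths[r]).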

    /**
     * Initialize a ListColumnVector. The child of the vector is limited to the
     * scalar types long, double, String (or byte[]), BigDecimal or Date (or
     * Timestamp).
     *
     * @param colVal     a {@code List<Object>}
     * @param typeDesc   the schema definition for this column
     * @param fieldName  the column field name
     * @param listVector the ListColumnVector parent of the vector type child
     * @param rowNum     the ORC file row number.
     */
    private static void setListColumnVector(Object colVal, TypeDescription typeDesc, String fieldName,
            ListColumnVector listVector, int rowNum) {
        if (colVal instanceof List) {
            @SuppressWarnings("unchecked")
            List<Object> objValList = (List<Object>) colVal;
            final int maxBatchSize = typeDesc.createRowBatch().getMaxSize();
            setListVectorParameters(listVector, maxBatchSize, objValList.size(), rowNum);
            ColumnVector.Type childType = listVector.child.type;
            switch (childType) {
            case LONG:
                setLongList(objValList, listVector, fieldName, rowNum);
                break;
            case DOUBLE:
                setDoubleList(objValList, listVector, fieldName, rowNum);
                break;
            case BYTES:
                setBytesList(objValList, listVector, fieldName, rowNum);
                break;
            case DECIMAL:
                setDecimalList(objValList, listVector, fieldName, rowNum);
                break;
            case TIMESTAMP:
                setTimestampList(objValList, listVector, fieldName, rowNum);
                break;
            default:
                throw new RuntimeException(childType.name() + " is not supported for ListColumnVector columns");
            }
        } else {
            throw new RuntimeException("List value expected for field " + fieldName);
        }
    }

    /**
     * Test that all elements in an Object list are of a particular type.
     *
     * @param objList  the Object list that is tested
     * @param typeTest a function that compares against a particular Object type
     * @return true if all elements are of the test type, false if one or more
     *         elements are not of that type.
     */
    private static boolean isListType(List<Object> objList, Predicate<Object> typeTest) {
        return objList.stream().allMatch(typeTest);
    }
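
    // For example (illustrative): isListType(values, v -> v instanceof Long).
    // The isLong, isInteger, isDouble, isString, isBigDecimal and isDate
    // predicates used below are assumed to be defined elsewhere in this class.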

    /**
     * Initialize a ColumnVector with Long values.
     *
     * @param valueList a list of Long values
     * @param colVector the LongColumnVector that will be initialized with the Long
     *                  values
     * @param offset    the offset[rowNum] value for the array
     * @param fieldName the field name for the Map column
     */
    private static void setLongMapValues(List<Object> valueList, ColumnVector colVector, int offset, String fieldName) {
        if (isListType(valueList, isLong) || isListType(valueList, isInteger)) {
            LongColumnVector longVector = (LongColumnVector) colVector;
            setLongListVector(valueList, longVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " Long values expected");
        }
    }

    /**
     * Initialize a ColumnVector with Double values.
     *
     * @param valueList a list of Double values
     * @param colVector the DoubleColumnVector that will be initialized with the
     *                  Double values
     * @param offset    the offset[rowNum] value for the array
     * @param fieldName the field name for the Map column
     */
    private static void setDoubleMapValues(List<Object> valueList, ColumnVector colVector, int offset,
            String fieldName) {
        if (isListType(valueList, isDouble)) {
            DoubleColumnVector doubleVector = (DoubleColumnVector) colVector;
            setDoubleListVector(valueList, doubleVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " Double values expected");
        }
    }

    /**
     * Initialize a ColumnVector with String values.
     *
     * @param valueList a list of String values
     * @param colVector the BytesColumnVector that will be initialized with the
     *                  String values
     * @param offset    the offset[rowNum] value for the array
     * @param fieldName the field name for the Map column
     */
    private static void setStringMapValues(List<Object> valueList, ColumnVector colVector, int offset,
            String fieldName) {
        if (isListType(valueList, isString)) {
            BytesColumnVector bytesVector = (BytesColumnVector) colVector;
            setBytesListVector(valueList, bytesVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " String values expected");
        }
    }

    /**
     * Initialize a ColumnVector with BigDecimal values.
     *
     * @param valueList a list of BigDecimal values
     * @param colVector the DecimalColumnVector that will be initialized with the
     *                  BigDecimal values
     * @param offset    the offset[rowNum] value for the array
     * @param fieldName the field name for the Map column
     */
    private static void setDecimalMapValues(List<Object> valueList, ColumnVector colVector, int offset,
            String fieldName) {
        if (isListType(valueList, isBigDecimal)) {
            DecimalColumnVector decimalVector = (DecimalColumnVector) colVector;
            setDecimalListVector(valueList, decimalVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " BigDecimal values expected");
        }
    }

    /**
     * Initialize a ColumnVector with timestamp values.
     *
     * @param valueList a list of Date (or Timestamp) objects
     * @param colVector the TimestampColumnVector that will be initialized with the
     *                  Timestamp values
     * @param offset    the offset[rowNum] value for the array
     * @param fieldName the field name for the Map column
     */
    private static void setTimestampMapValues(List<Object> valueList, ColumnVector colVector, int offset,
            String fieldName) {
        if (isListType(valueList, isDate)) {
            TimestampColumnVector timestampVector = (TimestampColumnVector) colVector;
            setTimestampListVector(valueList, timestampVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " Date or Timestamp values expected");
        }
    }

    /**
     * Set the MapColumn value array vector. The type for this vector is limited to
     * long, double, bytes (String), Decimal and Timestamp.
     *
     * @param valueList a list of Objects to initialize the Map column value array.
     * @param colVector the column array vector to be initialized with the map
     *                  values.
     * @param offset    the offset[rowNum] from the parent MapColumnVector
     * @param fieldName the name of the field for the MapColumnVector.
     */
    private static void setMapValueVector(List<Object> valueList, ColumnVector colVector, int offset,
            String fieldName) {
        switch (colVector.type) {
        case LONG:
            setLongMapValues(valueList, colVector, offset, fieldName);
            break;
        case DOUBLE:
            setDoubleMapValues(valueList, colVector, offset, fieldName);
            break;
        case BYTES:
            setStringMapValues(valueList, colVector, offset, fieldName);
            break;
        case DECIMAL:
            setDecimalMapValues(valueList, colVector, offset, fieldName);
            break;
        case TIMESTAMP:
            setTimestampMapValues(valueList, colVector, offset, fieldName);
            break;
        default:
            throw new RuntimeException(
                    "For field " + fieldName + " values must be long, double, String, BigDecimal or Timestamp");
        }
    }

    /**
     * <p>
     * Initialize a MapColumnVector with Long key values.
     * </p>
     *
     * @param mapSet    a set of {key, value} pairs, where the key values are Long
     *                  objects. The elements of this set will be used to initialize
     *                  the key and value array column vectors that are children of
     *                  the MapColumnVector.
     * @param mapVector the MapColumnVector. This ColumnVector has children for the
     *                  key and value arrays.
     * @param fieldName the field name for the map column vector column.
     * @param rowNum    the ORC file row number.
     */
    private static void setLongKeyMap(Set<Map.Entry<Object, Object>> mapSet, MapColumnVector mapVector,
            String fieldName, int rowNum) {
        List<Object> keyValueList = mapSet.stream().map(Map.Entry::getKey).collect(Collectors.toList());
        if (isListType(keyValueList, isLong)) {
            LongColumnVector longVector = (LongColumnVector) mapVector.keys;
            int offset = (int) mapVector.offsets[rowNum];
            // set the key vector
            setLongListVector(keyValueList, longVector, offset, fieldName);
            // set the value vector
            ColumnVector valueVector = mapVector.values;
            List<Object> valueList = mapSet.stream().map(Map.Entry::getValue).collect(Collectors.toList());
            setMapValueVector(valueList, valueVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " Long key type expected to match schema");
        }
    }

    /**
     * <p>
     * Initialize a MapColumnVector with Double key values.
     * </p>
     *
     * @param mapSet    a set of {key, value} pairs, where the key values are Double
     *                  objects. The elements of this set will be used to initialize
     *                  the key and value array column vectors that are children of
     *                  the MapColumnVector.
     * @param mapVector the MapColumnVector. This ColumnVector has children for the
     *                  key and value arrays.
     * @param fieldName the field name for the map column vector column.
     * @param rowNum    the ORC file row number.
     */
    private static void setDoubleKeyMap(Set<Map.Entry<Object, Object>> mapSet, MapColumnVector mapVector,
            String fieldName, int rowNum) {
        List<Object> keyValueList = mapSet.stream().map(Map.Entry::getKey).collect(Collectors.toList());
        if (isListType(keyValueList, isDouble)) {
            DoubleColumnVector doubleVector = (DoubleColumnVector) mapVector.keys;
            int offset = (int) mapVector.offsets[rowNum];
            // set the key vector
            setDoubleListVector(keyValueList, doubleVector, offset, fieldName);
            // set the value vector
            ColumnVector valueVector = mapVector.values;
            List<Object> valueList = mapSet.stream().map(Map.Entry::getValue).collect(Collectors.toList());
            setMapValueVector(valueList, valueVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " Double key type expected to match schema");
        }
    }

    /**
     * <p>
     * Initialize a MapColumnVector with String key values.
     * </p>
     *
     * @param mapSet    a set of {key, value} pairs, where the key values are String
     *                  objects. The elements of this set will be used to initialize
     *                  the key and value array column vectors that are children of
     *                  the MapColumnVector.
     * @param mapVector the MapColumnVector. This ColumnVector has children for the
     *                  key and value arrays.
     * @param fieldName the field name for the map column vector column.
     * @param rowNum    the ORC file row number.
     */
    private static void setStringKeyMap(Set<Map.Entry<Object, Object>> mapSet, MapColumnVector mapVector,
            String fieldName, int rowNum) {
        List<Object> keyValueList = mapSet.stream().map(Map.Entry::getKey).collect(Collectors.toList());
        if (isListType(keyValueList, isString)) {
            BytesColumnVector byteVector = (BytesColumnVector) mapVector.keys;
            int offset = (int) mapVector.offsets[rowNum];
            // set the key array vector
            setBytesListVector(keyValueList, byteVector, offset, fieldName);
            // set the value array vector
            ColumnVector valueVector = mapVector.values;
            List<Object> valueList = mapSet.stream().map(Map.Entry::getValue).collect(Collectors.toList());
            setMapValueVector(valueList, valueVector, offset, fieldName);
        } else {
            throw new RuntimeException("For field " + fieldName + " String key type expected to match schema");
        }
    }

    private static void setMapVectorParameters(MapColumnVector mapVec, int maxBatchSize, int vecLength, int rowNum) {
        setMultiValuedVectorParameters(mapVec, vecLength, rowNum);
        // mirror setListVectorParameters: each of the maxBatchSize rows may hold
        // up to vecLength entries, so grow by the product, not the sum
        mapVec.keys.ensureSize(maxBatchSize * vecLength, true);
        mapVec.values.ensureSize(maxBatchSize * vecLength, true);
    }

    /**
     * <p>
     * Set the Map key and value elements for a MapColumnVector.
     * </p>
     * <p>
     * A MapColumnVector has a single ColumnVector type for each of the map key and
     * map values. For example, the ColumnVector for the key values could be a
     * BytesColumnVector (a set of String keys) while the values are a
     * LongColumnVector.
     * </p>
     * <p>
     * In the documentation there is no restriction given for the map key type. This
     * code limits the key types to scalar values: string, long, double.
     * </p>
     * <p>
     * The documentation does not limit the map value types. This code limits the
     * map values to the same types that are supported for ListColumnVectors: long,
     * double, bytes (String), Decimal and Timestamp.
     * </p>
     *
     * @param colVal    a HashMap object
     * @param typeDesc  the schema description for the MapColumnVector column
     * @param fieldName the field name of the MapColumnVector column
     * @param mapVector the parent MapColumnVector
     * @param rowNum    the ORC file row number.
     */
    private static void setMapColumnVector(Object colVal, TypeDescription typeDesc, String fieldName,
            MapColumnVector mapVector, int rowNum) {
        if (colVal == null) {
            mapVector.isNull[rowNum] = true;
            mapVector.noNulls = false;
        } else {
            // note: non-null values that are not HashMap instances are silently ignored
            if (colVal instanceof HashMap) {
                @SuppressWarnings("unchecked")
                Map<Object, Object> rawMap = (HashMap<Object, Object>) colVal;
                int mapLen = rawMap.size();
                final int maxBatchSize = typeDesc.createRowBatch().getMaxSize();
                setMapVectorParameters(mapVector, maxBatchSize, mapLen, rowNum);
                if (ORCCommonUtils.checkMapColumnVectorTypes(mapVector)) {
                    Set<Map.Entry<Object, Object>> mapSet = rawMap.entrySet();
                    switch (mapVector.keys.type) {
                    case LONG:
                        setLongKeyMap(mapSet, mapVector, fieldName, rowNum);
                        break;
                    case DOUBLE:
                        setDoubleKeyMap(mapSet, mapVector, fieldName, rowNum);
                        break;
                    case BYTES:
                        setStringKeyMap(mapSet, mapVector, fieldName, rowNum);
                        break;
                    default:
                        // unreachable: unsupported key types are rejected by
                        // checkMapColumnVectorTypes above
                        break;
                    }
                } else {
                    throw new RuntimeException(
                            "For field " + fieldName + " key types are limited to string, long and double; "
                                    + "value types are limited to long, double, String, decimal and timestamp");
                }
            }
        }
    }
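
    // Illustrative call (not from the original source): a map<string,bigint>
    // column value is passed in as a HashMap, e.g.
    //
    //   Map<Object, Object> m = new HashMap<>();
    //   m.put("a", 1L);
    //   m.put("b", 2L);
    //   setColumn(m, mapTypeDesc, "m_field", mapVector, rowNum);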

    /**
     * Set a column value in an ORC row that will be written to the ORC file.
     *
     * @param colVal    an Object containing the values to be written to the column
     * @param typeDesc  the TypeDescription from the schema that defines the column
     * @param fieldName the column field name
     * @param vector    the ColumnVector that will be initialized with the values in
     *                  the colVal argument.
     * @param rowNum    the ORC file row number.
     */
    public static void setColumn(Object colVal, TypeDescription typeDesc, String fieldName, ColumnVector vector,
            int rowNum) {
        if (colVal == null) {
            vector.isNull[rowNum] = true;
            vector.noNulls = false;
        } else {
            switch (vector.type) {
            case LONG: {
                if (vector instanceof DateColumnVector) {
                    DateColumnVector dateVector = (DateColumnVector) vector;
                    setDateColumnVector(colVal, fieldName, dateVector, rowNum);
                } else {
                    LongColumnVector longVector = (LongColumnVector) vector;
                    setLongColumnVector(colVal, fieldName, longVector, rowNum);
                }
                break;
            }
            case DOUBLE: {
                DoubleColumnVector doubleVector = (DoubleColumnVector) vector;
                setDoubleVector(colVal, fieldName, doubleVector, rowNum);
                break;
            }
            case BYTES: {
                BytesColumnVector bytesColVector = (BytesColumnVector) vector;
                setByteColumnVector(colVal, fieldName, bytesColVector, rowNum);
                break;
            }
            case DECIMAL: {
                DecimalColumnVector decimalVector = (DecimalColumnVector) vector;
                setDecimalVector(colVal, fieldName, decimalVector, rowNum);
                break;
            }
            case DECIMAL_64:
                throw new RuntimeException("Field: " + fieldName + ", Decimal64ColumnVector is not supported");
            case TIMESTAMP: {
                TimestampColumnVector timestampVector = (TimestampColumnVector) vector;
                setTimestampVector(colVal, fieldName, timestampVector, rowNum);
                break;
            }
            case INTERVAL_DAY_TIME:
                throw new RuntimeException("Field: " + fieldName + ", HiveIntervalDayTime is not supported");
            case STRUCT: {
                // STRUCT columns are not handled yet:
                // setStructColumnVector(colVal, typeDesc, fieldName, (StructColumnVector) vector, rowNum);
                break;
            }
            case LIST: {
                ListColumnVector listVector = (ListColumnVector) vector;
                setListColumnVector(colVal, typeDesc, fieldName, listVector, rowNum);
                break;
            }
            case MAP: {
                MapColumnVector mapVector = (MapColumnVector) vector;
                setMapColumnVector(colVal, typeDesc, fieldName, mapVector, rowNum);
                break;
            }
            case UNION: {
                UnionColumnVector unionVector = (UnionColumnVector) vector;
                setUnionColumnVector(colVal, typeDesc, fieldName, unionVector, rowNum);
                break;
            }
            default:
                throw new RuntimeException("setColumn: Internal error: unexpected ColumnVector subtype");
            } // switch
        } // else
    } // setColumn
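
    // Minimal usage sketch (assumed setup, not from the original source):
    // populate one row of a struct<name:string,age:int> batch.
    //
    //   TypeDescription schema = TypeDescription.fromString("struct<name:string,age:int>");
    //   VectorizedRowBatch batch = schema.createRowBatch();
    //   int row = batch.size++;
    //   setColumn("alice", schema.getChildren().get(0), "name", batch.cols[0], row);
    //   setColumn(30, schema.getChildren().get(1), "age", batch.cols[1], row);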

    private static String orcExceptionMsg(String prefixMsg, String fieldName, int rowNum) {
        return prefixMsg + fieldName + " in row " + rowNum;
    }

    public static TypeDescription detectType(Object value) {
        TypeDescription type = null;
        if (value != null) {
            if (value instanceof Boolean) {
                type = TypeDescription.createBoolean();
            } else if (value instanceof Short) {
                type = TypeDescription.createShort();
            } else if (value instanceof Integer) {
                type = TypeDescription.createInt();
            } else if (value instanceof Long) {
                type = TypeDescription.createLong();
            } else if (value instanceof Timestamp) {
                type = TypeDescription.createTimestamp();
            } else if (value instanceof BigDecimal) {
                type = TypeDescription.createDecimal();
            } else if (value instanceof Byte) {
                type = TypeDescription.createByte();
            } else if (value instanceof Float) {
                type = TypeDescription.createFloat();
            } else if (value instanceof Double) {
                type = TypeDescription.createDouble();
            } else if (value instanceof String) {
                type = TypeDescription.createString();
            } else if (value instanceof Date) {
                type = TypeDescription.createDate();
            } else if (value instanceof byte[]) {
                type = TypeDescription.createBinary();
            } else {
                throw new RuntimeException(
                        value.getClass().getName() + " is not supported for ListColumnVector columns");
            }
        } else {
            // fall back to string when no sample value is available
            type = TypeDescription.createString();
        }

        return type;
    }
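
    // Example (mirrors the unit-test usage): derive the element type of a LIST
    // column from a sample value.
    //
    //   TypeDescription elemType = ORCWriteUtils.detectType("v1");   // string
    //   TypeDescription listType = TypeDescription.createList(elemType);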
}

@@ -0,0 +1,253 @@
package org.talend.orc;

import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcFile.WriterOptions;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

class ORCUtilsTest {

    private static File localFolder;

    @BeforeAll
    static void setup() throws IOException {
        localFolder = createTempDirectory();
    }

    @AfterAll
    static void dispose() {
        localFolder.delete();
    }

    /**
     * Write all of the supported data types to an ORC file and read them back.
     *
     * At the time this test was written, the ORC writer did not correctly write the
     * date epoch value to the ORC file. The value was written as a 32-bit int
     * instead of a 64-bit long. As a result, the date is incorrect and a timestamp
     * value should be used instead.
     */
    @Test
    void testAllDataTypes() throws Throwable {
        String filePath = localFolder.getAbsolutePath() + "/testAllTypes.orc";
        writeData(filePath);

        readAndCheckData(filePath);
    }

    @Test
    void testDetectType() {
        Assertions.assertEquals(TypeDescription.Category.BOOLEAN, ORCWriteUtils.detectType(true).getCategory());
        Assertions.assertEquals(TypeDescription.Category.SHORT,
                ORCWriteUtils.detectType(Short.valueOf("1")).getCategory());
        Assertions.assertEquals(TypeDescription.Category.INT, ORCWriteUtils.detectType(1).getCategory());
        Assertions.assertEquals(TypeDescription.Category.LONG, ORCWriteUtils.detectType(1L).getCategory());
        Assertions.assertEquals(TypeDescription.Category.TIMESTAMP,
                ORCWriteUtils.detectType(new Timestamp(System.currentTimeMillis())).getCategory());
        Assertions.assertEquals(TypeDescription.Category.DECIMAL,
                ORCWriteUtils.detectType(new BigDecimal("1")).getCategory());
        Assertions.assertEquals(TypeDescription.Category.BYTE,
                ORCWriteUtils.detectType(Byte.valueOf("1")).getCategory());
        Assertions.assertEquals(TypeDescription.Category.FLOAT, ORCWriteUtils.detectType(1.0f).getCategory());
        Assertions.assertEquals(TypeDescription.Category.DOUBLE, ORCWriteUtils.detectType(1.0).getCategory());
        Assertions.assertEquals(TypeDescription.Category.STRING, ORCWriteUtils.detectType("test").getCategory());
        Assertions.assertEquals(TypeDescription.Category.DATE, ORCWriteUtils.detectType(new Date()).getCategory());
        Assertions.assertEquals(TypeDescription.Category.BINARY,
                ORCWriteUtils.detectType("test".getBytes()).getCategory());
    }

    private void writeData(String filePath) throws Throwable {
        TypeDescription schema = TypeDescription.createStruct();
        schema.addField("t_boolean", TypeDescription.createBoolean());
        schema.addField("t_byte", TypeDescription.createByte());
        schema.addField("t_bytes", TypeDescription.createBinary());
        schema.addField("t_char", TypeDescription.createChar());
        schema.addField("t_date", TypeDescription.createDate());
        schema.addField("t_ts", TypeDescription.createTimestamp());
        schema.addField("t_double", TypeDescription.createDouble());
        schema.addField("t_float", TypeDescription.createFloat());
        schema.addField("t_decimal", TypeDescription.createDecimal().withPrecision(18).withScale(5));
        schema.addField("t_int", TypeDescription.createInt());
        schema.addField("t_long", TypeDescription.createLong());
        schema.addField("t_short", TypeDescription.createShort());
        schema.addField("t_string", TypeDescription.createString());
        schema.addField("t_list", TypeDescription.createList(TypeDescription.createString()));

        WriterOptions writerOption = OrcFile.writerOptions(new Configuration()) //
                .overwrite(true) //
                .compress(CompressionKind.valueOf("ZLIB")) //
                .setSchema(schema);

        Writer writer = OrcFile.createWriter(new Path(filePath), writerOption);
        VectorizedRowBatch batch = schema.createRowBatch(100);
        for (int r = 0; r < 1000; ++r) {
            int row = batch.size++;
            for (int i = 0; i < batch.cols.length; i++) {
                ColumnVector vector = batch.cols[i];
                TypeDescription type = schema.getChildren().get(i);
                switch (vector.type) {
                case BYTES:
                    if (type.getCategory() == TypeDescription.Category.BINARY) {
                        ORCWriteUtils.setColumn(("this is byte[] " + r).getBytes(), null, "t_bytes", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.STRING) {
                        if (r == 666) {
                            ORCWriteUtils.setColumn(null, null, "t_string", vector, row);
                        } else {
                            ORCWriteUtils.setColumn(("this is String " + r), null, "t_string", vector, row);
                        }
                    } else if (type.getCategory() == TypeDescription.Category.CHAR) {
                        ORCWriteUtils.setColumn("talend".charAt(r % 6), null, "t_char", vector, row);
                    } else {
                        throw new RuntimeException(type.getCategory() + " is not supported as BYTES vector");
                    }
                    break;
                case DECIMAL:
                    ORCWriteUtils.setColumn(new BigDecimal(r + ".12345"), null, "t_decimal", vector, row);
                    break;
                case DOUBLE:
                    if (type.getCategory() == TypeDescription.Category.DOUBLE) {
                        ORCWriteUtils.setColumn(r + 0.123, null, "t_double", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.FLOAT) {
                        ORCWriteUtils.setColumn(r + 0.456f, null, "t_float", vector, row);
                    } else {
                        throw new RuntimeException(type.getCategory() + " is not supported as DOUBLE vector");
                    }
                    break;
                case LONG:
                    if (type.getCategory() == TypeDescription.Category.BOOLEAN) {
                        ORCWriteUtils.setColumn(true, null, "t_boolean", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.BYTE) {
                        ORCWriteUtils.setColumn((byte) (r % 128), null, "t_byte", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.INT) {
                        ORCWriteUtils.setColumn(r, null, "t_int", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.SHORT) {
                        ORCWriteUtils.setColumn((short) (r % 256), null, "t_short", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.LONG) {
                        ORCWriteUtils.setColumn(r * 1000L, null, "t_long", vector, row);
                    } else if (type.getCategory() == TypeDescription.Category.DATE) {
                        Date d = new Date(1633687854031L);
                        ORCWriteUtils.setColumn(d, null, "t_date", vector, row);
                    } else {
                        throw new RuntimeException(type.getCategory() + " is not supported as LONG vector");
                    }
                    break;
                case TIMESTAMP:
                    Timestamp ts = new java.sql.Timestamp(1633687854031L);
                    ts.setNanos(123456789);
                    ORCWriteUtils.setColumn(ts, null, "t_ts", vector, row);
                    break;
                case LIST:
                    List<String> values = new ArrayList<>();
                    values.add("v1_" + r);
                    values.add("v2_" + r);
                    values.add("v3_" + r);
                    ORCWriteUtils.setColumn(values, ORCWriteUtils.detectType("v1_" + r), "t_list", vector, row);
                    break;
                default:
                    throw new RuntimeException(vector.type + " is not supported");
                }
            }
            if (batch.size == batch.getMaxSize()) {
                writer.addRowBatch(batch);
                batch.reset();
            }
        }
        if (batch.size != 0) {
            writer.addRowBatch(batch);
        }

        writer.close();
    }

    private void readAndCheckData(String filePath) throws Throwable {

        Reader reader = OrcFile.createReader(new Path(filePath), OrcFile.readerOptions(new Configuration()));
        TypeDescription schema = reader.getSchema();
        VectorizedRowBatch batch = schema.createRowBatch();
        RecordReader rowIterator = reader.rows(reader.options().schema(schema));
        int numberLine = 0;
        List<Object> nb_500 = new ArrayList<>();
        List<Object> nb_666 = new ArrayList<>();
        while (rowIterator.nextBatch(batch)) {
            ColumnVector[] colVectors = batch.cols;
            for (int row = 0; row < batch.size; ++row) {
                numberLine++;
                for (String columnName : schema.getFieldNames()) {
                    ColumnVector colVector = colVectors[schema.getFieldNames().indexOf(columnName)];
                    int colRow = colVector.isRepeating ? 0 : row;
                    Object value = ORCReadUtils.readColumnByName(batch, columnName, schema, colRow);
                    if (numberLine == 500) {
                        nb_500.add(value);
                    } else if (numberLine == 667) {
                        // line 667 holds row index 666, where t_string was written as null
                        nb_666.add(value);
                    }
                }
            }
        }
        Assertions.assertEquals(true, nb_500.get(0));
        Assertions.assertEquals(Byte.valueOf("115"), nb_500.get(1));
        Assertions.assertEquals("this is byte[] 499", new String((byte[]) nb_500.get(2)));
        Assertions.assertEquals("a", nb_500.get(3));
        Date t_date = (Date) nb_500.get(4);
        Assertions.assertEquals(1633687854000L / 86400000, t_date.getTime() / 86400000);
        Timestamp t_ts = (Timestamp) nb_500.get(5);
        Assertions.assertEquals(1633687854123L, t_ts.getTime());
        Assertions.assertEquals(123456789, t_ts.getNanos());
        Assertions.assertEquals(499.123, nb_500.get(6));
        Assertions.assertEquals(499.456f, (((float) nb_500.get(7)) * 1000) / 1000f);
        Assertions.assertEquals(new BigDecimal("499.12345"), nb_500.get(8));
        Assertions.assertEquals(499, nb_500.get(9));
        Assertions.assertEquals(499000L, nb_500.get(10));
        Assertions.assertEquals(Short.valueOf("243"), nb_500.get(11));
        Assertions.assertEquals("this is String 499", nb_500.get(12));
        Assertions.assertArrayEquals(Arrays.asList("v1_499", "v2_499", "v3_499").toArray(),
                ((List<Object>) nb_500.get(13)).toArray());

        // row 666 (read as line 667): t_string was written as null
        Assertions.assertNull(nb_666.get(12));

        rowIterator.close();
        reader.close();
    }

    public static File createTempDirectory() throws IOException {
        final File temp;

        temp = File.createTempFile("temp", Long.toString(System.nanoTime()));
        if (!temp.delete()) {
            throw new IOException("Could not delete temp file: " + temp.getAbsolutePath());
        }

        if (!temp.mkdir()) {
            throw new IOException("Could not create temp directory: " + temp.getAbsolutePath());
        }

        return temp;
    }

}

@@ -0,0 +1,98 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>

	<groupId>org.talend.components</groupId>
	<artifactId>talend-parquet</artifactId>
	<version>1.3</version>

	<properties>
		<parquet.version>1.10.1</parquet.version>
		<hadoop.version>3.2.2</hadoop.version>
		<jodd.version>6.0.1</jodd.version>
		<hamcrest.version>1.3</hamcrest.version>
		<junit.version>4.13.2</junit.version>
		<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
	</properties>

	<distributionManagement>
		<snapshotRepository>
			<id>talend_nexus_deployment</id>
			<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
			<snapshots>
				<enabled>true</enabled>
			</snapshots>
			<releases>
				<enabled>false</enabled>
			</releases>
		</snapshotRepository>
		<repository>
			<id>talend_nexus_deployment</id>
			<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
			<snapshots>
				<enabled>false</enabled>
			</snapshots>
			<releases>
				<enabled>true</enabled>
			</releases>
		</repository>
	</distributionManagement>

	<dependencies>
		<dependency>
			<groupId>org.apache.parquet</groupId>
			<artifactId>parquet-hadoop</artifactId>
			<version>${parquet.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-client</artifactId>
			<version>${hadoop.version}</version>
			<scope>provided</scope>
			<exclusions>
				<exclusion>
					<groupId>org.slf4j</groupId>
					<artifactId>slf4j-log4j12</artifactId>
				</exclusion>
			</exclusions>
		</dependency>
		<dependency>
			<groupId>org.jodd</groupId>
			<artifactId>jodd-util</artifactId>
			<version>${jodd.version}</version>
		</dependency>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>${junit.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.hamcrest</groupId>
			<artifactId>hamcrest-library</artifactId>
			<version>${hamcrest.version}</version>
			<scope>test</scope>
		</dependency>
	</dependencies>

	<build>
		<plugins>
			<plugin>
				<artifactId>maven-compiler-plugin</artifactId>
				<version>3.8.0</version>
				<configuration>
					<source>1.8</source>
					<target>1.8</target>
				</configuration>
			</plugin>

		</plugins>
	</build>
</project>

@@ -0,0 +1,141 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.talend.parquet.data.simple.NanoTime;

abstract public class Group extends GroupValueSource {

    private static final Logger LOG = LoggerFactory.getLogger(Group.class);

    public void add(String field, int value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, long value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, float value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, double value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, String value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, NanoTime value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, boolean value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, Binary value) {
        add(getType().getFieldIndex(field), value);
    }

    public void add(String field, Group value) {
        add(getType().getFieldIndex(field), value);
    }

    public Group addGroup(String field) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("add group {} to {}", field, getType().getName());
        }
        return addGroup(getType().getFieldIndex(field));
    }

    @Override
    public Group getGroup(String field, int index) {
        return getGroup(getType().getFieldIndex(field), index);
    }

    abstract public void add(int fieldIndex, int value);

    abstract public void add(int fieldIndex, long value);

    abstract public void add(int fieldIndex, String value);

    abstract public void add(int fieldIndex, boolean value);

    abstract public void add(int fieldIndex, NanoTime value);

    abstract public void add(int fieldIndex, Binary value);

    abstract public void add(int fieldIndex, float value);

    abstract public void add(int fieldIndex, double value);

    abstract public void add(int fieldIndex, Group value);

    abstract public Group addGroup(int fieldIndex);

    @Override
    abstract public Group getGroup(int fieldIndex, int index);

    public Group asGroup() {
        return this;
    }

    public Group append(String fieldName, int value) {
        add(fieldName, value);
        return this;
    }

    public Group append(String fieldName, float value) {
        add(fieldName, value);
        return this;
    }

    public Group append(String fieldName, double value) {
        add(fieldName, value);
        return this;
    }

    public Group append(String fieldName, long value) {
        add(fieldName, value);
        return this;
    }

    public Group append(String fieldName, NanoTime value) {
        add(fieldName, value);
        return this;
    }

    public Group append(String fieldName, String value) {
        add(fieldName, Binary.fromString(value));
        return this;
    }

    public Group append(String fieldName, boolean value) {
        add(fieldName, value);
        return this;
    }

    public Group append(String fieldName, Binary value) {
        add(fieldName, value);
        return this;
    }
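
    // Usage sketch (illustrative, assuming a concrete Group implementation
    // such as an in-memory group built from a schema):
    //
    //   group.append("id", 1L).append("name", "alice").append("active", true);
    //
    // Each append() returns this, so field values can be chained fluently.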

    abstract public void writeValue(int field, int index, RecordConsumer recordConsumer);

}
@@ -0,0 +1,19 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data;

abstract public class GroupFactory {

    abstract public Group newGroup();

}

@@ -0,0 +1,83 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.GroupType;

abstract public class GroupValueSource {

    public int getFieldRepetitionCount(String field) {
        return getFieldRepetitionCount(getType().getFieldIndex(field));
    }

    public GroupValueSource getGroup(String field, int index) {
        return getGroup(getType().getFieldIndex(field), index);
    }

    public String getString(String field, int index) {
        return getString(getType().getFieldIndex(field), index);
    }

    public int getInteger(String field, int index) {
        return getInteger(getType().getFieldIndex(field), index);
    }

    public long getLong(String field, int index) {
        return getLong(getType().getFieldIndex(field), index);
    }

    public double getDouble(String field, int index) {
        return getDouble(getType().getFieldIndex(field), index);
    }

    public float getFloat(String field, int index) {
        return getFloat(getType().getFieldIndex(field), index);
    }

    public boolean getBoolean(String field, int index) {
        return getBoolean(getType().getFieldIndex(field), index);
    }

    public Binary getBinary(String field, int index) {
        return getBinary(getType().getFieldIndex(field), index);
    }

    public Binary getInt96(String field, int index) {
        return getInt96(getType().getFieldIndex(field), index);
    }

    abstract public int getFieldRepetitionCount(int fieldIndex);

    abstract public GroupValueSource getGroup(int fieldIndex, int index);

    abstract public String getString(int fieldIndex, int index);

    abstract public Integer getInteger(int fieldIndex, int index);

    abstract public Long getLong(int fieldIndex, int index);

    abstract public Double getDouble(int fieldIndex, int index);

    abstract public Float getFloat(int fieldIndex, int index);

    abstract public Boolean getBoolean(int fieldIndex, int index);

    abstract public Binary getBinary(int fieldIndex, int index);

    abstract public Binary getInt96(int fieldIndex, int index);

    abstract public String getValueToString(int fieldIndex, int index);

    abstract public GroupType getType();
}

@@ -0,0 +1,56 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data;

import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.Type;

public class GroupWriter {

    private final RecordConsumer recordConsumer;
    private final GroupType schema;

    public GroupWriter(RecordConsumer recordConsumer, GroupType schema) {
        this.recordConsumer = recordConsumer;
        this.schema = schema;
    }

    public void write(Group group) {
        recordConsumer.startMessage();
        writeGroup(group, schema);
        recordConsumer.endMessage();
    }

    private void writeGroup(Group group, GroupType type) {
        int fieldCount = type.getFieldCount();
        for (int field = 0; field < fieldCount; ++field) {
            int valueCount = group.getFieldRepetitionCount(field);
            if (valueCount > 0) {
                Type fieldType = type.getType(field);
                String fieldName = fieldType.getName();
                recordConsumer.startField(fieldName, field);
                for (int index = 0; index < valueCount; ++index) {
                    if (fieldType.isPrimitive()) {
                        group.writeValue(field, index, recordConsumer);
                    } else {
                        recordConsumer.startGroup();
                        writeGroup(group.getGroup(field, index), fieldType.asGroupType());
                        recordConsumer.endGroup();
                    }
                }
                recordConsumer.endField(fieldName, field);
            }
        }
    }
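
    // Usage sketch (illustrative; the RecordConsumer normally comes from a
    // Parquet WriteSupport implementation):
    //
    //   GroupWriter groupWriter = new GroupWriter(recordConsumer, schema);
    //   groupWriter.write(group); // one startMessage()/endMessage() pair per record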
}
@@ -0,0 +1,45 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;

public class BinaryValue extends Primitive {

    private final Binary binary;

    public BinaryValue(Binary binary) {
        this.binary = binary;
    }

    @Override
    public Binary getBinary() {
        return binary;
    }

    @Override
    public String getString() {
        return binary.toStringUsingUTF8();
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addBinary(binary);
    }

    @Override
    public String toString() {
        return getString();
    }
}

@@ -0,0 +1,39 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.RecordConsumer;

public class BooleanValue extends Primitive {

    private final boolean bool;

    public BooleanValue(boolean bool) {
        this.bool = bool;
    }

    @Override
    public String toString() {
        return String.valueOf(bool);
    }

    @Override
    public boolean getBoolean() {
        return bool;
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addBoolean(bool);
    }
}
|
||||
@@ -0,0 +1,39 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.RecordConsumer;

public class DoubleValue extends Primitive {

    private final double value;

    public DoubleValue(double value) {
        this.value = value;
    }

    @Override
    public double getDouble() {
        return value;
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addDouble(value);
    }

    @Override
    public String toString() {
        return String.valueOf(value);
    }
}
@@ -0,0 +1,39 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.RecordConsumer;

public class FloatValue extends Primitive {

    private final float value;

    public FloatValue(float value) {
        this.value = value;
    }

    @Override
    public float getFloat() {
        return value;
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addFloat(value);
    }

    @Override
    public String toString() {
        return String.valueOf(value);
    }
}
@@ -0,0 +1,40 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;

public class Int96Value extends Primitive {

    private final Binary value;

    public Int96Value(Binary value) {
        this.value = value;
    }

    @Override
    public Binary getInt96() {
        return value;
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addBinary(value);
    }

    @Override
    public String toString() {
        return "Int96Value{" + String.valueOf(value) + "}";
    }
}
@@ -0,0 +1,39 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.RecordConsumer;

public class IntegerValue extends Primitive {

    private final int value;

    public IntegerValue(int value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return String.valueOf(value);
    }

    @Override
    public int getInteger() {
        return value;
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addInteger(value);
    }
}
@@ -0,0 +1,39 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.RecordConsumer;

public class LongValue extends Primitive {

    private final long value;

    public LongValue(long value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return String.valueOf(value);
    }

    @Override
    public long getLong() {
        return value;
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addLong(value);
    }
}
@@ -0,0 +1,74 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.apache.parquet.Preconditions;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;

public class NanoTime extends Primitive {
    private final int julianDay;
    private final long timeOfDayNanos;

    public static NanoTime fromBinary(Binary bytes) {
        Preconditions.checkArgument(bytes.length() == 12, "Must be 12 bytes");
        ByteBuffer buf = bytes.toByteBuffer();
        buf.order(ByteOrder.LITTLE_ENDIAN);
        long timeOfDayNanos = buf.getLong();
        int julianDay = buf.getInt();
        return new NanoTime(julianDay, timeOfDayNanos);
    }

    public static NanoTime fromInt96(Int96Value int96) {
        // The INT96 layout produced by toBinary() is little-endian: 8 bytes of
        // time-of-day nanos followed by 4 bytes of Julian day. Decode through
        // fromBinary() so the byte order and field order stay consistent,
        // instead of reading an int then a long in big-endian order.
        return fromBinary(int96.getInt96());
    }

    public NanoTime(int julianDay, long timeOfDayNanos) {
        this.julianDay = julianDay;
        this.timeOfDayNanos = timeOfDayNanos;
    }

    public int getJulianDay() {
        return julianDay;
    }

    public long getTimeOfDayNanos() {
        return timeOfDayNanos;
    }

    public Binary toBinary() {
        ByteBuffer buf = ByteBuffer.allocate(12);
        buf.order(ByteOrder.LITTLE_ENDIAN);
        buf.putLong(timeOfDayNanos);
        buf.putInt(julianDay);
        buf.flip();
        return Binary.fromConstantByteBuffer(buf);
    }

    public Int96Value toInt96() {
        return new Int96Value(toBinary());
    }

    @Override
    public void writeValue(RecordConsumer recordConsumer) {
        recordConsumer.addBinary(toBinary());
    }

    @Override
    public String toString() {
        return "NanoTime{julianDay=" + julianDay + ", timeOfDayNanos=" + timeOfDayNanos + "}";
    }
}
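The INT96 layout above is little-endian: eight bytes of time-of-day nanos followed by four bytes of Julian day. An illustrative round trip through the helpers:

    NanoTime nt = new NanoTime(2440588, 42L); // Julian day of 1970-01-01, 42ns into the day
    NanoTime back = NanoTime.fromBinary(nt.toBinary());
    // back.getJulianDay() == 2440588 && back.getTimeOfDayNanos() == 42L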
@@ -0,0 +1,54 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;

public abstract class Primitive {

    public String getString() {
        throw new UnsupportedOperationException();
    }

    public int getInteger() {
        throw new UnsupportedOperationException();
    }

    public long getLong() {
        throw new UnsupportedOperationException();
    }

    public boolean getBoolean() {
        throw new UnsupportedOperationException();
    }

    public Binary getBinary() {
        throw new UnsupportedOperationException();
    }

    public Binary getInt96() {
        throw new UnsupportedOperationException();
    }

    public float getFloat() {
        throw new UnsupportedOperationException();
    }

    public double getDouble() {
        throw new UnsupportedOperationException();
    }

    public abstract void writeValue(RecordConsumer recordConsumer);

}
@@ -0,0 +1,274 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import java.util.ArrayList;
import java.util.List;

import org.talend.parquet.data.Group;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.Type;

public class SimpleGroup extends Group {

    private final GroupType schema;
    private final List<Object>[] data;

    @SuppressWarnings("unchecked")
    public SimpleGroup(GroupType schema) {
        this.schema = schema;
        this.data = new List[schema.getFields().size()];
        for (int i = 0; i < schema.getFieldCount(); i++) {
            this.data[i] = new ArrayList<>();
        }
    }

    @Override
    public String toString() {
        return toString("");
    }

    private StringBuilder appendToString(StringBuilder builder, String indent) {
        int i = 0;
        for (Type field : schema.getFields()) {
            String name = field.getName();
            List<Object> values = data[i];
            ++i;
            if (values != null && !values.isEmpty()) {
                for (Object value : values) {
                    builder.append(indent).append(name);
                    if (value == null) {
                        builder.append(": NULL\n");
                    } else if (value instanceof Group) {
                        builder.append('\n');
                        ((SimpleGroup) value).appendToString(builder, indent + "  ");
                    } else {
                        builder.append(": ").append(value.toString()).append('\n');
                    }
                }
            }
        }
        return builder;
    }

    public String toString(String indent) {
        StringBuilder builder = new StringBuilder();
        appendToString(builder, indent);
        return builder.toString();
    }

    @Override
    public Group addGroup(int fieldIndex) {
        SimpleGroup g = new SimpleGroup(schema.getType(fieldIndex).asGroupType());
        add(fieldIndex, g);
        return g;
    }

    @Override
    public Group getGroup(int fieldIndex, int index) {
        return (Group) getValue(fieldIndex, index);
    }

    private Object getValue(int fieldIndex, int index) {
        List<Object> list;
        try {
            list = data[fieldIndex];
        } catch (IndexOutOfBoundsException e) {
            throw new RuntimeException(
                    "not found " + fieldIndex + "(" + schema.getFieldName(fieldIndex) + ") in group:\n" + this);
        }
        try {
            if (list == null || list.isEmpty()) {
                return null;
            }
            return list.get(index);
        } catch (IndexOutOfBoundsException e) {
            throw new RuntimeException("not found " + fieldIndex + "(" + schema.getFieldName(fieldIndex)
                    + ") element number " + index + " in group:\n" + this);
        }
    }

    private void add(int fieldIndex, Primitive value) {
        Type type = schema.getType(fieldIndex);
        List<Object> list = data[fieldIndex];
        if (!type.isRepetition(Type.Repetition.REPEATED) && !list.isEmpty()) {
            throw new IllegalStateException(
                    "field " + fieldIndex + " (" + type.getName() + ") can not have more than one value: " + list);
        }
        list.add(value);
    }

    @Override
    public int getFieldRepetitionCount(int fieldIndex) {
        List<Object> list = data[fieldIndex];
        return list == null ? 0 : list.size();
    }

    @Override
    public String getValueToString(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return String.valueOf(value);
    }

    @Override
    public String getString(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((BinaryValue) value).getString();
    }

    @Override
    public Integer getInteger(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((IntegerValue) value).getInteger();
    }

    @Override
    public Long getLong(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((LongValue) value).getLong();
    }

    @Override
    public Double getDouble(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((DoubleValue) value).getDouble();
    }

    @Override
    public Float getFloat(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((FloatValue) value).getFloat();
    }

    @Override
    public Boolean getBoolean(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((BooleanValue) value).getBoolean();
    }

    @Override
    public Binary getBinary(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((BinaryValue) value).getBinary();
    }

    public NanoTime getTimeNanos(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return NanoTime.fromInt96((Int96Value) value);
    }

    @Override
    public Binary getInt96(int fieldIndex, int index) {
        Object value = getValue(fieldIndex, index);
        if (value == null) {
            return null;
        }
        return ((Int96Value) value).getInt96();
    }

    @Override
    public void add(int fieldIndex, int value) {
        add(fieldIndex, new IntegerValue(value));
    }

    @Override
    public void add(int fieldIndex, long value) {
        add(fieldIndex, new LongValue(value));
    }

    @Override
    public void add(int fieldIndex, String value) {
        add(fieldIndex, new BinaryValue(Binary.fromString(value)));
    }

    @Override
    public void add(int fieldIndex, NanoTime value) {
        add(fieldIndex, value.toInt96());
    }

    @Override
    public void add(int fieldIndex, boolean value) {
        add(fieldIndex, new BooleanValue(value));
    }

    @Override
    public void add(int fieldIndex, Binary value) {
        switch (getType().getType(fieldIndex).asPrimitiveType().getPrimitiveTypeName()) {
        case BINARY:
        case FIXED_LEN_BYTE_ARRAY:
            add(fieldIndex, new BinaryValue(value));
            break;
        case INT96:
            add(fieldIndex, new Int96Value(value));
            break;
        default:
            // report the offending field's primitive type; getType() itself is
            // the group schema and cannot be cast to a primitive type
            throw new UnsupportedOperationException(
                    getType().getType(fieldIndex).asPrimitiveType().getPrimitiveTypeName()
                            + " not supported for Binary");
        }
    }

    @Override
    public void add(int fieldIndex, float value) {
        add(fieldIndex, new FloatValue(value));
    }

    @Override
    public void add(int fieldIndex, double value) {
        add(fieldIndex, new DoubleValue(value));
    }

    @Override
    public void add(int fieldIndex, Group value) {
        data[fieldIndex].add(value);
    }

    @Override
    public GroupType getType() {
        return schema;
    }

    @Override
    public void writeValue(int field, int index, RecordConsumer recordConsumer) {
        ((Primitive) getValue(field, index)).writeValue(recordConsumer);
    }

}
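An illustrative sketch of populating a SimpleGroup by field index (the schema string is made up for the example; SimpleGroupFactory is defined just below):

    MessageType schema = MessageTypeParser.parseMessageType(
            "message example { required binary name (UTF8); optional int32 age; }");
    Group g = new SimpleGroupFactory(schema).newGroup();
    g.add(0, "alice"); // wrapped in a BinaryValue
    g.add(1, 42);      // wrapped in an IntegerValue
    System.out.println(g); // "name: alice" / "age: 42" via appendToString()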
@@ -0,0 +1,32 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple;

import org.talend.parquet.data.Group;
import org.talend.parquet.data.GroupFactory;
import org.apache.parquet.schema.MessageType;

public class SimpleGroupFactory extends GroupFactory {

    private final MessageType schema;

    public SimpleGroupFactory(MessageType schema) {
        this.schema = schema;
    }

    @Override
    public Group newGroup() {
        return new SimpleGroup(schema);
    }

}
@@ -0,0 +1,51 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple.convert;

import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.SimpleGroupFactory;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.io.api.RecordMaterializer;
import org.apache.parquet.schema.MessageType;

public class GroupRecordConverter extends RecordMaterializer<Group> {

    private final SimpleGroupFactory simpleGroupFactory;

    private SimpleGroupConverter root;

    public GroupRecordConverter(MessageType schema) {
        this.simpleGroupFactory = new SimpleGroupFactory(schema);
        this.root = new SimpleGroupConverter(null, 0, schema) {
            @Override
            public void start() {
                this.current = simpleGroupFactory.newGroup();
            }

            @Override
            public void end() {
            }
        };
    }

    @Override
    public Group getCurrentRecord() {
        return root.getCurrentRecord();
    }

    @Override
    public GroupConverter getRootConverter() {
        return root;
    }

}
@@ -0,0 +1,61 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple.convert;

import org.talend.parquet.data.Group;
import org.apache.parquet.io.api.Converter;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.Type;

class SimpleGroupConverter extends GroupConverter {
    private final SimpleGroupConverter parent;
    private final int index;
    protected Group current;
    private Converter[] converters;

    SimpleGroupConverter(SimpleGroupConverter parent, int index, GroupType schema) {
        this.parent = parent;
        this.index = index;

        converters = new Converter[schema.getFieldCount()];

        for (int i = 0; i < converters.length; i++) {
            final Type type = schema.getType(i);
            if (type.isPrimitive()) {
                converters[i] = new SimplePrimitiveConverter(this, i);
            } else {
                converters[i] = new SimpleGroupConverter(this, i, type.asGroupType());
            }
        }
    }

    @Override
    public void start() {
        current = parent.getCurrentRecord().addGroup(index);
    }

    @Override
    public Converter getConverter(int fieldIndex) {
        return converters[fieldIndex];
    }

    @Override
    public void end() {
    }

    public Group getCurrentRecord() {
        return current;
    }
}
@@ -0,0 +1,88 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.data.simple.convert;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.PrimitiveConverter;

class SimplePrimitiveConverter extends PrimitiveConverter {

    private final SimpleGroupConverter parent;
    private final int index;

    SimplePrimitiveConverter(SimpleGroupConverter parent, int index) {
        this.parent = parent;
        this.index = index;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.apache.parquet.io.api.PrimitiveConverter#addBinary(Binary)
     */
    @Override
    public void addBinary(Binary value) {
        parent.getCurrentRecord().add(index, value);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.apache.parquet.io.api.PrimitiveConverter#addBoolean(boolean)
     */
    @Override
    public void addBoolean(boolean value) {
        parent.getCurrentRecord().add(index, value);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.apache.parquet.io.api.PrimitiveConverter#addDouble(double)
     */
    @Override
    public void addDouble(double value) {
        parent.getCurrentRecord().add(index, value);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.apache.parquet.io.api.PrimitiveConverter#addFloat(float)
     */
    @Override
    public void addFloat(float value) {
        parent.getCurrentRecord().add(index, value);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.apache.parquet.io.api.PrimitiveConverter#addInt(int)
     */
    @Override
    public void addInt(int value) {
        parent.getCurrentRecord().add(index, value);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.apache.parquet.io.api.PrimitiveConverter#addLong(long)
     */
    @Override
    public void addLong(long value) {
        parent.getCurrentRecord().add(index, value);
    }

}
@@ -0,0 +1,40 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.hadoop;

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.io.api.RecordMaterializer;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.convert.GroupRecordConverter;

public class TalendGroupReadSupport extends ReadSupport<Group> {

    @Override
    public ReadContext init(Configuration configuration,
            Map<String, String> keyValueMetaData, MessageType fileSchema) {
        String partialSchemaString = configuration.get(ReadSupport.PARQUET_READ_SCHEMA);
        MessageType requestedProjection = getSchemaForRead(fileSchema, partialSchemaString);
        return new ReadContext(requestedProjection);
    }

    @Override
    public RecordMaterializer<Group> prepareForRead(Configuration configuration, Map<String, String> keyValueMetaData,
            MessageType fileSchema, ReadContext readContext) {
        return new GroupRecordConverter(readContext.getRequestedSchema());
    }

}
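An illustrative read path through this ReadSupport (the file path is made up; ParquetReader.builder(ReadSupport, Path) is the stock parquet-mr entry point):

    ParquetReader<Group> reader =
            ParquetReader.builder(new TalendGroupReadSupport(), new Path("/tmp/data.parquet")).build();
    Group record;
    while ((record = reader.read()) != null) {
        System.out.println(record);
    }
    reader.close();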
@@ -0,0 +1,81 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.hadoop;

import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.GroupWriter;

public class TalendGroupWriteSupport extends WriteSupport<Group> {

    public static final String PARQUET_SCHEMA = "parquet.talend.schema";

    public static void setSchema(MessageType schema, Configuration configuration) {
        configuration.set(PARQUET_SCHEMA, schema.toString());
    }

    public static MessageType getSchema(Configuration configuration) {
        return parseMessageType(Objects.requireNonNull(configuration.get(PARQUET_SCHEMA), PARQUET_SCHEMA));
    }

    private MessageType schema;
    private GroupWriter groupWriter;
    private Map<String, String> extraMetaData;

    public TalendGroupWriteSupport() {
        this(null, new HashMap<String, String>());
    }

    TalendGroupWriteSupport(MessageType schema) {
        this(schema, new HashMap<String, String>());
    }

    TalendGroupWriteSupport(MessageType schema, Map<String, String> extraMetaData) {
        this.schema = schema;
        this.extraMetaData = extraMetaData;
    }

    @Override
    public String getName() {
        return "Talend";
    }

    @Override
    public WriteContext init(Configuration configuration) {
        // if present, prefer the schema passed to the constructor
        if (schema == null) {
            schema = getSchema(configuration);
        }
        return new WriteContext(schema, this.extraMetaData);
    }

    @Override
    public void prepareForWrite(RecordConsumer recordConsumer) {
        groupWriter = new GroupWriter(recordConsumer, schema);
    }

    @Override
    public void write(Group record) {
        groupWriter.write(record);
    }

}
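When no MessageType is passed to a constructor, the schema travels through the Configuration under "parquet.talend.schema". An illustrative sketch:

    Configuration conf = new Configuration();
    TalendGroupWriteSupport.setSchema(schema, conf); // stores schema.toString()
    // init(conf) later parses the same string back via getSchema(conf)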
@@ -0,0 +1,30 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.hadoop;

import org.apache.parquet.hadoop.ParquetInputFormat;
import org.talend.parquet.data.Group;

/**
 * Example input format to read Parquet files
 *
 * This input format uses a rather inefficient data model but works
 * independently of higher level abstractions.
 */
public class TalendInputFormat extends ParquetInputFormat<Group> {

    public TalendInputFormat() {
        super(TalendGroupReadSupport.class);
    }

}
@@ -0,0 +1,54 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.hadoop;

import org.apache.hadoop.mapreduce.Job;
import org.apache.parquet.hadoop.ParquetOutputFormat;
import org.apache.parquet.hadoop.util.ContextUtil;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;

/**
 * An example output format.
 *
 * The schema must be provided up front.
 *
 * @see TalendOutputFormat#setSchema(Job, MessageType)
 * @see TalendGroupWriteSupport#PARQUET_SCHEMA
 */
public class TalendOutputFormat extends ParquetOutputFormat<Group> {

    /**
     * set the schema being written to the job conf
     *
     * @param job    a job
     * @param schema the schema of the data
     */
    public static void setSchema(Job job, MessageType schema) {
        TalendGroupWriteSupport.setSchema(schema, ContextUtil.getConfiguration(job));
    }

    /**
     * retrieve the schema from the conf
     *
     * @param job a job
     * @return the schema
     */
    public static MessageType getSchema(Job job) {
        return TalendGroupWriteSupport.getSchema(ContextUtil.getConfiguration(job));
    }

    public TalendOutputFormat() {
        super(new TalendGroupWriteSupport());
    }
}
@@ -0,0 +1,108 @@
/*
 * Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.talend.parquet.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.io.OutputFile;
import org.apache.parquet.schema.MessageType;
import org.talend.parquet.data.Group;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * An example file writer class.
 */
public class TalendParquetWriter extends ParquetWriter<Group> {

    /**
     * Creates a Builder for configuring ParquetWriter with the example object
     * model.
     *
     * @param file the output file to create
     * @return a {@link Builder} to create a {@link ParquetWriter}
     */
    public static Builder builder(Path file) {
        return new Builder(file);
    }

    /**
     * Creates a Builder for configuring ParquetWriter with the example object
     * model.
     *
     * @param file the output file to create
     * @return a {@link Builder} to create a {@link ParquetWriter}
     */
    public static Builder builder(OutputFile file) {
        return new Builder(file);
    }

    /**
     * Create a new {@link TalendParquetWriter}.
     *
     * @param file                 The file name to write to.
     * @param writeSupport         The write support to use (it carries the schema).
     * @param compressionCodecName Compression codec to use, or
     *                             CompressionCodecName.UNCOMPRESSED
     * @param blockSize            the block size threshold.
     * @param pageSize             See parquet write up. Blocks are subdivided into
     *                             pages for alignment and other purposes.
     * @param enableDictionary     Whether to use a dictionary to compress columns.
     * @param conf                 The Configuration to use.
     * @throws IOException if the file cannot be created
     */
    TalendParquetWriter(Path file, WriteSupport<Group> writeSupport, CompressionCodecName compressionCodecName,
            int blockSize, int pageSize, boolean enableDictionary, boolean enableValidation,
            ParquetProperties.WriterVersion writerVersion, Configuration conf) throws IOException {
        super(file, writeSupport, compressionCodecName, blockSize, pageSize, pageSize, enableDictionary,
                enableValidation, writerVersion, conf);
    }

    public static class Builder extends ParquetWriter.Builder<Group, Builder> {
        private MessageType type = null;
        private Map<String, String> extraMetaData = new HashMap<String, String>();

        private Builder(Path file) {
            super(file);
        }

        private Builder(OutputFile file) {
            super(file);
        }

        public Builder withType(MessageType type) {
            this.type = type;
            return this;
        }

        public Builder withExtraMetaData(Map<String, String> extraMetaData) {
            this.extraMetaData = extraMetaData;
            return this;
        }

        @Override
        protected Builder self() {
            return this;
        }

        @Override
        protected WriteSupport<Group> getWriteSupport(Configuration conf) {
            return new TalendGroupWriteSupport(type, extraMetaData);
        }

    }
}
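An illustrative use of the builder above (the output path and codec choice are made up):

    ParquetWriter<Group> writer = TalendParquetWriter.builder(new Path("/tmp/out.parquet"))
            .withType(schema) // the MessageType is required; getWriteSupport() passes it on
            .withCompressionCodec(CompressionCodecName.SNAPPY)
            .build();
    writer.write(group);
    writer.close();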
@@ -0,0 +1,131 @@
package org.talend.parquet.utils;

import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

import org.talend.parquet.data.simple.NanoTime;

import jodd.time.JulianDate;

public class NanoTimeUtils {

    /**
     * Number of days between Julian day epoch (January 1, 4713 BC) and Unix day
     * epoch (January 1, 1970). The value of this constant is {@value}.
     */
    public static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588;
    private static final long MILLIS_IN_DAY = TimeUnit.DAYS.toMillis(1);
    private static final long NANOS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);

    static final long NANOS_PER_HOUR = TimeUnit.HOURS.toNanos(1);
    static final long NANOS_PER_MINUTE = TimeUnit.MINUTES.toNanos(1);
    static final long NANOS_PER_SECOND = TimeUnit.SECONDS.toNanos(1);
    static final long NANOS_PER_DAY = TimeUnit.DAYS.toNanos(1);

    private static final ThreadLocal<Calendar> parquetGMTCalendar = new ThreadLocal<>();
    private static final ThreadLocal<Calendar> parquetLocalCalendar = new ThreadLocal<>();

    private static Calendar getGMTCalendar() {
        // Calendar.getInstance calculates the current time needlessly, so cache
        // an instance.
        if (parquetGMTCalendar.get() == null) {
            parquetGMTCalendar.set(Calendar.getInstance(TimeZone.getTimeZone("GMT")));
        }
        return parquetGMTCalendar.get();
    }

    private static Calendar getLocalCalendar() {
        if (parquetLocalCalendar.get() == null) {
            parquetLocalCalendar.set(Calendar.getInstance());
        }
        return parquetLocalCalendar.get();
    }

    private static Calendar getCalendar(boolean skipConversion) {
        Calendar calendar = skipConversion ? getLocalCalendar() : getGMTCalendar();
        calendar.clear();
        return calendar;
    }

    /**
     * Converts a timestamp to NanoTime.
     */
    public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) {
        Calendar calendar = getCalendar(skipConversion);
        calendar.setTimeInMillis(ts.getTime());
        int year = calendar.get(Calendar.YEAR);
        if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) {
            year = 1 - year;
        }
        JulianDate jDateTime = JulianDate.of(year, calendar.get(Calendar.MONTH) + 1, // Calendar months are 0-based
                calendar.get(Calendar.DAY_OF_MONTH), 0, 0, 0, 0);
        int days = jDateTime.getJulianDayNumber();

        long hour = calendar.get(Calendar.HOUR_OF_DAY);
        long minute = calendar.get(Calendar.MINUTE);
        long second = calendar.get(Calendar.SECOND);
        long nanos = ts.getNanos();
        long nanosOfDay = nanos + NANOS_PER_SECOND * second + NANOS_PER_MINUTE * minute + NANOS_PER_HOUR * hour;

        return new NanoTime(days, nanosOfDay);
    }

    public static Timestamp getTimestamp(NanoTime nt, boolean skipConversion) {
        int julianDay = nt.getJulianDay();
        long nanosOfDay = nt.getTimeOfDayNanos();

        long remainder = nanosOfDay;
        julianDay += remainder / NANOS_PER_DAY;
        remainder %= NANOS_PER_DAY;
        if (remainder < 0) {
            remainder += NANOS_PER_DAY;
            julianDay--;
        }

        JulianDate jDateTime = new JulianDate((double) julianDay);
        LocalDateTime datetime = jDateTime.toLocalDateTime();
        Calendar calendar = getCalendar(skipConversion);
        calendar.set(Calendar.YEAR, datetime.getYear());
        calendar.set(Calendar.MONTH, datetime.getMonthValue() - 1);
        // use the day of month here, not the year (fixes a copy-paste slip)
        calendar.set(Calendar.DAY_OF_MONTH, datetime.getDayOfMonth());

        int hour = (int) (remainder / (NANOS_PER_HOUR));
        remainder = remainder % (NANOS_PER_HOUR);
        int minutes = (int) (remainder / (NANOS_PER_MINUTE));
        remainder = remainder % (NANOS_PER_MINUTE);
        int seconds = (int) (remainder / (NANOS_PER_SECOND));
        long nanos = remainder % NANOS_PER_SECOND;

        calendar.set(Calendar.HOUR_OF_DAY, hour);
        calendar.set(Calendar.MINUTE, minutes);
        calendar.set(Calendar.SECOND, seconds);
        Timestamp ts = new Timestamp(calendar.getTimeInMillis());
        ts.setNanos((int) nanos);
        return ts;
    }

    /**
     * Returns timestamp millis from NanoTime type value.
     *
     * @param nt NanoTime value
     * @return timestamp in millis
     */
    public static long getTimestampMillis(NanoTime nt) {
        long timeOfDayNanos = nt.getTimeOfDayNanos();
        int julianDay = nt.getJulianDay();

        return (julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY + (timeOfDayNanos / NANOS_PER_MILLISECOND);
    }

    public static Timestamp getTimestamp(NanoTime nt) {
        Timestamp ts = new Timestamp(getTimestampMillis(nt));
        ts.setNanos((int) (nt.getTimeOfDayNanos() % 1000000000));
        return ts;
    }
}
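An illustrative round trip with the helpers above (skipConversion=false selects the cached GMT calendar):

    Timestamp ts = Timestamp.valueOf("2021-06-01 12:30:00.000000123");
    NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
    Timestamp back = NanoTimeUtils.getTimestamp(nt, false); // recovers ts, nanos included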
@@ -0,0 +1,231 @@
package org.talend.parquet.utils;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.DecimalMetadata;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.apache.parquet.schema.Type.Repetition;
import org.apache.parquet.schema.Types;
import org.apache.parquet.schema.Types.GroupBuilder;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.NanoTime;

public class TalendParquetUtils {

    public static final String ARRAY_FIELD_NAME = "array";

    public static PrimitiveType createPrimitiveType(String fieldName, boolean nullable, String primitiveType,
            String originalTypeName) {
        OriginalType originalType = null;
        if (originalTypeName != null) {
            originalType = OriginalType.valueOf(originalTypeName);
        }
        return new PrimitiveType((nullable ? Repetition.OPTIONAL : Repetition.REQUIRED),
                PrimitiveTypeName.valueOf(primitiveType), fieldName, originalType);
    }

    public static PrimitiveType createDecimalType(String fieldName, boolean nullable, int precision, int scale) {
        DecimalMetadata decimalMetadata = new DecimalMetadata(precision, scale);
        return new PrimitiveType((nullable ? Repetition.OPTIONAL : Repetition.REQUIRED),
                PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY, 16, fieldName, OriginalType.DECIMAL, decimalMetadata, null);
    }

    public static Type createGroupElementType(String fieldName, Object element) {
        if (element == null) {
            return Types.repeated(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named(fieldName);
        }
        if (String.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named(fieldName);
        } else if (Double.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.DOUBLE).named(fieldName);
        } else if (Float.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.FLOAT).named(fieldName);
        } else if (Byte.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.INT32).as(OriginalType.INT_8).named(fieldName);
        } else if (Short.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.INT32).as(OriginalType.INT_16).named(fieldName);
        } else if (Integer.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.INT32).named(fieldName);
        } else if (Long.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.INT64).named(fieldName);
        } else if (Boolean.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.BOOLEAN).named(fieldName);
        } else if (Date.class.isInstance(element)) {
            return Types.repeated(PrimitiveTypeName.INT64).as(OriginalType.TIMESTAMP_MILLIS).named(fieldName);
        } else if (Group.class.isInstance(element)) {
            return ((Group) element).getType();
        } else {
            throw new IllegalArgumentException("Unsupported type: " + element.getClass().getCanonicalName()
                    + " for group type field '" + fieldName + "'");
        }
    }

    public static GroupType createGroupType(String fieldName, boolean nullable, Object element) {
        GroupBuilder<GroupType> builder = null;
        if (nullable) {
            builder = Types.optionalGroup();
        } else {
            builder = Types.requiredGroup();
        }
        return builder.as(OriginalType.LIST).addField(createGroupElementType(ARRAY_FIELD_NAME, element))
                .named(fieldName);
    }

    /*
     * Here the group is expected to carry a LIST value with a single field.
     */
    public static List<Object> groupFieldValueToList(Group group) {
        if (group == null) {
            return null;
        }
        List<Object> values = new ArrayList<>();
        int listSize = group.getFieldRepetitionCount(0);
        for (int elementIndex = 0; elementIndex < listSize; elementIndex++) {
            Type elementType = group.getType().getType(0);
            if (elementType.isPrimitive()) {
                PrimitiveType pType = elementType.asPrimitiveType();
                switch (pType.getPrimitiveTypeName()) {
                case INT64:
                    if (OriginalType.TIMESTAMP_MILLIS == elementType.getOriginalType()) {
                        values.add(new Date(group.getLong(0, elementIndex)));
                    } else {
                        values.add(group.getLong(0, elementIndex));
                    }
                    break;
                case INT32:
                    values.add(group.getInteger(0, elementIndex));
                    break;
                case BOOLEAN:
                    values.add(group.getBoolean(0, elementIndex));
                    break;
                case INT96:
                    Binary value = group.getInt96(0, elementIndex);
                    if (value != null) {
                        NanoTime nanoTime = NanoTime.fromBinary(value);
                        values.add(new Date(NanoTimeUtils.getTimestamp(nanoTime, false).getTime()));
                    } else {
                        values.add(value);
                    }
                    break;
                case FLOAT:
                    values.add(group.getFloat(0, elementIndex));
                    break;
                case DOUBLE:
                    values.add(group.getDouble(0, elementIndex));
                    break;
                default:
                    values.add(group.getValueToString(0, elementIndex));
                }
            } else {
                values.add(groupFieldValueToList(group.getGroup(0, elementIndex)));
            }
        }
        return values;
    }

    public static void writeGroupField(Group nestGroup, List<?> values) {
        if (values == null || values.isEmpty()) {
            return;
        }
        // only one field is supported currently
        for (Object element : values) {
            if (String.class.isInstance(element)) {
                nestGroup.add(0, (String) element);
            } else if (Double.class.isInstance(element)) {
                nestGroup.add(0, (Double) element);
            } else if (Float.class.isInstance(element)) {
                nestGroup.add(0, (Float) element);
            } else if (Byte.class.isInstance(element)) {
                nestGroup.add(0, (Byte) element);
            } else if (Short.class.isInstance(element)) {
                nestGroup.add(0, (Short) element);
            } else if (Integer.class.isInstance(element)) {
                nestGroup.add(0, (Integer) element);
            } else if (Long.class.isInstance(element)) {
                nestGroup.add(0, (Long) element);
            } else if (Boolean.class.isInstance(element)) {
                nestGroup.add(0, (Boolean) element);
            } else if (Date.class.isInstance(element)) {
                nestGroup.add(0, ((Date) element).getTime());
            } else if (Group.class.isInstance(element)) {
                nestGroup.add(0, (Group) element);
            } else {
                throw new IllegalArgumentException("Unsupported type: " + element.getClass().getCanonicalName()
                        + " for group type field '" + nestGroup + "'");
            }
        }
    }

    public static BigDecimal binaryToDecimal(Binary value, int precision, int scale) {
        /*
         * Precision <= 18 checks for the max number of digits for an unscaled long,
         * else treat with big integer conversion
         */
        if (precision <= 18) {
            ByteBuffer buffer = value.toByteBuffer();
            byte[] bytes = buffer.array();
            int start = buffer.arrayOffset() + buffer.position();
            int end = buffer.arrayOffset() + buffer.limit();
            long unscaled = 0L;
            int i = start;
            while (i < end) {
                unscaled = (unscaled << 8 | bytes[i] & 0xff);
                i++;
            }
            int bits = 8 * (end - start);
            long unscaledNew = (unscaled << (64 - bits)) >> (64 - bits);
            if (scale == 0 || unscaledNew <= -Math.pow(10, 18) || unscaledNew >= Math.pow(10, 18)) {
                return new BigDecimal(unscaledNew);
            } else {
                return BigDecimal.valueOf(unscaledNew / Math.pow(10, scale));
            }
        } else {
            return new BigDecimal(new BigInteger(value.getBytes()), scale);
        }
    }

    public static Binary decimalToBinary(BigDecimal decimalValue, int scale) {
        // First we need to make sure the BigDecimal matches our schema scale:
        decimalValue = decimalValue.setScale(scale, RoundingMode.HALF_UP);

        // Next we get the decimal value as one BigInteger (like there was no decimal
        // point)
        BigInteger unscaledDecimalValue = decimalValue.unscaledValue();

        byte[] decimalBuffer = null;
        // Finally we serialize the integer
        byte[] decimalBytes = unscaledDecimalValue.toByteArray();

        if (decimalValue.compareTo(BigDecimal.ZERO) < 0) {
            decimalBuffer = new byte[] { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
        } else {
            decimalBuffer = new byte[16];
        }
        if (decimalBuffer.length >= decimalBytes.length) {
            // Because we set our fixed byte array size as 16 bytes, we need to
            // pad-left our original value's bytes with 0 or -1
            int decimalBufferIndex = decimalBuffer.length - 1;
            for (int i = decimalBytes.length - 1; i >= 0; i--) {
                decimalBuffer[decimalBufferIndex] = decimalBytes[i];
                decimalBufferIndex--;
            }
        } else {
            throw new IllegalArgumentException(String
                    .format("Decimal size: %d was greater than the allowed max: %d",
                            decimalBytes.length, decimalBuffer.length));
        }
        return Binary.fromReusedByteArray(decimalBuffer);
    }

}
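The two decimal helpers above invert each other for values that fit the 16-byte buffer; an illustrative round trip (a precision above 18 takes the exact BigInteger path in binaryToDecimal):

    BigDecimal in = new BigDecimal("1234.56");
    Binary bin = TalendParquetUtils.decimalToBinary(in, 2); // 16-byte two's complement, scale 2
    BigDecimal out = TalendParquetUtils.binaryToDecimal(bin, 38, 2);
    // out.compareTo(in) == 0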
@@ -0,0 +1,86 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.talend.parquet;

import java.io.IOException;
import java.util.concurrent.Callable;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.statistics.Statistics;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;

public class TestUtils {

	public static void enforceEmptyDir(Configuration conf, Path path) throws IOException {
		FileSystem fs = path.getFileSystem(conf);
		if (fs.exists(path)) {
			if (!fs.delete(path, true)) {
				throw new IOException("cannot delete path " + path);
			}
		}
		if (!fs.mkdirs(path)) {
			throw new IOException("cannot create path " + path);
		}
	}

	/**
	 * A convenience method to avoid a large number of @Test(expected=...) tests
	 *
	 * @param message A String message to describe this assertion
	 * @param expected An Exception class that the Callable should throw
	 * @param callable A Callable that is expected to throw the exception
	 */
	public static void assertThrows(String message, Class<? extends Exception> expected, Callable<?> callable) {
		try {
			callable.call();
			Assert.fail("No exception was thrown (" + message + "), expected: " + expected.getName());
		} catch (Exception actual) {
			try {
				Assert.assertEquals(message, expected, actual.getClass());
			} catch (AssertionError e) {
				e.addSuppressed(actual);
				throw e;
			}
		}
	}

	public static void assertStatsValuesEqual(Statistics<?> stats1, Statistics<?> stats2) {
		assertStatsValuesEqual(null, stats1, stats2);
	}

	// Asserts that the values (min, max, num-of-nulls) are equal. Useful when
	// creating a Statistics object for the proper Type would require too much
	// work/code duplication.
	public static void assertStatsValuesEqual(String message, Statistics<?> expected, Statistics<?> actual) {
		if (expected == actual) {
			return;
		}
		if (expected == null || actual == null) {
			Assert.assertEquals(expected, actual);
		}
		Assert.assertThat(actual, CoreMatchers.instanceOf(expected.getClass()));
		Assert.assertArrayEquals(message, expected.getMaxBytes(), actual.getMaxBytes());
		Assert.assertArrayEquals(message, expected.getMinBytes(), actual.getMinBytes());
		Assert.assertEquals(message, expected.getNumNulls(), actual.getNumNulls());
	}
}
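assertThrows pairs naturally with the decimal helpers above. A minimal sketch, assuming TalendParquetUtils is on the classpath (1e40 rescaled to scale 0 needs 17 bytes, so decimalToBinary rejects it):

    TestUtils.assertThrows("a decimal wider than 16 bytes must be rejected",
            IllegalArgumentException.class, new Callable<Void>() {
                @Override
                public Void call() {
                    TalendParquetUtils.decimalToBinary(new BigDecimal("1e40"), 0);
                    return null;
                }
            });

Since Callable is a functional interface, a lambda works equally well on Java 8.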
@@ -0,0 +1,63 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.talend.parquet.hadoop;

import static org.junit.Assert.assertEquals;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;
import org.junit.Test;
import org.talend.parquet.hadoop.TalendGroupReadSupport;

public class TalendGroupReadSupportTest {

	private String fullSchemaStr = "message example {\n" + "required int32 line;\n" + "optional binary content;\n"
			+ "}";

	private String partialSchemaStr = "message example {\n" + "required int32 line;\n" + "}";

	@Test
	public void testInitWithoutSpecifyingRequestSchema() throws Exception {
		TalendGroupReadSupport s = new TalendGroupReadSupport();
		Configuration configuration = new Configuration();
		Map<String, String> keyValueMetaData = new HashMap<String, String>();
		MessageType fileSchema = MessageTypeParser.parseMessageType(fullSchemaStr);

		ReadSupport.ReadContext context = s.init(configuration, keyValueMetaData, fileSchema);
		assertEquals(fileSchema, context.getRequestedSchema());
	}

	@Test
	public void testInitWithPartialSchema() {
		TalendGroupReadSupport s = new TalendGroupReadSupport();
		Configuration configuration = new Configuration();
		Map<String, String> keyValueMetaData = new HashMap<String, String>();
		MessageType fileSchema = MessageTypeParser.parseMessageType(fullSchemaStr);
		MessageType partialSchema = MessageTypeParser.parseMessageType(partialSchemaStr);
		configuration.set(ReadSupport.PARQUET_READ_SCHEMA, partialSchemaStr);

		ReadSupport.ReadContext context = s.init(configuration, keyValueMetaData, fileSchema);
		assertEquals(partialSchema, context.getRequestedSchema());
	}
}
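The partial-schema test shows that column projection is driven entirely by PARQUET_READ_SCHEMA. A minimal read-side sketch, assuming TalendGroupReadSupport yields org.talend.parquet.data.Group rows as its name suggests (the file path is a placeholder):

    Configuration conf = new Configuration();
    // Project down to the `line` column; `content` is never materialized.
    conf.set(ReadSupport.PARQUET_READ_SCHEMA, "message example {\n" + "required int32 line;\n" + "}");
    ParquetReader<Group> reader = ParquetReader
            .builder(new TalendGroupReadSupport(), new Path("target/example.parquet"))
            .withConf(conf)
            .build();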
@@ -0,0 +1,169 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.talend.parquet.hadoop;

import static java.util.Arrays.asList;
import static org.apache.parquet.column.Encoding.DELTA_BYTE_ARRAY;
import static org.apache.parquet.column.Encoding.PLAIN;
import static org.apache.parquet.column.Encoding.PLAIN_DICTIONARY;
import static org.apache.parquet.column.Encoding.RLE_DICTIONARY;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER;
import static org.apache.parquet.hadoop.ParquetFileReader.readFooter;
import static org.apache.parquet.hadoop.metadata.CompressionCodecName.UNCOMPRESSED;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import static org.apache.parquet.schema.Type.Repetition.REQUIRED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.Encoding;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.column.ParquetProperties.WriterVersion;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.InvalidSchemaException;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Types;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.talend.parquet.TestUtils;

public class TestParquetWriter {

	@Test
	public void test() throws Exception {
		Configuration conf = new Configuration();
		Path root = new Path("target/tests/TestParquetWriter/");
		TestUtils.enforceEmptyDir(conf, root);
		MessageType schema = parseMessageType(
				"message test { "
				+ "required binary binary_field; "
				+ "required int32 int32_field; "
				+ "required int64 int64_field; "
				+ "required boolean boolean_field; "
				+ "required float float_field; "
				+ "required double double_field; "
				+ "required fixed_len_byte_array(3) flba_field; "
				+ "required int96 int96_field; "
				+ "} ");
		GroupWriteSupport.setSchema(schema, conf);
		SimpleGroupFactory f = new SimpleGroupFactory(schema);
		Map<String, Encoding> expected = new HashMap<String, Encoding>();
		expected.put("10-" + ParquetProperties.WriterVersion.PARQUET_1_0, PLAIN_DICTIONARY);
		expected.put("1000-" + ParquetProperties.WriterVersion.PARQUET_1_0, PLAIN);
		expected.put("10-" + ParquetProperties.WriterVersion.PARQUET_2_0, RLE_DICTIONARY);
		expected.put("1000-" + ParquetProperties.WriterVersion.PARQUET_2_0, DELTA_BYTE_ARRAY);
		for (int modulo : asList(10, 1000)) {
			for (WriterVersion version : WriterVersion.values()) {
				Path file = new Path(root, version.name() + "_" + modulo);
				ParquetWriter<Group> writer = new ParquetWriter<Group>(
						file,
						new GroupWriteSupport(),
						UNCOMPRESSED, 1024, 1024, 512, true, false, version, conf);
				for (int i = 0; i < 1000; i++) {
					writer.write(
							f.newGroup()
									.append("binary_field", "test" + (i % modulo))
									.append("int32_field", 32)
									.append("int64_field", 64L)
									.append("boolean_field", true)
									.append("float_field", 1.0f)
									.append("double_field", 2.0d)
									.append("flba_field", "foo")
									.append("int96_field", Binary.fromConstantByteArray(new byte[12])));
				}
				writer.close();
				ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file).withConf(conf).build();
				for (int i = 0; i < 1000; i++) {
					Group group = reader.read();
					assertEquals("test" + (i % modulo), group.getBinary("binary_field", 0).toStringUsingUTF8());
					assertEquals(32, group.getInteger("int32_field", 0));
					assertEquals(64L, group.getLong("int64_field", 0));
					assertTrue(group.getBoolean("boolean_field", 0));
					assertEquals(1.0f, group.getFloat("float_field", 0), 0.001);
					assertEquals(2.0d, group.getDouble("double_field", 0), 0.001);
					assertEquals("foo", group.getBinary("flba_field", 0).toStringUsingUTF8());
					assertEquals(Binary.fromConstantByteArray(new byte[12]),
							group.getInt96("int96_field", 0));
				}
				reader.close();
				ParquetMetadata footer = readFooter(conf, file, NO_FILTER);
				for (BlockMetaData blockMetaData : footer.getBlocks()) {
					for (ColumnChunkMetaData column : blockMetaData.getColumns()) {
						if (column.getPath().toDotString().equals("binary_field")) {
							String key = modulo + "-" + version;
							Encoding expectedEncoding = expected.get(key);
							assertTrue(
									key + ":" + column.getEncodings() + " should contain " + expectedEncoding,
									column.getEncodings().contains(expectedEncoding));
						}
					}
				}
				assertEquals("Object model property should be example",
						"example", footer.getFileMetaData().getKeyValueMetaData()
								.get(ParquetWriter.OBJECT_MODEL_NAME_PROP));
			}
		}
	}

	@Rule
	public TemporaryFolder temp = new TemporaryFolder();

	@Test
	public void testBadWriteSchema() throws IOException {
		final File file = temp.newFile("test.parquet");
		file.delete();

		TestUtils.assertThrows("Should reject a schema with an empty group",
				InvalidSchemaException.class, new Callable<Void>() {
					@Override
					public Void call() throws IOException {
						ExampleParquetWriter.builder(new Path(file.toString()))
								.withType(Types.buildMessage()
										.addField(new GroupType(REQUIRED, "invalid_group"))
										.named("invalid_message"))
								.build();
						return null;
					}
				});

		Assert.assertFalse("Should not create a file when schema is rejected",
				file.exists());
	}
}
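The multi-argument ParquetWriter constructor used above is deprecated in recent parquet-mr releases; a sketch of the equivalent builder configuration with ExampleParquetWriter (same values as the constructor call):

    ParquetWriter<Group> writer = ExampleParquetWriter.builder(file)
            .withConf(conf)                     // schema comes from GroupWriteSupport.setSchema
            .withCompressionCodec(UNCOMPRESSED)
            .withRowGroupSize(1024)             // block size
            .withPageSize(1024)
            .withDictionaryPageSize(512)
            .withDictionaryEncoding(true)
            .withValidation(false)
            .withWriterVersion(version)
            .build();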
@@ -0,0 +1,286 @@
package org.talend.parquet.util;

import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import static org.apache.parquet.schema.OriginalType.DECIMAL;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY;
import static org.apache.parquet.schema.Type.Repetition.REQUIRED;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;

import org.apache.parquet.schema.DecimalMetadata;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.talend.parquet.data.Group;
import org.talend.parquet.data.simple.SimpleGroup;
import org.talend.parquet.utils.TalendParquetUtils;

public class TalendParquetUtilsTest {

	@Test
	public void testGetStringList() {
		MessageType schema = parseMessageType("message Schema { " //
				+ " optional int64 field0; " //
				+ " optional group field1 { " //
				+ " repeated binary field2 (UTF8); " //
				+ " } " //
				+ "}"); //

		Group group = new SimpleGroup(schema.getType(1).asGroupType());
		group.add(0, "element 1");
		group.add(0, "element 2");
		group.add(0, "element 3");
		group.add(0, "element 4");

		List<Object> values = TalendParquetUtils.groupFieldValueToList(group);
		MatcherAssert.assertThat("", values, Matchers.contains("element 1", "element 2", "element 3", "element 4"));

	}

	@Test
	public void testGetIntList() {
		MessageType schema = parseMessageType("message Schema { " //
				+ " optional int64 field0; " //
				+ " optional group field1 { " //
				+ " repeated int32 field2; " //
				+ " } " //
				+ "}"); //

		Group group = new SimpleGroup(schema.getType(1).asGroupType());
		group.add(0, 123);
		group.add(0, 345);
		group.add(0, 431);

		List<Object> values = TalendParquetUtils.groupFieldValueToList(group);
		MatcherAssert.assertThat("", values, Matchers.contains(123, 345, 431));

	}

	@SuppressWarnings("unchecked")
	@Test
	public void testNestGroupList() {
		MessageType schema = parseMessageType("message Schema { " //
				+ " optional int64 field0; " //
				+ " optional group field1 { " //
				+ " repeated group field2 {" //
				+ " repeated double field3;" //
				+ " } " //
				+ " } " //
				+ "}"); //

		Group group = new SimpleGroup(schema.getType(1).asGroupType());

		Group nest1 = new SimpleGroup(schema.getType(1).asGroupType().getType(0).asGroupType());
		nest1.add(0, 123.0);
		nest1.add(0, 345.0);
		nest1.add(0, 431.0);

		Group nest2 = new SimpleGroup(schema.getType(1).asGroupType().getType(0).asGroupType());
		nest2.add(0, 2123.0);
		nest2.add(0, 2345.0);
		nest2.add(0, 2431.0);

		group.add(0, nest1);
		group.add(0, nest2);

		List<Object> values = TalendParquetUtils.groupFieldValueToList(group);

		MatcherAssert.assertThat("", (List<Object>) values.get(0), Matchers.contains(123.0, 345.0, 431.0));
		MatcherAssert.assertThat("", (List<Object>) values.get(1), Matchers.contains(2123.0, 2345.0, 2431.0));

	}

	@Test
	public void testNullGroupList() {
		List<Object> values = TalendParquetUtils.groupFieldValueToList(null);
		Assert.assertNull(values);
	}

	@Test
	public void testCreateGroupElementType() {
		Type emptyElement = TalendParquetUtils.createGroupElementType("field0", null);
		Assert.assertEquals(PrimitiveTypeName.BINARY, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupElementType("field0", "1");
		Assert.assertEquals(PrimitiveTypeName.BINARY, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupElementType("field0", 1.0);
		Assert.assertEquals(PrimitiveTypeName.DOUBLE, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupElementType("field0", 1.0f);
		Assert.assertEquals(PrimitiveTypeName.FLOAT, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupElementType("field0", 1);
		Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupElementType("field0", 1L);
		Assert.assertEquals(PrimitiveTypeName.INT64, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupElementType("field0", true);
		Assert.assertEquals(PrimitiveTypeName.BOOLEAN, emptyElement.asPrimitiveType().getPrimitiveTypeName());

		// Nested group
		MessageType schema = parseMessageType("message Schema { " //
				+ " optional group field1 { " //
				+ " repeated group field2 {" //
				+ " repeated double field3;" //
				+ " } " //
				+ " } " //
				+ "}"); //
		Group group = new SimpleGroup(schema.getType(0).asGroupType());
		Group nest1 = new SimpleGroup(schema.getType(0).asGroupType().getType(0).asGroupType());
		nest1.add(0, 123.0);
		nest1.add(0, 345.0);
		nest1.add(0, 431.0);
		Group nest2 = new SimpleGroup(schema.getType(0).asGroupType().getType(0).asGroupType());
		nest2.add(0, 2123.0);
		nest2.add(0, 2345.0);
		nest2.add(0, 2431.0);

		group.add(0, nest1);
		group.add(0, nest2);
		Assert.assertFalse("Should be group type", group.getType().isPrimitive());

		Assert.assertEquals(2, group.getFieldRepetitionCount(0));

		emptyElement = TalendParquetUtils.createGroupElementType("field0", group);
		Assert.assertFalse("Should be group type", emptyElement.isPrimitive());
		Assert.assertEquals(schema.getType(0).asGroupType(), emptyElement);
	}

	@Test
	public void testCreateGroupType() {
		GroupType emptyElement = TalendParquetUtils.createGroupType("field0", true, null);
		Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
		Assert.assertEquals(OriginalType.UTF8, emptyElement.getType(0).asPrimitiveType().getOriginalType());

		emptyElement = TalendParquetUtils.createGroupType("field0", true, 2);
		Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
		Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.getType(0).asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupType("field0", true, Byte.valueOf("1"));
		Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
		Assert.assertEquals(OriginalType.INT_8, emptyElement.getType(0).asPrimitiveType().getOriginalType());
		Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.getType(0).asPrimitiveType().getPrimitiveTypeName());

		emptyElement = TalendParquetUtils.createGroupType("field0", true, Short.valueOf("1"));
		Assert.assertEquals(OriginalType.LIST, emptyElement.asGroupType().getOriginalType());
		Assert.assertEquals(OriginalType.INT_16, emptyElement.getType(0).asPrimitiveType().getOriginalType());
		Assert.assertEquals(PrimitiveTypeName.INT32, emptyElement.getType(0).asPrimitiveType().getPrimitiveTypeName());
	}

	@Test
	public void testWriteGroupField() {
		Group group = null;
		MessageType schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated int32 array;" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		List<?> values = Arrays.asList(1, 2, 3);
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(3, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated int32 array (INT_8);" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList(Byte.valueOf("1"), Byte.valueOf("2"));
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(2, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated int32 array (INT_16);" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList(Short.valueOf("1"));
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(1, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated int64 array;" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList(1L, 2L, 3L);
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(3, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated double array;" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList(1.0, 2.0, 3.0);
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(3, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated float array;" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList(1.0f, 2.0f, 3.0f);
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(3, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated binary array (UTF8);" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList("element 1", "element 2");
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(2, group.getFieldRepetitionCount(0));

		schema = parseMessageType("message Schema { " //
				+ " optional group field0 (LIST) {" + " repeated boolean array;" + " } " //
				+ "}"); //
		group = new SimpleGroup(schema.getType(0).asGroupType());
		values = Arrays.asList(true, false);
		TalendParquetUtils.writeGroupField(group, values);
		Assert.assertEquals(2, group.getFieldRepetitionCount(0));
	}

	@Test
	public void testDecimalAnnotation() {
		Group group = null;
		MessageType schema = new MessageType("DecimalMessage", new PrimitiveType(REQUIRED, FIXED_LEN_BYTE_ARRAY, 16,
				"aDecimal", DECIMAL, new DecimalMetadata(38, 2), null));
		BigDecimal decimalValue = new BigDecimal("1234423199.9999");

		group = new SimpleGroup(schema);
		group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 5));
		Assert.assertEquals(decimalValue.setScale(5), TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 5));

		group = new SimpleGroup(schema);
		group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 4));
		Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 4));

		decimalValue = new BigDecimal("1234");
		group = new SimpleGroup(schema);
		group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 0));
		Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 10, 0));

		decimalValue = new BigDecimal("-93.5788130000");
		group = new SimpleGroup(schema);
		group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 10));
		Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 10));

		decimalValue = new BigDecimal("-0.00");
		group = new SimpleGroup(schema);
		group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 2));
		Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 2));

		decimalValue = new BigDecimal("0.000");
		group = new SimpleGroup(schema);
		group.append("aDecimal", TalendParquetUtils.decimalToBinary(decimalValue, 3));
		Assert.assertEquals(decimalValue, TalendParquetUtils.binaryToDecimal(group.getBinary(0, 0), 38, 3));

	}

}
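The -0.00 and 0.000 cases above pass only because the round trip preserves the scale exactly: BigDecimal.equals compares scale as well as numeric value, unlike compareTo. A quick illustration:

    Assert.assertFalse(new BigDecimal("0.000").equals(new BigDecimal("0")));        // scales 3 vs 0
    Assert.assertEquals(0, new BigDecimal("0.000").compareTo(new BigDecimal("0"))); // numerically equal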
@@ -0,0 +1,59 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.talend.components</groupId>
    <artifactId>talend-scp-helper</artifactId>
    <version>1.1</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <ssd.version>2.8.0</ssd.version>
        <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
    </properties>

    <distributionManagement>
        <snapshotRepository>
            <id>talend_nexus_deployment</id>
            <url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
            <releases>
                <enabled>false</enabled>
            </releases>
        </snapshotRepository>
        <repository>
            <id>talend_nexus_deployment</id>
            <url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
            <releases>
                <enabled>true</enabled>
            </releases>
        </repository>
    </distributionManagement>

    <dependencies>
        <dependency>
            <groupId>org.apache.sshd</groupId>
            <artifactId>sshd-scp</artifactId>
            <version>${ssd.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.sshd</groupId>
            <artifactId>sshd-common</artifactId>
            <version>${ssd.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.sshd</groupId>
            <artifactId>sshd-core</artifactId>
            <version>${ssd.version}</version>
        </dependency>
    </dependencies>
</project>
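The three sshd artifacts above are meant to be used together; a minimal sketch of bootstrapping an authenticated session with the Apache MINA sshd 2.8 client API (host, user, and password are placeholders):

    SshClient client = SshClient.setUpDefaultClient();   // from sshd-core
    client.start();
    ClientSession session = client.connect("user", "host.example.com", 22)
            .verify()
            .getSession();
    session.addPasswordIdentity("secret");               // placeholder credential
    session.auth().verify();
    // The session can now be handed to the SCP client defined below.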
@@ -0,0 +1,117 @@
package org.talend.components.talendscp;

import org.apache.sshd.client.channel.ChannelExec;
import org.apache.sshd.scp.client.AbstractScpClient;
import org.apache.sshd.scp.client.DefaultScpClient;
import org.apache.sshd.scp.client.DefaultScpStreamResolver;
import org.apache.sshd.scp.client.ScpClient;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.common.FactoryManager;
import org.apache.sshd.common.file.FileSystemFactory;
import org.apache.sshd.common.file.util.MockFileSystem;
import org.apache.sshd.common.file.util.MockPath;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpHelper;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.scp.common.ScpTransferEventListener;
import org.apache.sshd.scp.common.helpers.ScpTimestampCommandDetails;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.FileSystem;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;

/*
 * This class overrides DefaultScpClient so that ScpHelper throws an exception
 * when scp returns status 1, keeping the old (strict) behavior.
 */

public class TalendScpClient extends DefaultScpClient {

    public TalendScpClient(ClientSession clientSession, ScpFileOpener fileOpener,
            ScpTransferEventListener eventListener) {
        super(clientSession, fileOpener, eventListener);
    }

    @Override public void upload(InputStream local, String remote, long size, Collection<PosixFilePermission> perms,
            ScpTimestampCommandDetails time) throws IOException {

        int namePos = ValidateUtils.checkNotNullAndNotEmpty(remote, "No remote location specified").lastIndexOf('/');
        String name = (namePos < 0) ?
                remote :
                ValidateUtils.checkNotNullAndNotEmpty(remote.substring(namePos + 1), "No name value in remote=%s",
                        remote);
        Collection<Option> options = (time != null) ? EnumSet.of(Option.PreserveAttributes) : Collections.emptySet();
        String cmd = ScpClient.createSendCommand(remote, options);
        ClientSession session = getClientSession();
        ChannelExec channel = openCommandChannel(session, cmd);
        try (InputStream invOut = channel.getInvertedOut(); OutputStream invIn = channel.getInvertedIn()) {
            // NOTE: we use a mock file system since we expect no invocations for it
            ScpHelper helper = new TalendScpHelper(session, invOut, invIn, new MockFileSystem(remote), opener, listener);
            Path mockPath = new MockPath(remote);
            helper.sendStream(new DefaultScpStreamResolver(name, mockPath, perms, time, size, local, cmd),
                    options.contains(Option.PreserveAttributes), ScpHelper.DEFAULT_SEND_BUFFER_SIZE);
            handleCommandExitStatus(cmd, channel);
        } finally {
            channel.close(false);
        }
    }

    @Override protected <T> void runUpload(String remote, Collection<Option> options, Collection<T> local,
            AbstractScpClient.ScpOperationExecutor<T> executor) throws IOException {
        local = ValidateUtils.checkNotNullAndNotEmpty(local, "Invalid argument local: %s", local);
        remote = ValidateUtils.checkNotNullAndNotEmpty(remote, "Invalid argument remote: %s", remote);
        if (local.size() > 1) {
            options = addTargetIsDirectory(options);
        }

        String cmd = ScpClient.createSendCommand(remote, options);
        ClientSession session = getClientSession();
        ChannelExec channel = openCommandChannel(session, cmd);
        try {
            FactoryManager manager = session.getFactoryManager();
            FileSystemFactory factory = manager.getFileSystemFactory();
            FileSystem fs = factory.createFileSystem(session);

            try (InputStream invOut = channel.getInvertedOut(); OutputStream invIn = channel.getInvertedIn()) {
                ScpHelper helper = new TalendScpHelper(session, invOut, invIn, fs, opener, listener);
                executor.execute(helper, local, options);
            } finally {
                try {
                    fs.close();
                } catch (UnsupportedOperationException e) {
                    if (log.isDebugEnabled()) {
                        log.debug("runUpload({}) {} => {} - failed ({}) to close file system={}: {}", session, remote,
                                local, e.getClass().getSimpleName(), fs, e.getMessage());
                    }
                }
            }
            handleCommandExitStatus(cmd, channel);
        } finally {
            channel.close(false);
        }
    }

    @Override public void download(String remote, OutputStream local) throws IOException {
        String cmd = ScpClient.createReceiveCommand(remote, Collections.emptyList());
        ClientSession session = getClientSession();
        ChannelExec channel = openCommandChannel(session, cmd);
        try (InputStream invOut = channel.getInvertedOut(); OutputStream invIn = channel.getInvertedIn()) {
            // NOTE: we use a mock file system since we expect no invocations for it
            ScpHelper helper =
                    new TalendScpHelper(session, invOut, invIn, new MockFileSystem(remote), opener, listener);
            helper.receiveFileStream(local, ScpHelper.DEFAULT_RECEIVE_BUFFER_SIZE);
            handleCommandExitStatus(cmd, channel);
        } finally {
            // Close the channel here as well, mirroring upload()/runUpload();
            // the original body of this finally block was empty and leaked it.
            channel.close(false);
        }
    }

}
@@ -0,0 +1,22 @@
package org.talend.components.talendscp;

import org.apache.sshd.scp.client.AbstractScpClientCreator;
import org.apache.sshd.scp.client.ScpClient;
import org.apache.sshd.scp.client.ScpClientCreator;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpTransferEventListener;

public class TalendScpClientCreator extends AbstractScpClientCreator implements ScpClientCreator {
    public static final TalendScpClientCreator INSTANCE = new TalendScpClientCreator();

    @Override
    public ScpClient createScpClient(ClientSession session) {
        return this.createScpClient(session, this.getScpFileOpener(), this.getScpTransferEventListener());
    }

    @Override public ScpClient createScpClient(ClientSession clientSession, ScpFileOpener scpFileOpener,
            ScpTransferEventListener scpTransferEventListener) {
        return new TalendScpClient(clientSession, scpFileOpener, scpTransferEventListener);
    }
}
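A usage sketch for the creator, assuming an authenticated ClientSession as in the pom sketch above; upload(Path, String, Option...) is part of the stock ScpClient API:

    ScpClient scp = TalendScpClientCreator.INSTANCE.createScpClient(session);
    // With TalendScpClient/TalendScpHelper underneath, a status-1 warning from
    // the remote scp aborts the transfer instead of being logged and ignored.
    scp.upload(java.nio.file.Paths.get("local/file.txt"), "/remote/dir/file.txt");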
@@ -0,0 +1,65 @@
package org.talend.components.talendscp;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.FileSystem;

import org.apache.sshd.scp.common.ScpException;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpHelper;
import org.apache.sshd.scp.common.ScpTransferEventListener;
import org.apache.sshd.common.session.Session;
import org.apache.sshd.scp.common.helpers.ScpAckInfo;

public class TalendScpHelper extends ScpHelper {

    public TalendScpHelper(Session session, InputStream in, OutputStream out, FileSystem fileSystem,
            ScpFileOpener opener, ScpTransferEventListener eventListener) {
        super(session, in, out, fileSystem, opener, eventListener);
    }

    @Override
    public ScpAckInfo readAck(boolean canEof) throws IOException {
        final ScpAckInfo scpAckInfo = ScpAckInfo.readAck(this.in, this.csIn, canEof);
        int c = scpAckInfo == null ? -1 : scpAckInfo.getStatusCode();
        switch (c) {
        case -1:
            if (log.isDebugEnabled()) {
                log.debug("readAck({})[EOF={}] received EOF", this, canEof);
            }
            if (!canEof) {
                throw new EOFException("readAck - EOF before ACK");
            }
            break;
        case ScpAckInfo.OK:
            if (log.isDebugEnabled()) {
                log.debug("readAck({})[EOF={}] read OK", this, canEof);
            }
            break;
        case ScpAckInfo.WARNING: {
            if (log.isDebugEnabled()) {
                log.debug("readAck({})[EOF={}] read warning message", this, canEof);
            }

            String line = readLine();
            log.warn("readAck({})[EOF={}] - Received warning: {}", this, canEof, line);
            throw new ScpException("Received warning: " + line, c);
        }
        case ScpAckInfo.ERROR: {
            if (log.isDebugEnabled()) {
                log.debug("readAck({})[EOF={}] read error message", this, canEof);
            }
            String line = readLine();
            if (log.isDebugEnabled()) {
                log.debug("readAck({})[EOF={}] received error: {}", this, canEof, line);
            }
            throw new ScpException("Received nack: " + line, c);
        }
        default:
            break;
        }
        return scpAckInfo;
    }
}
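Callers can still distinguish warnings from hard errors through the exit status carried by ScpException (getExitStatus() is part of the sshd API); a sketch:

    try {
        scp.download("/remote/dir/file.txt", localOutputStream); // hypothetical stream
    } catch (ScpException e) {
        // 1 = warning (also thrown by TalendScpHelper above), 2 = error.
        System.err.println("scp failed with status " + e.getExitStatus());
    }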
@@ -60,9 +60,9 @@
 </dependency>

 <dependency>
-    <groupId>log4j</groupId>
-    <artifactId>log4j</artifactId>
-    <version>1.2.17</version>
+    <groupId>ch.qos.reload4j</groupId>
+    <artifactId>reload4j</artifactId>
+    <version>1.2.19</version>
 </dependency>

@@ -2,17 +2,28 @@
 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
-<groupId>org.talend.libraries</groupId>
+<groupId>org.talend.components</groupId>
 <artifactId>talend-ws</artifactId>
-<version>1.0.1-20191112</version>
+<version>1.0.7-20220526</version>
 <packaging>jar</packaging>

+<licenses>
+    <license>
+        <name>Apache License, Version 2.0</name>
+        <url>https://www.talendforge.org/modules/licenses/APACHE_v2.txt</url>
+        <distribution>may be downloaded from the Maven repository</distribution>
+    </license>
+</licenses>

 <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <cxf.version>3.3.4</cxf.version>
-
-    <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
-
+    <cxf.version>3.4.7</cxf.version>
+    <odata.version>4.3.0</odata.version>
+    <slf4j.version>1.7.12</slf4j.version>
+    <talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
+    <httpclient.version>4.5.13</httpclient.version>
 </properties>

 <distributionManagement>
@@ -43,13 +54,19 @@
 <dependency>
     <groupId>commons-codec</groupId>
     <artifactId>commons-codec</artifactId>
-    <version>1.10</version>
+    <version>1.14</version>
 </dependency>
-<!-- https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient -->
+<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient -->
 <dependency>
-    <groupId>commons-httpclient</groupId>
-    <artifactId>commons-httpclient</artifactId>
-    <version>3.1</version>
+    <groupId>org.apache.httpcomponents</groupId>
+    <artifactId>httpclient</artifactId>
+    <version>${httpclient.version}</version>
 </dependency>
+<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore -->
+<dependency>
+    <groupId>org.apache.httpcomponents</groupId>
+    <artifactId>httpcore</artifactId>
+    <version>4.4.13</version>
+</dependency>
 <!-- https://mvnrepository.com/artifact/org.codehaus.woodstox/stax2-api -->
 <dependency>
@@ -91,6 +108,12 @@
     <groupId>org.apache.cxf</groupId>
     <artifactId>cxf-core</artifactId>
     <version>${cxf.version}</version>
+    <exclusions>
+        <exclusion>
+            <groupId>org.glassfish.jaxb</groupId>
+            <artifactId>jaxb-runtime</artifactId>
+        </exclusion>
+    </exclusions>
 </dependency>
 <dependency>
     <groupId>org.apache.cxf</groupId>
@@ -101,6 +124,16 @@
     <groupId>org.apache.cxf</groupId>
     <artifactId>cxf-tools-common</artifactId>
     <version>${cxf.version}</version>
+    <exclusions>
+        <exclusion>
+            <groupId>org.glassfish.jaxb</groupId>
+            <artifactId>jaxb-xjc</artifactId>
+        </exclusion>
+        <exclusion>
+            <groupId>org.glassfish.jaxb</groupId>
+            <artifactId>jaxb-runtime</artifactId>
+        </exclusion>
+    </exclusions>
 </dependency>
 <dependency>
     <groupId>org.apache.cxf</groupId>
@@ -127,6 +160,16 @@
     <artifactId>cxf-rt-transports-http</artifactId>
     <version>${cxf.version}</version>
 </dependency>
+<dependency>
+    <groupId>javax.xml.bind</groupId>
+    <artifactId>jaxb-api</artifactId>
+    <version>2.2.6</version>
+</dependency>
+<dependency>
+    <groupId>org.talend.libraries</groupId>
+    <artifactId>jaxb-impl-2.2.6-modified</artifactId>
+    <version>6.0.0</version>
+</dependency>
 <dependency>
     <groupId>org.talend.libraries</groupId>
     <artifactId>jaxb-xjc-2.2.6-modified</artifactId>
@@ -244,6 +287,7 @@
 <plugin>
     <groupId>org.apache.maven.plugins</groupId>
     <artifactId>maven-compiler-plugin</artifactId>
+    <version>3.8.1</version>
     <configuration>
         <source>1.8</source>
         <target>1.8</target>
@@ -251,4 +295,4 @@
     </plugin>
 </plugins>
 </build>
-</project>
+</project>

@@ -48,4 +48,6 @@ please @see org\talend\ws\helper\ServiceDiscoveryHelper.java
 please @see org\talend\ws\helper\ServiceInvokerHelper.java
 org\talend\ws\mapper\MapperFactory.java

-10.(2019-01-18 modified by dchmyga) fixed TDI-41647
+10.(2019-01-18 modified by dchmyga) fixed TDI-41647
+
+11.(2020-08-24 modified by ozhelezniak) updated commons-codec to 1.14 in scope of TDI-44145
@@ -1,6 +1,6 @@
 // ============================================================================
 //
-// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
+// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
 //
 // This source code is available under agreement available at
 // %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt

@@ -3,29 +3,22 @@
  */
 package org.talend.webservice.helper;

-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Vector;
+import com.ibm.wsdl.Constants;
+import com.ibm.wsdl.extensions.schema.SchemaConstants;
+import com.ibm.wsdl.util.xml.DOMUtils;
+import com.ibm.wsdl.util.xml.QNameUtils;
+import org.apache.ws.commons.schema.XmlSchema;
+import org.apache.ws.commons.schema.XmlSchemaCollection;
+import org.talend.webservice.helper.conf.ServiceHelperConfiguration;
+import org.talend.webservice.helper.conf.WSDLLocatorImpl;
+import org.w3c.dom.Element;

 import javax.wsdl.Definition;
 import javax.wsdl.Import;
 import javax.wsdl.Types;
 import javax.wsdl.WSDLException;
 import javax.wsdl.extensions.ExtensibilityElement;
 import javax.wsdl.extensions.UnknownExtensibilityElement;
 import javax.wsdl.extensions.schema.Schema;
 import javax.wsdl.extensions.schema.SchemaImport;
 import javax.wsdl.extensions.schema.SchemaReference;
@@ -37,16 +30,15 @@ import javax.xml.transform.TransformerException;
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;

-import org.apache.ws.commons.schema.XmlSchemaCollection;
-import org.talend.webservice.helper.conf.ServiceHelperConfiguration;
-import org.talend.webservice.helper.conf.WSDLLocatorImpl;
-import org.w3c.dom.Element;
-
-import com.ibm.wsdl.Constants;
-import com.ibm.wsdl.extensions.schema.SchemaConstants;
-import com.ibm.wsdl.util.xml.DOMUtils;
-import com.ibm.wsdl.util.xml.QNameUtils;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.*;

/**
 * This helper allows easy discovery of services and types
@@ -70,26 +62,36 @@ public class ServiceDiscoveryHelper {
     private Set<String> namespaces;

     private final String LOCAL_WSDL_NAME = "mainWSDL.wsdl";

+    private boolean createTempFiles = true;

     public ServiceDiscoveryHelper(String wsdlUri) throws WSDLException, IOException, TransformerException, URISyntaxException {
-        this(wsdlUri, null, null);
+        this(wsdlUri, null, null, true);
     }

     public ServiceDiscoveryHelper(String wsdlUri, String tempPath) throws WSDLException, IOException, TransformerException,
             URISyntaxException {
-        this(wsdlUri, null, tempPath);
+        this(wsdlUri, null, tempPath, true);
     }

     public ServiceDiscoveryHelper(String wsdlUri, ServiceHelperConfiguration configuration) throws WSDLException, IOException,
             TransformerException, URISyntaxException {
-        this(wsdlUri, configuration, null);
+        this(wsdlUri, configuration, null, true);
     }

     public ServiceDiscoveryHelper(String wsdlUri, ServiceHelperConfiguration configuration, String tempPath)
             throws WSDLException, IOException, TransformerException, URISyntaxException {
+        this(wsdlUri, configuration, tempPath, true);
+    }
+
+    public ServiceDiscoveryHelper(String wsdlUri, ServiceHelperConfiguration configuration, String tempPath, boolean createTempFiles)
+            throws WSDLException, IOException, TransformerException, URISyntaxException {
         this.wsdlUri = wsdlUri;
         this.configuration = configuration;
-        this.wsdlTmpDir = createTempWsdlDir(tempPath);
+        this.createTempFiles = createTempFiles;
+        if(createTempFiles) {
+            this.wsdlTmpDir = createTempWsdlDir(tempPath);
+        }
         init();
     }

@@ -139,7 +141,9 @@ public class ServiceDiscoveryHelper {

     namespaces = collectNamespaces();

-    generateTempWsdlFile();
+    if(this.createTempFiles) {
+        generateTempWsdlFile();
+    }

 }

@@ -436,6 +440,10 @@ public class ServiceDiscoveryHelper {
         return definitions.get(this.LOCAL_WSDL_NAME);
     }

+    Collection<Definition> getDefinitions() {
+        return definitions.values();
+    }

     /**
      * Return the xml schema collection
      *
@@ -450,7 +458,11 @@ public class ServiceDiscoveryHelper {
     }

     public String getLocalWsdlUri() {
-        return new File(wsdlTmpDir, this.LOCAL_WSDL_NAME).toURI().toString();
+        if(createTempFiles) {
+            return new File(wsdlTmpDir, this.LOCAL_WSDL_NAME).toURI().toString();
+        } else {
+            return this.wsdlUri;
+        }
     }

     public Set<String> getNamespaces() {
@@ -464,4 +476,33 @@ public class ServiceDiscoveryHelper {
             return "NOLOCATION";
         }
     }

+    public static void main(String[] args) throws Exception {
+        System.setProperty("javax.xml.transform.TransformerFactory", "org.apache.xalan.processor.TransformerFactoryImpl");
+        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.NoOpLog");
+
+        System.setProperty("javax.xml.accessExternalSchema", "all");
+
+        // lower the log level for DynamicClientFactory
+        java.util.logging.Logger LOG = org.apache.cxf.common.logging.LogUtils.getL7dLogger(org.apache.cxf.endpoint.dynamic.DynamicClientFactory.class);
+        LOG.setLevel(java.util.logging.Level.WARNING);
+
+        ServiceDiscoveryHelper helper = new ServiceDiscoveryHelper("http://gcomputer.net/webservices/knowledge.asmx?WSDL", null, null, false);
+        //ServiceDiscoveryHelper helper = new ServiceDiscoveryHelper("/Users/wangwei/Downloads/knowledge.wsdl", null, null, false);
+        /*
+        WSDLMetadataUtils utils = new WSDLMetadataUtils();
+        //WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(helper, "KnowledgeLeakSoap12", "Knowledge");
+        WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(helper, null, "Knowledge");
+        System.out.println(info.operationName);
+        System.out.println(info.port);
+        System.out.println(info.service);
+        System.out.println(info.inputParameters);
+        System.out.println(info.outputParameter);
+        System.out.println("done");
+        */
+
+        org.talend.webservice.helper.ServiceInvokerHelper serviceInvokerHelper = new org.talend.webservice.helper.ServiceInvokerHelper(helper, null);
+        Map<String, Object> result = serviceInvokerHelper.invokeDynamic("Knowledge", Arrays.asList(1));
+        System.out.println(result);
+    }
 }

@@ -3,30 +3,6 @@
  */
 package org.talend.webservice.helper;

-import java.beans.PropertyDescriptor;
-import java.io.File;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-
-import javax.wsdl.Input;
-import javax.wsdl.Message;
-import javax.wsdl.Operation;
-import javax.wsdl.Output;
-import javax.wsdl.Port;
-import javax.wsdl.Service;
-import javax.wsdl.WSDLException;
-import javax.xml.bind.annotation.XmlSchema;
-import javax.xml.bind.annotation.XmlType;
-import javax.xml.namespace.QName;
-import javax.xml.transform.TransformerException;
-
 import org.apache.commons.beanutils.PropertyUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.cxf.endpoint.Client;
@@ -39,11 +15,18 @@ import org.talend.webservice.helper.conf.ServiceHelperConfiguration;
 import org.talend.webservice.helper.map.MapConverter;
 import org.talend.webservice.jaxb.JAXBUtils;
 import org.talend.webservice.jaxb.JAXBUtils.IdentifierType;
-import org.talend.webservice.mapper.AnyPropertyMapper;
-import org.talend.webservice.mapper.ClassMapper;
-import org.talend.webservice.mapper.EmptyMessageMapper;
-import org.talend.webservice.mapper.MapperFactory;
-import org.talend.webservice.mapper.MessageMapper;
+import org.talend.webservice.mapper.*;
+
+import javax.wsdl.*;
+import javax.xml.bind.annotation.XmlSchema;
+import javax.xml.bind.annotation.XmlType;
+import javax.xml.namespace.QName;
+import javax.xml.transform.TransformerException;
+import java.beans.PropertyDescriptor;
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.*;

/**
 *
@@ -261,6 +244,61 @@ public class ServiceInvokerHelper implements ClassMapper {
         return MapConverter.deepMapToMap(result);
     }

+    // Resolves the service, port, and operation automatically; only the
+    // operation name (optionally with a port) and the parameter values are needed.
+    public Map<String, Object> invokeDynamic(String operationNameAndPortName, List<Object> param_values)
+            throws Exception, LocalizedException {
+        String portName = null;
+        String operationName = operationNameAndPortName;
+        try {
+            portName = operationName.substring(operationName.indexOf("(") + 1, operationName.indexOf(")"));
+            operationName = operationName.substring(0, operationName.indexOf("("));
+        } catch (Exception ignored) {
+        }
+
+        WSDLMetadataUtils utils = new WSDLMetadataUtils();
+        WSDLMetadataUtils.OperationInfo info = utils.parseOperationInfo(this.serviceDiscoveryHelper, portName, operationName);
+
+        Map<String, Object> paramsMap = null;
+        if(param_values!=null && !param_values.isEmpty()) {
+            List<String> paths = new ArrayList<>();
+            flat(paths, info.inputParameters, null);
+
+            int size = Math.min(paths.size(), param_values.size());
+
+            paramsMap = new HashMap<>();
+
+            for(int i=0;i<size;i++) {
+                paramsMap.put(paths.get(i), param_values.get(i));
+            }
+
+            if (!paramsMap.isEmpty()) {
+                paramsMap = MapConverter.mapToDeepMap(paramsMap);
+            }
+
+            if (paramsMap.isEmpty()) {
+                paramsMap = null;
+            }
+        }
+        Map<String, Object> result = invoke(info.service, info.port, info.operationName, paramsMap);
+
+        if(result==null || result.isEmpty()) return null;
+
+        return MapConverter.deepMapToMap(result, true);
+    }
+
+    private void flat(List<String> paths, List<WSDLMetadataUtils.ParameterInfo> inputParameters, String path) {
+        if(inputParameters==null || inputParameters.isEmpty()) {
+            if(path!=null) {
+                paths.add(path);
+            }
+            return;
+        }
+
+        for(WSDLMetadataUtils.ParameterInfo info : inputParameters) {
+            flat(paths, info.childParameters, path!=null? path + "." + info.name : info.name);
+        }
+    }

     protected String getClassNameForType(QName xmlSchemaTypeMapperQname) {
         StringBuilder sb = new StringBuilder();
         sb.append(getPackageForNamespaceURI(xmlSchemaTypeMapperQname.getNamespaceURI()));

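How the two new pieces fit together: flat(...) flattens the operation's input tree into dotted leaf paths, and the positional param_values are zipped onto those paths before the call. A sketch reusing the helper from ServiceDiscoveryHelper.main (the explicit port name is hypothetical):

    // Port resolved automatically (first port exposing the operation):
    Map<String, Object> r1 = serviceInvokerHelper.invokeDynamic("Knowledge", Arrays.asList(1));
    // Explicit port, using the "operation(port)" syntax parsed above:
    Map<String, Object> r2 = serviceInvokerHelper.invokeDynamic("Knowledge(KnowledgeSoap12)", Arrays.asList(1));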
@@ -0,0 +1,640 @@
package org.talend.webservice.helper;

import org.apache.ws.commons.schema.*;
import org.w3c.dom.Element;

import javax.wsdl.*;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.UnknownExtensibilityElement;
import javax.wsdl.extensions.soap.SOAPBinding;
import javax.wsdl.extensions.soap.SOAPBody;
import javax.wsdl.extensions.soap.SOAPOperation;
import javax.wsdl.extensions.soap12.SOAP12Binding;
import javax.wsdl.extensions.soap12.SOAP12Body;
import javax.wsdl.extensions.soap12.SOAP12Operation;
import javax.wsdl.factory.WSDLFactory;
import javax.xml.namespace.QName;
import java.io.File;
import java.net.URL;
import java.util.*;

public class WSDLMetadataUtils {

    WSDLFactory wsdlFactory = null;

    private Vector<XmlSchema> wsdlTypes = new Vector<XmlSchema>();

    private List<String> parametersName = new ArrayList<String>();

    private List<String> schemaNames = new ArrayList<String>();

    private List<String> documentBaseList = new ArrayList<String>();

    private List<XmlSchemaElement> allXmlSchemaElement = new ArrayList<XmlSchemaElement>();

    private List<XmlSchemaType> allXmlSchemaType = new ArrayList<XmlSchemaType>();

    public final static String DEFAULT_SOAP_ENCODING_STYLE = "http://schemas.xmlsoap.org/soap/encoding/";

    public WSDLMetadataUtils() throws WSDLException {
        wsdlFactory = WSDLFactory.newInstance();
    }

    public static class OperationInfo {
        QName port;
        QName service;

        String operationName;

        List<ParameterInfo> inputParameters = new ArrayList<ParameterInfo>();
        List<ParameterInfo> outputParameter = new ArrayList<ParameterInfo>();
    }

    public class ParameterInfo {
        String name;

        /* child parameters, only filled for complex types */
        List<ParameterInfo> childParameters = new ArrayList<ParameterInfo>();
    }

    // not thread-safe
    private List<OperationInfo> operations;

    private String targetOperationName;
    private String currentPort;
    private QName currentService;

    public OperationInfo parseOperationInfo(ServiceDiscoveryHelper sdh, String port, String operationName) throws Exception {
        this.targetOperationName = operationName;

        Collection<Definition> defs = sdh.getDefinitions();

        wsdlTypes = createSchemaFromTypes(defs);

        collectAllXmlSchemaElement();

        collectAllXmlSchemaType();

        // only fetch services from the main WSDL definition; the other definitions exist for element and type declarations
        Map services = defs.iterator().next().getServices();
        if (services != null) {
            Iterator iter = services.values().iterator();
            while (iter.hasNext()) {
                List<OperationInfo> operations = getOperations((Service) iter.next());
                for (OperationInfo info : operations) {
                    if (port == null) {
                        return info;
                    }
                    if (port.equals(info.port.getLocalPart())) {
                        return info;
                    }
                }
            }
        }

        throw new RuntimeException("can't find the operation: " + operationName + " with port: " + port);
    }

    private void collectAllXmlSchemaElement() {
        for (int i = 0; i < wsdlTypes.size(); i++) {
            XmlSchema xmlSchema = (wsdlTypes.elementAt(i));
            if (xmlSchema == null) {
                continue;
            }
            Map<QName, XmlSchemaElement> elements = xmlSchema.getElements();
            Iterator elementsItr = elements.values().iterator();
            while (elementsItr.hasNext()) {
                XmlSchemaElement xmlSchemaElement = (XmlSchemaElement) elementsItr.next();

                allXmlSchemaElement.add(xmlSchemaElement);
            }
        }
    }

    private void collectAllXmlSchemaType() {
        for (int i = 0; i < wsdlTypes.size(); i++) {
            XmlSchema xmlSchema = (wsdlTypes.elementAt(i));
            if (xmlSchema == null) {
                continue;
            }
            Map<QName, XmlSchemaType> xmlSchemaObjectTable = xmlSchema.getSchemaTypes();
            Iterator typesItr = xmlSchemaObjectTable.values().iterator();
            while (typesItr.hasNext()) {
                XmlSchemaType xmlSchemaType = (XmlSchemaType) typesItr.next();
                allXmlSchemaType.add(xmlSchemaType);
            }
        }

    }

    protected Vector<XmlSchema> createSchemaFromTypes(Collection<Definition> wsdlDefinitions) throws WSDLException {
        Vector<XmlSchema> schemas = new Vector<XmlSchema>();
        Set<String> imports = new HashSet<String>();
        Element schemaElementt = null;
        Map importElement = null;
        List includeElement = null;
        for (Definition def : wsdlDefinitions) {
            if (def.getTypes() != null) {
                List schemaExtElem = findExtensibilityElement(def.getTypes().getExtensibilityElements(), "schema");
                for (int i = 0; i < schemaExtElem.size(); i++) {
                    ExtensibilityElement schemaElement = (ExtensibilityElement) schemaExtElem.get(i);
                    if (schemaElement != null && schemaElement instanceof UnknownExtensibilityElement) {
                        schemaElementt = ((UnknownExtensibilityElement) schemaElement).getElement();

                        String documentBase = ((javax.wsdl.extensions.schema.Schema) schemaElement).getDocumentBaseURI();
                        XmlSchema schema = createschemafromtype(schemaElementt, def, documentBase);
                        if (schema != null) {
                            schemas.add(schema);
                            if (schema.getTargetNamespace() != null) {
                                schemaNames.add(schema.getTargetNamespace());
                            }
                        }
                        importElement = ((javax.wsdl.extensions.schema.Schema) schemaElement).getImports();
                        if (importElement != null && importElement.size() > 0) {
                            findImportSchema(def, schemas, importElement, imports);
                        }
                    }

                    if (schemaElement != null && schemaElement instanceof javax.wsdl.extensions.schema.Schema) {
                        schemaElementt = ((javax.wsdl.extensions.schema.Schema) schemaElement).getElement();
                        String documentBase = ((javax.wsdl.extensions.schema.Schema) schemaElement).getDocumentBaseURI();
                        Boolean isHaveImport = false;
                        importElement = ((javax.wsdl.extensions.schema.Schema) schemaElement).getImports();
                        if (importElement != null && importElement.size() > 0) {
                            Iterator keyIterator = importElement.keySet().iterator();
                            if (importElement.size() > 0) {
                                isHaveImport = true;
                            }
                        }

                        XmlSchema schema = createschemafromtype(schemaElementt, def, documentBase);
                        if (schema != null) {
                            schemas.add(schema);
                            if (schema.getTargetNamespace() != null) {
                                schemaNames.add(schema.getTargetNamespace());
                            }
                        }

                        if (isHaveImport) {
                            findImportSchema(def, schemas, importElement, imports);
                        }
                    }
                }

            }
        }
        return schemas;
    }

    private void findIncludesSchema(Definition wsdlDefinition, Vector schemas, List includeElement) throws WSDLException {
        Element schemaElementt;
        for (int i = 0; i < includeElement.size(); i++) {

            schemaElementt = ((com.ibm.wsdl.extensions.schema.SchemaReferenceImpl) includeElement.get(i)).getReferencedSchema()
                    .getElement();
            String documentBase = ((com.ibm.wsdl.extensions.schema.SchemaReferenceImpl) includeElement.get(i))
                    .getReferencedSchema().getDocumentBaseURI();
            XmlSchema schemaInclude = createschemafromtype(schemaElementt, wsdlDefinition, documentBase);
            if (schemaInclude != null) {
                schemas.add(schemaInclude);
                if (schemaInclude.getTargetNamespace() != null) {
                    schemaNames.add(schemaInclude.getTargetNamespace());
                }
            }
        }
    }

    private void findImportSchema(Definition wsdlDefinition, Vector schemas, Map importElement, Set<String> imports)
            throws WSDLException {
        Element schemaElementt;
        List includeElement = null;
        Iterator keyIterator = importElement.keySet().iterator();
        Boolean isHaveImport = false;
        while (keyIterator.hasNext()) {
            Object object = keyIterator.next();
            if (object != null) {
                String key = object.toString();
                Vector importEle = (Vector) importElement.get(key);

                for (int i = 0; i < importEle.size(); i++) {
                    Map importChildElement = null;
                    com.ibm.wsdl.extensions.schema.SchemaImportImpl importImpl = (com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle
                            .elementAt(i);
                    // to avoid import cycles
                    String importLocation = importImpl.getSchemaLocationURI() + ":" + importImpl.getNamespaceURI();
                    if (imports.contains(importLocation)) {
                        continue;
                    } else {
                        imports.add(importLocation);
                    }
                    if (importImpl.getReferencedSchema() != null) {

                        schemaElementt = importImpl.getReferencedSchema().getElement();
                        String documentBase = importImpl.getReferencedSchema().getDocumentBaseURI();

                        if ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i) != null) {
                            if (((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i)).getReferencedSchema() != null) {
                                importChildElement = ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i))
                                        .getReferencedSchema().getImports();
                                if (importChildElement != null && importChildElement.size() > 0 && !isIncludeSchema(documentBase)) {
                                    isHaveImport = true;
                                    documentBaseList.add(documentBase);
                                    // validateImportUrlPath(importElement);
                                }
                            }
                        }

                        XmlSchema schemaImport = createschemafromtype(schemaElementt, wsdlDefinition, documentBase);
                        if (schemaImport != null) {
                            schemas.add(schemaImport);
                            if (schemaImport.getTargetNamespace() != null) {
                                schemaNames.add(schemaImport.getTargetNamespace());
                            }
                        }
                    }

                    if (isHaveImport) {
                        findImportSchema(wsdlDefinition, schemas, importChildElement, imports);
                    }

                    if ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i) != null) {
                        if (((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i)).getReferencedSchema() != null) {
                            includeElement = ((com.ibm.wsdl.extensions.schema.SchemaImportImpl) importEle.elementAt(i))
                                    .getReferencedSchema().getIncludes();
                            if (includeElement != null && includeElement.size() > 0) {

                                findIncludesSchema(wsdlDefinition, schemas, includeElement);
                            }
                        }
                    }

                }
            }
        }
    }

    private List findExtensibilityElement(List extensibilityElements, String elementType) {
        List elements = new ArrayList();
        if (extensibilityElements != null) {
            Iterator iter = extensibilityElements.iterator();
            while (iter.hasNext()) {
                ExtensibilityElement element = (ExtensibilityElement) iter.next();
                if (element.getElementType().getLocalPart().equalsIgnoreCase(elementType)) {
                    elements.add(element);
                }
            }
        }
        return elements;
    }

    private XmlSchema createschemafromtype(Element schemaElement, Definition wsdlDefinition, String documentBase)
            throws WSDLException {
        if (schemaElement == null) {
            throw new WSDLException(WSDLException.INVALID_WSDL, "Unable to find schema extensibility element in WSDL");
        }

        XmlSchema xmlSchema = null;
        XmlSchemaCollection xmlSchemaCollection = new XmlSchemaCollection();
        xmlSchemaCollection.setBaseUri(fixDocumentBase(documentBase));

        xmlSchema = xmlSchemaCollection.read(schemaElement);

        return xmlSchema;
    }

    private String fixDocumentBase(String documentBase) {
        String fixedPath = documentBase;
        try {
            URL url = new URL(documentBase);
            File file = new File(url.getFile());
            fixedPath = file.toURI().toString();
        } catch (Exception e) {
            fixedPath = documentBase;
        }
        return fixedPath;
    }
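    // Effect of fixDocumentBase, with illustrative URIs: the file portion of the URL
    // is re-wrapped as a file: URI so relative schema locations resolve locally, and
    // anything unparsable is returned unchanged.
    //     "file:/C:/wsdl/demo.wsdl"    -> "file:/C:/wsdl/demo.wsdl"
    //     "http://host/wsdl/demo.wsdl" -> "file:/wsdl/demo.wsdl"  (the host is dropped)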

    private Boolean isIncludeSchema(String documentBase) {
        Boolean isHaveSchema = false;
        for (int i = 0; i < documentBaseList.size(); i++) {
            String documentBaseTem = documentBaseList.get(i);
            if (documentBaseTem.equals(documentBase)) {
                isHaveSchema = true;
            }
        }
        return isHaveSchema;
    }

    private List<OperationInfo> getOperations(Service service) {
        currentService = service.getQName();
        List<OperationInfo> result = new ArrayList<>();

        Map ports = service.getPorts();
        Iterator portIter = ports.values().iterator();
        while (portIter.hasNext()) {
            Port port = (Port) portIter.next();
            Binding binding = port.getBinding();

            currentPort = port.getName();

            result.addAll(buildOperations(binding));
        }

        return result;
    }

    private List<OperationInfo> buildOperations(Binding binding) {
        List<OperationInfo> result = new ArrayList<>();

        List operations = binding.getBindingOperations();

        if (operations != null && !operations.isEmpty()) {
            List soapBindingElems = findExtensibilityElement(binding.getExtensibilityElements(), "binding");
            String style = "document"; // default

            ExtensibilityElement soapBindingElem = (ExtensibilityElement) soapBindingElems.get(0);
            if (soapBindingElem != null && soapBindingElem instanceof SOAPBinding) {
                SOAPBinding soapBinding = (SOAPBinding) soapBindingElem;
                style = soapBinding.getStyle();
            } else if (soapBindingElem != null && soapBindingElem instanceof SOAP12Binding) {
                SOAP12Binding soapBinding = (SOAP12Binding) soapBindingElem;
                style = soapBinding.getStyle();
            }

            Iterator opIter = operations.iterator();

            while (opIter.hasNext()) {
                BindingOperation oper = (BindingOperation) opIter.next();
                List operElems = findExtensibilityElement(oper.getExtensibilityElements(), "operation");
                ExtensibilityElement operElem = (ExtensibilityElement) operElems.get(0);

                if (!targetOperationName.equals(oper.getName())) {
                    continue;
                }

                OperationInfo operationInfo = new OperationInfo();
                operationInfo.service = currentService;
                operationInfo.port = new QName(currentService.getNamespaceURI(), currentPort);
                operationInfo.operationName = oper.getName();

                // TODO do different?
                if (operElem != null && operElem instanceof SOAPOperation) {
                    buildOperation(operationInfo, oper);
                } else if (operElem != null && operElem instanceof SOAP12Operation) {
                    buildOperation(operationInfo, oper);
                }

                result.add(operationInfo);
            }
        }

        return result;
    }

    private void buildOperation(OperationInfo operationInfo, BindingOperation bindingOper) {
        Operation oper = bindingOper.getOperation();

        List operElems = findExtensibilityElement(bindingOper.getExtensibilityElements(), "operation");
        ExtensibilityElement operElem = (ExtensibilityElement) operElems.get(0);
        if (operElem != null && operElem instanceof SOAPOperation) { // TODO do different?
            SOAPOperation soapOperation = (SOAPOperation) operElem;
        } else if (operElem != null && operElem instanceof SOAP12Operation) {
            SOAP12Operation soapOperation = (SOAP12Operation) operElem;
        }
        BindingInput bindingInput = bindingOper.getBindingInput();
        BindingOutput bindingOutput = bindingOper.getBindingOutput();
        List bodyElems = findExtensibilityElement(bindingInput.getExtensibilityElements(), "body");
        ExtensibilityElement bodyElem = (ExtensibilityElement) bodyElems.get(0);

        if (bodyElem != null && bodyElem instanceof SOAPBody) {
            SOAPBody soapBody = (SOAPBody) bodyElem;
            List styles = soapBody.getEncodingStyles();
            String encodingStyle = null;
            if (styles != null) {
                encodingStyle = styles.get(0).toString();
            }
            if (encodingStyle == null) {
                encodingStyle = DEFAULT_SOAP_ENCODING_STYLE;
            }
            // TODO get namespace uri here?
            // soapBody.getNamespaceURI();
        } else if (bodyElem != null && bodyElem instanceof SOAP12Body) {
            SOAP12Body soapBody = (SOAP12Body) bodyElem;
            String encodingStyle = null;
            if (soapBody.getEncodingStyle() != null) {
                encodingStyle = soapBody.getEncodingStyle().toString();
            }
            if (encodingStyle == null) {
                encodingStyle = DEFAULT_SOAP_ENCODING_STYLE;
            }
            // TODO get namespace uri here?
            // soapBody.getNamespaceURI();
        }

        Input inDef = oper.getInput();
        if (inDef != null) {
            Message inMsg = inDef.getMessage();
            if (inMsg != null) {
                getParameterFromMessage(operationInfo, inMsg, 1);
            }
        }

        // the output parameter structure is not needed for now
        /*
        Output outDef = oper.getOutput();
        if (outDef != null) {
            Message outMsg = outDef.getMessage();
            if (outMsg != null) {
                getParameterFromMessage(operationInfo, outMsg, 2);
            }
        }
        */
    }

    private void getParameterFromMessage(OperationInfo operationInfo, Message msg, int manner) {
        List msgParts = msg.getOrderedParts(null);
        Iterator iter = msgParts.iterator();
        while (iter.hasNext()) {
            Part part = (Part) iter.next();
            String partName = part.getName();
            String partElement = null;
            String namespace = null;
            if (part.getElementName() != null) {
                partElement = part.getElementName().getLocalPart();
                namespace = part.getElementName().getNamespaceURI();
            } else if (part.getTypeName() != null) {
                partElement = part.getTypeName().getLocalPart();
                namespace = part.getTypeName().getNamespaceURI();
            }
            // add the root parameter from the message.
            ParameterInfo parameterRoot = new ParameterInfo();
            parameterRoot.name = partName;
            if (manner == 1) {
                operationInfo.inputParameters.add(parameterRoot);
            } else {
                operationInfo.outputParameter.add(parameterRoot);
            }
            if (allXmlSchemaElement.size() > 0) {
                buildParameterFromElements(partElement, parameterRoot, manner);
            } else if (allXmlSchemaType.size() > 0) {
                buileParameterFromTypes(namespace, partElement, parameterRoot, manner);
            }
        }
    }

    private void buildParameterFromElements(String partElement, ParameterInfo parameterRoot, int ioOrRecursion) {
        if (ioOrRecursion < 3) {
            parametersName.clear();
            parametersName.add(parameterRoot.name);
        } else if (ioOrRecursion == 3) {
            parametersName.add(parameterRoot.name);
        }
        Iterator elementsItr = allXmlSchemaElement.iterator();
        if (partElement != null) {
            while (elementsItr.hasNext()) {
                XmlSchemaElement xmlSchemaElement = (XmlSchemaElement) elementsItr.next();
                if (partElement.equals(xmlSchemaElement.getName())) {
                    if (xmlSchemaElement.getSchemaType() != null) {
                        if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaComplexType) {
                            XmlSchemaComplexType xmlElementComplexType = (XmlSchemaComplexType) xmlSchemaElement.getSchemaType();
                            XmlSchemaParticle xmlSchemaParticle = xmlElementComplexType.getParticle();
                            if (xmlSchemaParticle instanceof XmlSchemaGroupParticle) {
                                XmlSchemaGroupParticle xmlSchemaGroupBase = (XmlSchemaGroupParticle) xmlSchemaParticle;
                                if (xmlSchemaGroupBase != null) {
                                    buildParameterFromCollection(xmlSchemaGroupBase, parameterRoot, ioOrRecursion);
                                }
                            } else if (xmlSchemaElement.getSchemaTypeName() != null) {
                                String paraTypeName = xmlSchemaElement.getSchemaTypeName().getLocalPart();
                                String paraTypeNamespace = xmlSchemaElement.getSchemaTypeName().getNamespaceURI();
                                if (paraTypeName != null) {
                                    buileParameterFromTypes(paraTypeNamespace, paraTypeName, parameterRoot, ioOrRecursion);
                                }
                            }
                        } else if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaSimpleType) {
                            XmlSchemaSimpleType xmlSchemaSimpleType = (XmlSchemaSimpleType) xmlSchemaElement.getSchemaType();
                            String typeName = xmlSchemaSimpleType.getName();
                            if (typeName != null && typeName.equals("anyType")) {
                                ParameterInfo parameterSon = new ParameterInfo();
                                parameterSon.name = "anyType";
                                parameterRoot.childParameters.add(parameterSon);
                            }
                        }
                    } else if (xmlSchemaElement.getSchemaTypeName() != null) {
                        String paraTypeName = xmlSchemaElement.getSchemaTypeName().getLocalPart();
                        String paraTypeNamespace = xmlSchemaElement.getSchemaTypeName().getNamespaceURI();
                        if (paraTypeName != null) {
                            buileParameterFromTypes(paraTypeNamespace, paraTypeName, parameterRoot, ioOrRecursion);
                        }
                    }
                }
            }
        }
    }

    private void buileParameterFromTypes(String paraNamespace, String paraType, ParameterInfo parameter, int ioOrRecursion) {
        if (ioOrRecursion < 3) {
            parametersName.clear();
            parametersName.add(parameter.name);
        } else if (ioOrRecursion == 3) {
            parametersName.add(parameter.name);
        }

        // tWebServiceInput needs automatic metadata fetching for input parameters, but it only supports
        // simple cases, not custom-defined type usage like this:
        // <element type="tns="s:anyCustomDefinedSimpleOrComplexType"">
        // so there is nothing to do here; TODO confirm this
    }

    private void buildParameterFromCollection(XmlSchemaGroupParticle xmlSchemaGroupParticle, ParameterInfo parameter,
            int ioOrRecursion) {
        if (!(xmlSchemaGroupParticle instanceof XmlSchemaSequence)) {
            throw new RuntimeException("unsupported complex parameter type: only xsd:sequence is supported");
        }

        XmlSchemaSequence xmlSchemaSequence = (XmlSchemaSequence) xmlSchemaGroupParticle;
        List<XmlSchemaSequenceMember> sequences = xmlSchemaSequence.getItems();

        for (XmlSchemaSequenceMember sequence : sequences) {
            if (sequence instanceof XmlSchemaAny) { // TODO remove it, as this is not supported either
                ParameterInfo parameterSon = new ParameterInfo();
                parameterSon.name = "_content_";
                parameter.childParameters.add(parameterSon);
            } else if (sequence instanceof XmlSchemaElement) { // this is the major part we support
                XmlSchemaElement xmlSchemaElement = (XmlSchemaElement) sequence;
                String elementName = xmlSchemaElement.getName();
                ParameterInfo parameterSon = new ParameterInfo();
                parameterSon.name = elementName;

                parameter.childParameters.add(parameterSon);

                Boolean isHave = false;
                if (!parametersName.isEmpty() && parameterSon.name != null) {
                    for (int p = 0; p < parametersName.size(); p++) {
                        if (parameterSon.name.equals(parametersName.get(p))) {
                            isHave = true;
                        }
                    }
                }

                if (xmlSchemaElement.getSchemaTypeName() != null) {
                    String elementTypeName = xmlSchemaElement.getSchemaTypeName().getLocalPart();
                    String elementTypeNamespace = xmlSchemaElement.getSchemaTypeName().getNamespaceURI();
                    if (elementTypeName != null && elementTypeName.equals("anyType")) { // TODO remove it
                        parameterSon.name = xmlSchemaElement.getName() + ":anyType";
                    }
                    if (!isHave && !WsdlTypeUtil.isJavaBasicType(elementTypeName)) {
                        buileParameterFromTypes(elementTypeNamespace, elementTypeName, parameterSon, ioOrRecursion);
                    }
                } else if (xmlSchemaElement.getSchemaType() != null) {
                    if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaComplexType) {
                        throw new RuntimeException("nested or referenced complex types are not supported in xsd:sequence");
                    } else if (xmlSchemaElement.getSchemaType() instanceof XmlSchemaSimpleType) {
                        XmlSchemaSimpleType xmlSchemaSimpleType = (XmlSchemaSimpleType) xmlSchemaElement.getSchemaType();
                        String typeName = xmlSchemaSimpleType.getName();
                        if (typeName != null && typeName.equals("anyType")) {
                            ParameterInfo pSon = new ParameterInfo();
                            pSon.name = "anyType";
                            parameter.childParameters.add(pSon);
                        }
                    }
                } else if (xmlSchemaElement.getRef() != null) { // TODO did tWebServiceInput support this before?
                    String elementTypeName = xmlSchemaElement.getRef().getTargetQName().getLocalPart();
                    if (!isHave && !WsdlTypeUtil.isJavaBasicType(elementTypeName)) {
                        buildParameterFromElements(elementTypeName, parameterSon, ioOrRecursion);
                    }
                }
            } else if (sequence instanceof XmlSchemaAttribute) {
                XmlSchemaAttribute xmlSchemaAttribute = (XmlSchemaAttribute) sequence;
                String elementName = xmlSchemaAttribute.getName();
                ParameterInfo parameterSon = new ParameterInfo();
                parameterSon.name = elementName;

                parameter.childParameters.add(parameterSon);
                Boolean isHave = false;
                if (!parametersName.isEmpty() && parameterSon.name != null) {
                    for (int p = 0; p < parametersName.size(); p++) {
                        if (parameterSon.name.equals(parametersName.get(p))) {
                            isHave = true;
                        }
                    }

                }
                if (xmlSchemaAttribute.getSchemaTypeName() != null) {
                    String elementTypeName = xmlSchemaAttribute.getSchemaTypeName().getLocalPart();
                    String elementTypeNamespace = xmlSchemaAttribute.getSchemaTypeName().getNamespaceURI();
                    if (!isHave && !WsdlTypeUtil.isJavaBasicType(elementTypeName)) {
                        buileParameterFromTypes(elementTypeNamespace, elementTypeName, parameterSon, ioOrRecursion);
                    }
                } else if (xmlSchemaAttribute.getRef() != null) { // TODO did tWebServiceInput support this before?
                    String refName = xmlSchemaAttribute.getRef().getTargetQName().getLocalPart();
                    if (!isHave) {
                        buildParameterFromElements(refName, parameterSon, ioOrRecursion);
                    }
                }
            } else {
                throw new RuntimeException("unsupported nested type in xsd:sequence");
            }
        }

    }

}
@@ -0,0 +1,39 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.webservice.helper;

public class WsdlTypeUtil {

    public static Boolean isJavaBasicType(String typeName) {
        Boolean isJavaBasicType = false;
        if (typeName == null) {
            return false;
        }
        if ("String".equalsIgnoreCase(typeName)) {
            isJavaBasicType = true;
        } else if ("int".equalsIgnoreCase(typeName)) {
            isJavaBasicType = true;
        } else if ("long".equalsIgnoreCase(typeName)) {
            isJavaBasicType = true;
        } else if ("double".equalsIgnoreCase(typeName)) {
            isJavaBasicType = true;
        } else if ("float".equalsIgnoreCase(typeName)) {
            isJavaBasicType = true;
        } else if ("char".equalsIgnoreCase(typeName)) {
            isJavaBasicType = true;
        }

        return isJavaBasicType;

    }
}
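// Usage sketch of the helper above (type names illustrative):
//     WsdlTypeUtil.isJavaBasicType("String");   // true  (comparison is case-insensitive)
//     WsdlTypeUtil.isJavaBasicType("INT");      // true
//     WsdlTypeUtil.isJavaBasicType("Address");  // false -> treated as a complex type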
@@ -15,12 +15,17 @@ import java.util.logging.Logger;

import javax.wsdl.xml.WSDLLocator;

import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.xml.sax.InputSource;

/**
@@ -49,10 +54,10 @@ public class WSDLLocatorImpl implements WSDLLocator {
    }

    public InputSource getBaseInputSource() {
        GetMethod get = createGetMethod(wsdlUri);
        HttpRequestBase get = createGetMethod(wsdlUri);
        try {
            httpClient.executeMethod(get);
            InputStream is = get.getResponseBodyAsStream();
            HttpResponse response = httpClient.execute(get);
            InputStream is = response.getEntity().getContent();
            inputStreams.add(is);
            return new InputSource(is);
        } catch (IOException ex) {
@@ -64,9 +69,9 @@ public class WSDLLocatorImpl implements WSDLLocator {
        try {
            URL url = getURL(parentLocation, importLocation);
            latestImportUri = url.toExternalForm();
            GetMethod get = createGetMethod(latestImportUri);
            httpClient.executeMethod(get);
            InputStream is = get.getResponseBodyAsStream();
            HttpRequestBase get = createGetMethod(latestImportUri);
            HttpResponse response = httpClient.execute(get);
            InputStream is = response.getEntity().getContent();
            inputStreams.add(is);
            return new InputSource(is);
        } catch (MalformedURLException ex) {
@@ -110,36 +115,44 @@ public class WSDLLocatorImpl implements WSDLLocator {
        inputStreams.clear();
    }

    private GetMethod createGetMethod(String uri) {
        GetMethod get = new GetMethod(uri);
    private HttpRequestBase createGetMethod(String uri) {
        HttpGet get = new HttpGet(uri);
        if (configuration.getCookie() != null) {
            get.setRequestHeader(HTTP_HEADER_COOKIE, configuration.getCookie());
            get.setHeader(HTTP_HEADER_COOKIE, configuration.getCookie());
        }

        return get;
    }

    private HttpClient createHttpClient() {
        HttpClient httpClient = new HttpClient();
        HttpClientBuilder builder = HttpClients.custom();
        CredentialsProvider credentialsProvider = null;
        if (configuration.getProxyServer() != null) {
            HostConfiguration hostConfiguration = new HostConfiguration();
            hostConfiguration.setProxy(configuration.getProxyServer(), configuration.getProxyPort());
            httpClient.setHostConfiguration(hostConfiguration);
            builder.setProxy(new HttpHost(configuration.getProxyServer(), configuration.getProxyPort()));
        }

        if (configuration.getUsername() != null) {
            Credentials credentials = new UsernamePasswordCredentials(configuration.getUsername(), configuration.getPassword());

            httpClient.getState().setCredentials(AuthScope.ANY, credentials);
            if (credentialsProvider == null) {
                credentialsProvider = new BasicCredentialsProvider();
            }
            credentialsProvider
                    .setCredentials(AuthScope.ANY,
                            new UsernamePasswordCredentials(configuration.getUsername(), configuration.getPassword()));
        }

        if (configuration.getProxyUsername() != null) {
            Credentials credentials = new UsernamePasswordCredentials(configuration.getProxyUsername(),
                    configuration.getProxyPassword());

            httpClient.getState().setProxyCredentials(AuthScope.ANY, credentials);
            httpClient.getHostConfiguration().setProxy(configuration.getProxyServer(), configuration.getProxyPort());
            if (credentialsProvider == null) {
                credentialsProvider = new BasicCredentialsProvider();
            }
            credentialsProvider
                    .setCredentials(new AuthScope(configuration.getProxyServer(), configuration.getProxyPort()),
                            new UsernamePasswordCredentials(configuration.getProxyUsername(),
                                    configuration.getProxyPassword()));
            builder.setProxy(new HttpHost(configuration.getProxyServer(), configuration.getProxyPort()));
        }
        return httpClient;
        if (credentialsProvider != null) {
            builder.setDefaultCredentialsProvider(credentialsProvider);
        }
        return builder.build();
    }
}
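// Minimal sketch of the HttpClient 4.x pattern the class moves to above (the URL is
// hypothetical; assumes no proxy and no credentials are configured):
//
//     HttpClient client = HttpClients.custom().build();
//     HttpGet get = new HttpGet("http://example.org/service?wsdl");
//     HttpResponse response = client.execute(get);
//     InputStream body = response.getEntity().getContent(); // parse the WSDL from here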

@@ -4,15 +4,13 @@
 */
package org.talend.webservice.helper.map;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;

import javax.xml.namespace.QName;

import org.talend.webservice.helper.PathUtil;
import org.talend.webservice.mapper.AnyTypeMapper;
import sun.awt.image.ImageWatched;

/**
 *
@@ -24,35 +22,44 @@ public class MapConverter {
    public static final String LEFT_SQUARE_BRACKET = "[";
    public static final String RIGHT_SQUARE_BRACKET = "]";

    private static Map<String, Object> newMap(boolean keepOrder) {
        if (keepOrder) return new LinkedHashMap<>();
        return new HashMap<>();
    }

    public static Map<String, Object> deepMapToMap(Map<String, Object> map) {
        return deepMapToMap(map, null, SEPARATOR);
        return deepMapToMap(map, null, SEPARATOR, false);
    }

    public static Map<String, Object> deepMapToMap(Map<String, Object> map, boolean keepOrder) {
        return deepMapToMap(map, null, SEPARATOR, keepOrder);
    }

    public static Map<String, Object> mapToDeepMap(Map<String, Object> map) {
        return mapToDeepMap(map, SEPARATOR);
    }

    private static Map<String, Object> deepMapToMap(Object value, String k, String sep) {
    private static Map<String, Object> deepMapToMap(Object value, String k, String sep, boolean keepOrder) {
        if (value instanceof Map) {
            Map<String, Object> map = (Map<String, Object>) value;
            Map<String, Object> out = new HashMap<String, Object>();
            Map<String, Object> out = newMap(keepOrder);
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                if (k == null) {
                    out.putAll(deepMapToMap(entry.getValue(), entry.getKey(), sep));
                    out.putAll(deepMapToMap(entry.getValue(), entry.getKey(), sep, keepOrder));
                } else {
                    out.putAll(deepMapToMap(entry.getValue(), k + sep + entry.
                            getKey(), sep));
                            getKey(), sep, keepOrder));
                }
            }
            return out;
        } else if (value instanceof List) {
            List<Object> list = (List<Object>) value;
            Map<String, Object> out = new HashMap<String, Object>();
            Map<String, Object> out = newMap(keepOrder);
            int i = 0;
            for (Object val : list) {
                StringBuffer sb = new StringBuffer();
                sb.append(k).append(LEFT_SQUARE_BRACKET).append(i).append(RIGHT_SQUARE_BRACKET);
                out.putAll(deepMapToMap(val, sb.toString(), sep));
                out.putAll(deepMapToMap(val, sb.toString(), sep, keepOrder));
                i++;
            }
            out.put(k + ".size", list.size());
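// Sketch of the flattening contract (keys illustrative, assuming SEPARATOR is "."):
// nested maps join keys with the separator; lists expand to indexed keys plus a
// trailing ".size" entry. mapToDeepMap(...) is the inverse direction.
//     {"person": {"name": "Ada", "tags": ["x", "y"]}}   becomes
//     "person.name" -> "Ada", "person.tags[0]" -> "x",
//     "person.tags[1]" -> "y", "person.tags.size" -> 2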

@@ -42,14 +42,18 @@ import javax.xml.transform.Result;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.stream.StreamResult;

import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.common.jaxb.JAXBBeanInfo;
import org.apache.cxf.common.jaxb.JAXBContextProxy;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.common.spi.ClassGeneratorClassLoader;
import org.apache.cxf.common.util.ASMHelper;
import org.apache.cxf.common.util.ASMHelper.ClassWriter;
import org.apache.cxf.common.util.ASMHelper.FieldVisitor;
import org.apache.cxf.common.util.ASMHelper.Label;
import org.apache.cxf.common.util.ASMHelper.MethodVisitor;
import org.apache.cxf.common.util.ASMHelperImpl;
import org.apache.cxf.common.util.CachedClass;
import org.apache.cxf.common.util.PackageUtils;
import org.apache.cxf.common.util.ReflectionInvokationHandler;
@@ -578,7 +582,7 @@ public final class JAXBUtils {
    public static void setNamespaceWrapper(final Map<String, String> nspref, Marshaller marshaller) throws PropertyException {
        Object mapper = null;
        if (marshaller.getClass().getName().contains(".internal.")) {
            mapper = createNamespaceWrapper(nspref);
            mapper = createNamespaceWrapper(null, nspref);
            if (mapper == null) {
                LOG.log(Level.INFO, "Could not create namespace mapper for JDK internal" + " JAXB implementation.");
            } else {
@@ -595,6 +599,31 @@ public final class JAXBUtils {
        }
    }

    /*
     * To avoid possible runtime collision.
     */
    public static Object setNamespaceMapper(Bus bus, final Map<String, String> nspref,
            Marshaller marshaller) throws PropertyException {
        Object mapper = null;
        if (marshaller.getClass().getName().contains(".internal.")) {
            mapper = createNamespaceWrapper(bus, nspref);
            if (mapper == null) {
                LOG.log(Level.INFO, "Could not create namespace mapper for JDK internal" + " JAXB implementation.");
            } else {
                marshaller.setProperty("com.sun.xml.internal.bind.namespacePrefixMapper", mapper);
            }
        } else {
            try {
                Class<?> cls = Class.forName("org.apache.cxf.common.jaxb.NamespaceMapper");
                mapper = cls.getConstructor(Map.class).newInstance(nspref);
            } catch (Exception ex) {
                LOG.log(Level.INFO, "Could not create NamespaceMapper", ex);
            }
            marshaller.setProperty("com.sun.xml.bind.namespacePrefixMapper", mapper);
        }
        return mapper;
    }
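    // Hedged usage sketch of setNamespaceMapper above (the namespace and prefix are
    // made up; "marshaller" is assumed to be an existing JAXB Marshaller):
    //
    //     Map<String, String> prefixes = new HashMap<>();
    //     prefixes.put("http://example.org/ns", "ex");
    //     JAXBUtils.setNamespaceMapper(BusFactory.getDefaultBus(), prefixes, marshaller);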

    public static BridgeWrapper createBridge(Set<Class<?>> ctxClasses, QName qname, Class<?> refcls, Annotation anns[])
            throws JAXBException {
        try {
@@ -1018,16 +1047,17 @@ public final class JAXBUtils {
        return false;
    }

    private static synchronized Object createNamespaceWrapper(Map<String, String> map) {
        ASMHelper helper = new ASMHelper();
    private static synchronized Object createNamespaceWrapper(Bus bus, Map<String, String> map) {
        ASMHelper helper = new ASMHelperImpl();
        String className = "org.apache.cxf.jaxb.NamespaceMapperInternal";
        Class<?> cls = helper.findClass(className, JAXBUtils.class);
        NamespaceMapperClassGenerator nmcg = new NamespaceMapperClassGenerator(bus);
        Class<?> cls = nmcg.findClass(className, JAXBUtils.class);
        if (cls == null) {
            ClassWriter cw = helper.createClassWriter();
            if (cw == null) {
                return null;
            }
            cls = createNamespaceWrapperInternal(helper, cw);
            cls = createNamespaceWrapperInternal(helper, cw, nmcg);
        }
        try {
            return cls.getConstructor(Map.class).newInstance(map);
@@ -1036,7 +1066,7 @@ public final class JAXBUtils {
        }
    }

    private static Class<?> createNamespaceWrapperInternal(ASMHelper helper, ClassWriter cw) {
    private static Class<?> createNamespaceWrapperInternal(ASMHelper helper, ClassWriter cw, NamespaceMapperClassGenerator nmcg) {
        String className = "org.apache.cxf.jaxb.NamespaceMapperInternal";
        FieldVisitor fv;
        MethodVisitor mv;
@@ -1127,7 +1157,7 @@ public final class JAXBUtils {
        }
    }

        return helper.loadClass(className, cls, bts);
        return nmcg.loadClass(className, cls, bts);
    }

    public static JAXBBeanInfo getBeanInfo(JAXBContextProxy context, Class<?> cls) {
@@ -1138,4 +1168,20 @@ public final class JAXBUtils {
        return ReflectionInvokationHandler.createProxyWrapper(o, JAXBBeanInfo.class);
    }

    private static class NamespaceMapperClassGenerator extends ClassGeneratorClassLoader {

        private NamespaceMapperClassGenerator(Bus bus) {
            super(bus == null ? BusFactory.getDefaultBus() : bus);
        }

        @Override
        protected Class<?> findClass(String className, Class<?> cls) {
            return super.findClass(className, cls);
        }

        @Override
        protected Class<?> loadClass(String className, Class<?> cls, byte[] bytes) {
            return super.loadClass(className, cls, bytes);
        }
    }
}

@@ -3,13 +3,12 @@
 */
package org.talend.webservice.mapper;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.webservice.exception.LocalizedException;

import javax.xml.namespace.QName;

import org.talend.webservice.exception.LocalizedException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 *
@@ -151,7 +150,7 @@ public class ComplexTypeMapper implements TypeMapper {
        if (!clazz.getName().equals(beanName)) {
            ComplexTypeMapper instanceComplexTypeMapper = findInstanceByClassName(beanName);
            if (instanceComplexTypeMapper != null) {
                Map<String, Object> values = new HashMap<String, Object>();
                Map<String, Object> values = new LinkedHashMap<String, Object>();
                values.put(ABSTRACT_TYPE_NAME, instanceComplexTypeMapper.typeName);
                values.put(instanceComplexTypeMapper.typeName.getLocalPart(), instanceComplexTypeMapper.typeToValue(bean));
                return values;
@@ -167,7 +166,7 @@ public class ComplexTypeMapper implements TypeMapper {
                return null;
            }
        } else {
            Map<String, Object> values = new HashMap<String, Object>(mappers.size());
            Map<String, Object> values = new LinkedHashMap<String, Object>(mappers.size());
            for (Map.Entry<String, PropertyMapper> entry : mappers.entrySet()) {
                Object value = entry.getValue().getValueFrom(bean);
                if (value != null) {
@@ -198,7 +197,7 @@ public class ComplexTypeMapper implements TypeMapper {
        if (params == null) {
            return null;
        }
        Map<String, Object> values = new HashMap<String, Object>(mappers.size());
        Map<String, Object> values = new LinkedHashMap<String, Object>(mappers.size());

        int i = 0;
        for (Object param : params) {

@@ -4,6 +4,7 @@
package org.talend.webservice.mapper;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@@ -336,7 +337,8 @@ public class MapperFactory {
        Class<?> clazz = classMapper.getClassForType(xmlSchemaComplexType.getQName(), orderedMap.keyList(), 1);

        // 3. create the propertyMapper (propertyName, class, schemaTypeMap, typeMapperQname)
        Map<String, PropertyMapper> mappers = new HashMap<String, PropertyMapper>();
        // use an ordered map: property order matters for positional response parsing
        Map<String, PropertyMapper> mappers = new LinkedHashMap<>();
        for (String key : properties.keySet()) {
            Object xmlSchemaObject = properties.get(key);
            if (xmlSchemaObject == null) {
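// Why LinkedHashMap in these mappers: it iterates in insertion order, so positional
// response parsing stays aligned with the declared property order. A sketch:
//
//     Map<String, Object> ordered = new LinkedHashMap<>();
//     ordered.put("first", 1);
//     ordered.put("second", 2);
//     // keySet() iterates "first", "second"; a plain HashMap gives no such guarantee.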

@@ -5,6 +5,7 @@
package org.talend.webservice.mapper;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.wsdl.Message;
@@ -152,7 +153,7 @@ public class MessageMapperImpl implements MessageMapper {
            return wrappedValue;
        } else {
            List<Part> orderedParts = message.getOrderedParts(null);
            Map<String, Object> values = new HashMap<String, Object>(params.length);
            Map<String, Object> values = new LinkedHashMap<String, Object>(params.length);
            int i = 0;
            for (Object param : params) {
                Part part = orderedParts.get(i);

@@ -1,7 +0,0 @@
org.talend.ws.exception.IllegalPropertyAccessException=\u0391\u03B4\u03C5\u03BD\u03B1\u03BC\u03AF\u03B1 \u03C0\u03C1\u03CC\u03C3\u03B2\u03B1\u03C3\u03B7\u03C2 \u03C3\u03C4\u03B7\u03BD \u03B9\u03B4\u03B9\u03CC\u03C4\u03B7\u03C4\u03B1 {0} \u03C0\u03BF\u03C5 \u03B4\u03B5\u03BD \u03B5\u03AF\u03BD\u03B1\u03B9 \u03C0\u03C1\u03BF\u03C3\u03B2\u03AC\u03C3\u03B9\u03BC\u03B7 \u03B3\u03B9\u03B1 \u03C4\u03CD\u03C0\u03BF {1}
org.talend.ws.exception.NoSuchPropertyException=\u0397 \u03B9\u03B4\u03B9\u03CC\u03C4\u03B7\u03C4\u03B1 {0} \u03B4\u03B5\u03BD \u03C5\u03C0\u03AC\u03C1\u03C7\u03B5\u03B9 \u03B3\u03B9\u03B1 \u03C4\u03BF\u03BD \u03C4\u03CD\u03C0\u03BF {1}
org.talend.ws.exception.Instantiation=\u0391\u03B4\u03C5\u03BD\u03B1\u03BC\u03AF\u03B1 \u03BD\u03B1 \u03B4\u03BF\u03B8\u03B5\u03AF \u03C5\u03C0\u03CC\u03C3\u03C4\u03B1\u03C3\u03B7 \u03C3\u03B5 \u03BA\u03BB\u03AC\u03C3\u03B7 \u03C4\u03CD\u03C0\u03BF\u03C5 {0}
org.talend.ws.exception.illegalAccessValueOf=\u0391\u03B4\u03C5\u03BD\u03B1\u03BC\u03AF\u03B1 \u03BA\u03BB\u03AE\u03C3\u03B7\u03C2 \u03BC\u03B5\u03B8\u03CC\u03B4\u03BF\u03C5 valueOf \u03B3\u03B9\u03B1 \u03C4\u03CD\u03C0\u03BF enum {0}
org.talend.ws.exception.Unknown=\u039C\u03B9\u03B1 \u03AC\u03B3\u03BD\u03C9\u03C3\u03C4\u03B7 \u03B5\u03BE\u03B1\u03AF\u03C1\u03B5\u03C3\u03B7 \u03C0\u03C1\u03BF\u03AD\u03BA\u03C5\u03C8\u03B5
org.talend.ws.exception.InvalidEnumValueException=\u0397 \u03C4\u03B9\u03BC\u03AE {0} \u03B4\u03B5\u03BD \u03B5\u03AF\u03BD\u03B1\u03B9 \u03AD\u03B3\u03BA\u03C5\u03C1\u03B7 \u03B3\u03B9\u03B1 enum {1}
org.talend.ws.exception.InvalidParameterAnyType=\u03A0\u03C1\u03AD\u03C0\u03B5\u03B9 \u03BD\u03B1 \u03C0\u03C1\u03BF\u03C3\u03B4\u03B9\u03BF\u03C1\u03AF\u03C3\u03B5\u03C4\u03B5 \u03AD\u03BD\u03B1\u03BD \u03C7\u03AC\u03C1\u03C4\u03B7 \u03C3\u03B1\u03BD \u03B1\u03C5\u03C4\u03CC\u03BD \u03B3\u03B9\u03B1 anyTypes : {anyType: value, anyType_type: qname}
@@ -1,8 +0,0 @@
org.talend.ws.exception.IllegalPropertyAccessException=Unable to access property {0} which is not accessible for type {1}
org.talend.ws.exception.NoSuchPropertyException=Property {0} does not exist for type {1}
org.talend.ws.exception.Instantiation=Unable to instantiate class of type {0}
org.talend.ws.exception.InvocationTargetPropertyAccessor=A property accessor has thrown an exception : property {0} of class {1}
org.talend.ws.exception.illegalAccessValueOf=Unable to call method valueOf for enum type {0}
org.talend.ws.exception.Unknown=An unknown exception has been thrown
org.talend.ws.exception.InvalidEnumValueException=Value {0} is not valid for enum {1}
org.talend.ws.exception.InvalidParameterAnyType=You must specify a map like this for anyTypes : {anyType: value, anyType_type: qname} pour les anyType
@@ -1,6 +1,8 @@
org.talend.ws.exception.IllegalPropertyAccessException=Impossible d'acc\u00e9der \u00e0 la propri\u00e9t\u00e9 {0} qui est inaccessible pour le type {1}
org.talend.ws.exception.NoSuchPropertyException=La propri\u00e9t\u00e9 {0} n'existe pas pour le type {1}
org.talend.ws.exception.Instantiation=Impossible d'instancier la classe de type {0}
org.talend.ws.exception.illegalAccessValueOf=Impossible d'appeler une m\u00e9thode valueOf pour le type enum {0}
org.talend.ws.exception.Unknown=Une exception inconnue a \u00e9t\u00e9 rencontr\u00e9e
org.talend.ws.exception.IllegalPropertyAccessException=Impossible d'acc\u00E9der \u00E0 la propri\u00E9t\u00E9 {0} qui est inaccessible pour le type {1}
org.talend.ws.exception.NoSuchPropertyException=La propri\u00E9t\u00E9 {0} n'existe pas pour le type {1}
org.talend.ws.exception.Instantiation=Impossible d'instancier le type de classe {0}
org.talend.ws.exception.InvocationTargetPropertyAccessor=Un accesseur de propri\u00E9t\u00E9 a retourner une exception : propri\u00E9t\u00E9 {0} de classe {1}
org.talend.ws.exception.illegalAccessValueOf=Impossible d'appeler une m\u00E9thode valueOf pour le type enum {0}
org.talend.ws.exception.Unknown=Une exception inconnue a \u00E9t\u00E9 d\u00E9clench\u00E9e
org.talend.ws.exception.InvalidEnumValueException=La valeur {0} n'est pas valide pour enum {1}
org.talend.ws.exception.InvalidParameterAnyType=Vous devez sp\u00E9cifier une Map comme celle-ci pour anyTypes : {anyType: value, anyType_type: qname} pour les anyType

@@ -1,8 +1,8 @@
org.talend.ws.exception.IllegalPropertyAccessException=\u30BF\u30A4\u30D7{1}\u306B\u30A2\u30AF\u30BB\u30B9\u3067\u304D\u306A\u3044\u30D7\u30ED\u30D1\u30C6\u30A3{0}\u306B\u30A2\u30AF\u30BB\u30B9\u3059\u308B\u3053\u3068\u306F\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.IllegalPropertyAccessException={0} \u30D7\u30ED\u30D1\u30C6\u30A3\u306B\u30A2\u30AF\u30BB\u30B9\u3067\u304D\u307E\u305B\u3093\u3002\u3053\u308C\u306F {1} \u578B\u306E\u305F\u3081\u30A2\u30AF\u30BB\u30B9\u3067\u304D\u307E\u305B\u3093\u3002
org.talend.ws.exception.NoSuchPropertyException=\u30BF\u30A4\u30D7{1}\u306E\u30D7\u30ED\u30D1\u30C6\u30A3{0}\u306F\u5B58\u5728\u3057\u307E\u305B\u3093
org.talend.ws.exception.Instantiation=\u30BF\u30A4\u30D7{0}\u306E\u30AF\u30E9\u30B9\u3092\u30A4\u30F3\u30B9\u30BF\u30F3\u30B9\u5316\u3059\u308B\u3053\u3068\u306F\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.InvocationTargetPropertyAccessor=\u30D7\u30ED\u30D1\u30C6\u30A3\u30A2\u30AF\u30BB\u30C3\u30B5\u306F\u4F8B\u5916\u3092\u767A\u751F\u3055\u305B\u307E\u3057\u305F: \u30AF\u30E9\u30B9{1}\u306E\u30D7\u30ED\u30D1\u30C6\u30A3{0}
org.talend.ws.exception.illegalAccessValueOf=enum\u578B{0}\u306E\u30E1\u30BD\u30C3\u30C9valueOf\u3092\u547C\u3073\u51FA\u3059\u3053\u3068\u304C\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.Unknown=\u4E0D\u660E\u306A\u4F8B\u5916\u304C\u767A\u751F\u3057\u307E\u3057\u305F
org.talend.ws.exception.InvalidEnumValueException=\u5024{0}\u306F\u5217\u6319\u578B{1}\u3068\u3057\u3066\u6709\u52B9\u3067\u306F\u3042\u308A\u307E\u305B\u3093
org.talend.ws.exception.InvalidParameterAnyType=\u3053\u306E\u3088\u3046\u306AanyTypes\u306E\u30DE\u30C3\u30D7\u3092\u6307\u5B9A\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059: {anyType: value, anyType_type: qname}
org.talend.ws.exception.Instantiation={0} \u578B\u306E\u30AF\u30E9\u30B9\u306E\u30A4\u30F3\u30B9\u30BF\u30F3\u30B9\u3092\u4F5C\u6210\u3067\u304D\u307E\u305B\u3093\u3002
org.talend.ws.exception.InvocationTargetPropertyAccessor=\u30D7\u30ED\u30D1\u30C6\u30A3\u30A2\u30AF\u30BB\u30B9\u306B\u3088\u308A\u3001\u4F8B\u5916\u304C\u30B9\u30ED\u30FC\u3055\u308C\u307E\u3057\u305F\uFF1A\u30AF\u30E9\u30B9 {1} \u306E\u30D7\u30ED\u30D1\u30C6\u30A3 {0}
org.talend.ws.exception.illegalAccessValueOf=\u5217\u6319\u578B {0} \u306EvalueOf\u30E1\u30BD\u30C3\u30C9\u3092\u547C\u3073\u51FA\u3059\u3053\u3068\u306F\u3067\u304D\u307E\u305B\u3093
org.talend.ws.exception.Unknown=\u4E0D\u660E\u306A\u4F8B\u5916\u304C\u30B9\u30ED\u30FC\u3055\u308C\u307E\u3057\u305F
org.talend.ws.exception.InvalidEnumValueException=\u5024 {0} \u306Fenum\u578B {1} \u3067\u306F\u3042\u308A\u307E\u305B\u3093\u3002
org.talend.ws.exception.InvalidParameterAnyType=\u30DE\u30C3\u30D7\u306F\u3069\u306E\u30BF\u30A4\u30D7\u306B\u3064\u3044\u3066\u3082\u4EE5\u4E0B\u306E\u3088\u3046\u306B\u6307\u5B9A\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059: {anyType: value, anyType_type: qname}

@@ -3,6 +3,6 @@ org.talend.ws.exception.NoSuchPropertyException=\u5C5E\u6027 {0} \u5BF9\u4E8E\u7
org.talend.ws.exception.Instantiation=\u65E0\u6CD5\u5B9E\u4F8B\u5316\u7C7B\u578B {0} \u7684\u7C7B
org.talend.ws.exception.InvocationTargetPropertyAccessor=\u5C5E\u6027\u8BBF\u95EE\u5668\u629B\u51FA\u4E86\u4E00\u4E2A\u5F02\u5E38\uFF1A\u7C7B {1} \u7684\u5C5E\u6027 {0}
org.talend.ws.exception.illegalAccessValueOf=\u65E0\u6CD5\u4E3A\u679A\u4E3E\u7C7B\u578B {0} \u8C03\u7528\u65B9\u6CD5 valueOf
org.talend.ws.exception.Unknown=\u629B\u51FA\u4E86\u4E00\u4E2A\u672A\u77E5\u7684\u5F02\u5E38
org.talend.ws.exception.Unknown=\u53D1\u751F\u4E00\u4E2A\u672A\u77E5\u5F02\u5E38
org.talend.ws.exception.InvalidEnumValueException=\u503C {0} \u5BF9\u4E8E\u679A\u4E3E {1} \u65E0\u6548
org.talend.ws.exception.InvalidParameterAnyType=\u60A8\u5FC5\u987B\u6309\u5982\u4E0B\u6240\u793A\u4E3A anyType \u6307\u5B9A\u6620\u5C04\uFF1A{anyType: value, anyType_type: qname}

@@ -63,9 +63,9 @@
        <version>4.1.2</version>
    </dependency>
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>1.2.17</version>
        <groupId>ch.qos.reload4j</groupId>
        <artifactId>reload4j</artifactId>
        <version>1.2.19</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
    <dependency>

@@ -5,7 +5,7 @@

    <groupId>org.talend</groupId>
    <artifactId>talendMQConnectionUtil</artifactId>
    <version>1.0.1-20190215</version>
    <version>1.1.0-20220307</version>
    <packaging>jar</packaging>

    <name>talendMQConnectionUtil</name>
@@ -55,13 +55,8 @@
    </dependency>
    <dependency>
        <groupId>com.ibm.mq</groupId>
        <artifactId>com.ibm.mq</artifactId>
        <version>8.0.0.9</version>
    </dependency>
    <dependency>
        <groupId>com.ibm.mq</groupId>
        <artifactId>com.ibm.mqjms</artifactId>
        <version>8.0.0.9</version>
        <artifactId>com.ibm.mq.allclient</artifactId>
        <version>9.2.4.0</version>
    </dependency>
    <dependency>
        <groupId>javax.resource</groupId>
@@ -71,7 +66,7 @@
    <dependency>
        <groupId>org.talend</groupId>
        <artifactId>talendMQRFH2</artifactId>
        <version>1.0.1-20190206</version>
        <version>1.1.0-20220307</version>
    </dependency>
    <dependency>
        <groupId>org.mockito</groupId>

@@ -53,9 +53,9 @@
        <version>4.1.2</version>
    </dependency>
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>1.2.17</version>
        <groupId>ch.qos.reload4j</groupId>
        <artifactId>reload4j</artifactId>
        <version>1.2.19</version>
    </dependency>
</dependencies>
<build>

@@ -4,7 +4,16 @@
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.talend.components</groupId>
    <artifactId>talendzip</artifactId>
    <version>1.1-20201120</version>
    <version>1.3</version>

    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>https://www.talendforge.org/modules/licenses/APACHE_v2.txt</url>
            <distribution>may be downloaded from the Maven repository</distribution>
        </license>
    </licenses>

    <build>
        <plugins>
            <plugin>
@@ -52,19 +61,14 @@
    <dependency>
        <groupId>net.lingala.zip4j</groupId>
        <artifactId>zip4j</artifactId>
        <version>1.3.3</version>
        <version>2.10.0</version>
    </dependency>


    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-compress</artifactId>
        <version>1.19</version>
    </dependency>

    <dependency>
        <groupId>org.talend.libraries</groupId>
        <artifactId>checkArchive-1.1-20190917</artifactId>
        <version>6.0.0</version>
        <version>1.21</version>
    </dependency>

</dependencies>
</project>

@@ -1,184 +1,183 @@
package org.talend.archive;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipFile;

// import javax.crypto.Cipher;

public class IntegrityUtil {

    /**
     * Checks whether the zip file is corrupted/destroyed.
     *
     * @param file
     * @return
     */
    public static boolean isZipValid(final File file) {
        ZipFile zipFile = null;

        try {
            zipFile = new ZipFile(file);

            return true;
        } catch (IOException e) {
            return false;
        } finally {
            try {
                if (zipFile != null) {
                    zipFile.close();
                    zipFile = null;
                }
            } catch (IOException e) {

            }
        }
    }
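    // Usage sketch (the path is hypothetical): gate further processing on the check.
    //
    //     if (!IntegrityUtil.isZipValid(new File("data.zip"))) {
    //         throw new IOException("data.zip is corrupted");
    //     }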

    public static void validate(final File file) {
        ZipFile zipFile = null;

        try {
            zipFile = new ZipFile(file);

        } catch (IOException e) {
        } finally {
            try {
                if (zipFile != null) {
                    zipFile.close();
                    zipFile = null;
                }
            } catch (IOException e) {

            }
        }
    }

    /**
     * Checks whether the encrypted zip file is corrupted.
     *
     * @param file
     * @param password
     * @return
     */
    public static boolean isEncryptedZipValid(final File file, String password) {
        ZipArchiveInputStream input = null;
        InputStream target = null;
        try {
            target = new FileInputStream(file);
            target = new CipherInputStream(target, createCipher(Cipher.DECRYPT_MODE, password));
            input = new ZipArchiveInputStream(target);
            ArchiveEntry entry = input.getNextEntry();
            return true;

        } catch (IOException e) {
            return false;
        } catch (Exception e) {
            return false;
        } finally {
            try {
                if (input != null) {
                    input.close();
                    input = null;
                }
                if (target != null) {
                    target.close();
                    target = null;
                }
            } catch (IOException e) {
            }
        }
    }

    /**
     * Checks whether a tar.gz/.tgz/.gz file is corrupted/destroyed.
     *
     * @param fileName
     * @return
     */
    public static boolean isGZIPValid(final String fileName) {
        GZIPInputStream inputStream = null;
        InputStream is = null;
        try {
            is = new FileInputStream(new File(fileName));
            inputStream = new GZIPInputStream(is);
            return true;
        } catch (IOException e) {
            return false;
        } finally {
            try {
                if (inputStream != null) {
                    inputStream.close();
                    inputStream = null;
                } else if (is != null) {
                    is.close();
                    is = null;
                }
            } catch (IOException e) {
            }
        }
    }

    /**
     * Checks whether a .tar file is corrupted/destroyed.
     *
     * @param fileName
     * @return
     */
    public static boolean isTarValid(final String fileName) {
        TarArchiveInputStream inputStream = null;
        InputStream is = null;
        try {
            is = new FileInputStream(new File(fileName));
            inputStream = new TarArchiveInputStream(is);
            return inputStream.canReadEntryData(inputStream.getNextEntry());

        } catch (IOException e) {
            return false;
        } finally {
            try {
                if (inputStream != null) {
                    inputStream.close();
                    inputStream = null;
                } else if (is != null) {
                    is.close();
                    is = null;
                }
            } catch (IOException e) {
            }
        }
    }

    /**
     *
     * @param mode
     * @param password
     * @return
     * @throws Exception
     */
    public static Cipher createCipher(int mode, String password) throws Exception {
        String alg = "PBEWithSHA1AndDESede"; // BouncyCastle has better algorithms
        PBEKeySpec keySpec = new PBEKeySpec(password.toCharArray());
        SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(alg);
        SecretKey secretKey = keyFactory.generateSecret(keySpec);

        Cipher cipher = Cipher.getInstance("PBEWithSHA1AndDESede");
        cipher.init(mode, secretKey, new PBEParameterSpec("saltsalt".getBytes(), 2000));

        return cipher;
    }
}
|
||||
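The null-check/finally boilerplate above predates try-with-resources; a minimal, hypothetical rewrite of two of the checks (not part of this changeset) shows how the cleanup collapses:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

import org.apache.commons.compress.archivers.zip.ZipFile;

public final class IntegrityChecks {

    // Commons Compress ZipFile implements Closeable, so try-with-resources
    // closes it automatically on both the success and failure paths.
    public static boolean isZipValid(File file) {
        try (ZipFile zipFile = new ZipFile(file)) {
            return true;    // central directory parsed successfully
        } catch (IOException e) {
            return false;   // corrupted or truncated archive
        }
    }

    public static boolean isGZIPValid(String fileName) {
        try (InputStream is = new FileInputStream(new File(fileName));
                GZIPInputStream gzip = new GZIPInputStream(is)) {
            return true;    // gzip header read and verified
        } catch (IOException e) {
            return false;
        }
    }
}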
package com.talend.compress.zip;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipFile;

import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

public class IntegrityUtil {

    /**
     * Checks whether a zip file is corrupted or truncated.
     *
     * @param file the zip archive to check
     * @return true if the archive's central directory can be parsed
     */
    public static boolean isZipValid(final File file) {
        ZipFile zipFile = null;
        try {
            // Opening the archive parses the central directory; an
            // IOException here indicates a corrupted or truncated file.
            zipFile = new ZipFile(file);
            return true;
        } catch (IOException e) {
            return false;
        } finally {
            try {
                if (zipFile != null) {
                    zipFile.close();
                    zipFile = null;
                }
            } catch (IOException e) {
                // ignore failures while closing
            }
        }
    }

    public static void validate(final File file) {
        ZipFile zipFile = null;
        try {
            zipFile = new ZipFile(file);
        } catch (IOException e) {
            // invalid archives are deliberately ignored here
        } finally {
            try {
                if (zipFile != null) {
                    zipFile.close();
                    zipFile = null;
                }
            } catch (IOException e) {
                // ignore failures while closing
            }
        }
    }

    /**
     * Checks whether an encrypted zip file is corrupted or not.
     *
     * @param file the encrypted zip archive to check
     * @param password the password used to decrypt the archive
     * @return true if the first entry can be read after decryption
     */
    public static boolean isEncryptedZipValid(final File file, String password) {
        ZipArchiveInputStream input = null;
        InputStream target = null;
        try {
            target = new FileInputStream(file);
            target = new CipherInputStream(target, createCipher(Cipher.DECRYPT_MODE, password));
            input = new ZipArchiveInputStream(target);
            // Reading the first entry exercises both the decryption and the
            // zip structure; a corrupted file or wrong password fails here.
            ArchiveEntry entry = input.getNextEntry();
            return true;
        } catch (IOException e) {
            return false;
        } catch (Exception e) {
            return false;
        } finally {
            try {
                if (input != null) {
                    input.close();
                    input = null;
                }
                if (target != null) {
                    target.close();
                    target = null;
                }
            } catch (IOException e) {
                // ignore failures while closing
            }
        }
    }

    /**
     * Checks whether a tar.gz/.tgz/.gz file is corrupted or truncated.
     *
     * @param fileName path of the gzip file to check
     * @return true if the gzip header can be read
     */
    public static boolean isGZIPValid(final String fileName) {
        GZIPInputStream inputStream = null;
        InputStream is = null;
        try {
            is = new FileInputStream(new File(fileName));
            // The GZIPInputStream constructor reads and verifies the header.
            inputStream = new GZIPInputStream(is);
            return true;
        } catch (IOException e) {
            return false;
        } finally {
            try {
                if (inputStream != null) {
                    inputStream.close();
                    inputStream = null;
                } else if (is != null) {
                    is.close();
                    is = null;
                }
            } catch (IOException e) {
                // ignore failures while closing
            }
        }
    }

    /**
     * Checks whether a .tar file is corrupted or truncated.
     *
     * @param fileName path of the tar file to check
     * @return true if the first entry's data is readable
     */
    public static boolean isTarValid(final String fileName) {
        TarArchiveInputStream inputStream = null;
        InputStream is = null;
        try {
            is = new FileInputStream(new File(fileName));
            inputStream = new TarArchiveInputStream(is);
            return inputStream.canReadEntryData(inputStream.getNextEntry());
        } catch (IOException e) {
            return false;
        } finally {
            try {
                if (inputStream != null) {
                    inputStream.close();
                    inputStream = null;
                } else if (is != null) {
                    is.close();
                    is = null;
                }
            } catch (IOException e) {
                // ignore failures while closing
            }
        }
    }

    /**
     * Creates a password-based cipher for encrypting or decrypting archives.
     *
     * @param mode Cipher.ENCRYPT_MODE or Cipher.DECRYPT_MODE
     * @param password the password the key is derived from
     * @return an initialized Cipher
     * @throws Exception if the algorithm is unavailable or the key is invalid
     */
    public static Cipher createCipher(int mode, String password) throws Exception {
        String alg = "PBEWithSHA1AndDESede"; // BouncyCastle has better algorithms
        PBEKeySpec keySpec = new PBEKeySpec(password.toCharArray());
        SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(alg);
        SecretKey secretKey = keyFactory.generateSecret(keySpec);

        // The fixed salt and low iteration count are weak by modern standards
        // but must stay as-is to remain compatible with existing archives.
        Cipher cipher = Cipher.getInstance(alg);
        cipher.init(mode, secretKey, new PBEParameterSpec("saltsalt".getBytes(), 2000));

        return cipher;
    }
}
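A hypothetical usage sketch for either copy of IntegrityUtil; the paths and password are placeholders, not files from this repository:

import java.io.File;

import org.talend.archive.IntegrityUtil;

public class IntegrityCheckDemo {

    public static void main(String[] args) {
        // Swap the import for com.talend.compress.zip.IntegrityUtil to use
        // the other copy; both expose the same static API.
        File zip = new File("example.zip");
        System.out.println("zip valid:           " + IntegrityUtil.isZipValid(zip));
        System.out.println("encrypted zip valid: " + IntegrityUtil.isEncryptedZipValid(zip, "secret"));
        System.out.println("gzip valid:          " + IntegrityUtil.isGZIPValid("example.tar.gz"));
        System.out.println("tar valid:           " + IntegrityUtil.isTarValid("example.tar"));
    }
}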
Some files were not shown because too many files have changed in this diff.