Compare commits

...

47 Commits

Author SHA1 Message Date
vyu-talend
126daf55ab chore(TDI-49458):bump file enhanced. (#6086) 2023-03-30 22:08:28 +08:00
Jane Ding
4f5ce34920 fix(TUP-37227):Issue building when items names too long (#6085)
https://jira.talendforge.org/browse/TUP-37227
2023-03-30 18:15:12 +08:00
Oleksandr Zhelezniak
4b377af1aa chore(TDI-49600): bump connectors 1.27.21 2023-03-24 15:09:35 +02:00
Jane Ding
695dcba405 Revert "fix(TUP-37227):Issue building when items names too long (#5941)" (#6019)
This reverts commit 8d3832fdfb.
2023-03-02 09:44:55 +08:00
jiezhang-tlnd
4dc7e4e82f fix(TUP-37874)SQL inject risk (#6009) 2023-02-23 15:13:01 +08:00
jiezhang-tlnd
4c819add92 fix(TUP-37874)SQL inject risk (#5970)
* fix(TUP-37874)SQL inject risk

* SQL inject risk
2023-02-22 18:18:23 +08:00
zyuan-talend
9c0f5e966d fix(TUP-35060): tWarn does not show the TalendDate.TO_CHAR method. (#5969) 2023-02-21 17:45:05 +08:00
Oleksandr Zhelezniak
da5b7979fd chore(TDI-49419): bump connectors 1.27.20 2023-02-16 17:14:14 +02:00
Jane Ding
8d3832fdfb fix(TUP-37227):Issue building when items names too long (#5941)
* fix(TUP-37227):Issue building when items names too long
https://jira.talendforge.org/browse/TUP-37227

* fix(TUP-37227):Issue building when items names too long
https://jira.talendforge.org/browse/TUP-37227

* fix(TUP-37227):Issue building when items names too long
https://jira.talendforge.org/browse/TUP-37227

* fix(TUP-37227):Issue building when items names too long
https://jira.talendforge.org/browse/TUP-37227
2023-02-15 18:01:04 +08:00
bhe-talendbj
cd8ff90b5b fix(TUP-37920): fix sqllite retrieve tables (#5960) 2023-02-06 17:23:42 +08:00
bhe-talendbj
d98e5ea43c fix(TUP-37425): Performance: Not Responding while Retrieving schema and the database contains many tables (#5948)
* fix(TUP-37425): fix retrieve schema

* fix(TUP-37425): check NPE
2023-01-30 19:38:12 +08:00
AlixMetivier
13b57c8823 fix(TBD-14720): fix hadoop conf jar not found with context metadata (#5932) 2023-01-18 09:30:58 +01:00
bhe-talendbj
0436bc8dd2 chore: fix junit exe resolve (#5936) (#5939)
* chore: fix exe resolve

* chore: revert unnecessay changes

* chore: remove exe dependency test
2023-01-17 18:04:28 +08:00
pyzhou
091de02e75 fix(TDI-49256) Upgrade tck in studio 7.3 (#5933) 2023-01-16 21:21:40 +08:00
bhe-talendbj
eb84dfe75d bugfix(TUP-37425): Performance: Not Responding while Retrieving schema and the database contains many tables (#5929)
* fix(TUP-37425): retrieve table comment once

* fix(TUP-37425): add busy indicator

* fix(TUP-37425): fix UI responsiveness
2023-01-16 19:26:41 +08:00
bhe-talendbj
a7dd89cc3a fix(TUP-37723): Patch Installation: Need restart to generate correct jar version in pom after new patch has been installed (#5914)
* fix(TUP-37723): do not load dropped extensions

* fix(TUP-37723): fix dependencies
2023-01-16 11:19:55 +08:00
pyzhou
211abcac09 fix(TDI-49150) Upgrade tck in studio 7.3 (#5901) 2022-12-30 15:09:18 +08:00
jiezhang-tlnd
089f43ccd4 chore(TUP-37524)CVE-2022-46364,org.apache.cxf:cxf-core:3.4.4,3.5.2 (#5877) 2022-12-29 11:09:38 +08:00
Jane Ding
d008463ef1 fix(TUP-37467):[8.0.1&7.3.1] custom component are no more build as (#5882)
* fix(TUP-37467):[8.0.1&7.3.1] custom component are no more build as
snapshot but releases since june/july patch
https://jira.talendforge.org/browse/TUP-37467

* fix(TUP-37467):[8.0.1&7.3.1] custom component are no more build as
snapshot but releases since june/july patch
https://jira.talendforge.org/browse/TUP-37467
2022-12-28 17:40:50 +08:00
wang wei
d7ed643621 fix(TDI-48822): Bump component-runtime to 1.38.8 in Studio 7.3.1 (#5896) 2022-12-28 13:58:56 +08:00
pyzhou
0972593afd fix(TDI-49113) Upgrade tck in studio 7.3 (#5894) 2022-12-26 16:22:24 +08:00
pyzhou
c50c5c2e46 fix(TDI-49004): backport tmap java17 7.3
* fix(TDI-46572): tMap support Java17 (#4659)

* fix(TDI-46572):add jboss dependency to routines

* add to .m2

* fix(TDI-46572):advancedPersistentLookup lib upgrade

* replace advancedPersistentLookupLib jar

* change name

* format

* upgrade jar

* add back dependency

* remove LGPL dependency

* Remove binary jar and copy during build

* fix NPE

* test remove jboss from build
# Conflicts:
#	main/plugins/org.talend.libraries.persist.lookup/META-INF/MANIFEST.MF
#	main/plugins/org.talend.librariesmanager/META-INF/MANIFEST.MF
#	main/plugins/org.talend.librariesmanager/build.properties

* fix(TDI-46923):add back jboss marshalling (#4711)

# Conflicts:
#	main/plugins/org.talend.designer.maven.tos/resources/build/pom.xml

* fix(TDI-46937): fix tmap tuj (#4723)

* fix(TDI-46937): fix tmap tuj

* fix(TDI-46937):fix tuj

* fix(TDI-47147):tMap TUJ fix (#4817)

* Revert "fix(TDI-46937): fix tmap tuj (#4723)"

This reverts commit aacc14f92b.

* delete middle version
2022-12-26 14:38:14 +08:00
Jane Ding
0b7b156f22 Revert "fix(TUP-37227):Issue building when items names too long (#5837)" (#5890)
This reverts commit 30aa2d25a6.
2022-12-23 11:43:03 +08:00
Jane Ding
30aa2d25a6 fix(TUP-37227):Issue building when items names too long (#5837)
* fix(TUP-37227):Issue building when items names too long
https://jira.talendforge.org/browse/TUP-37227

* fix(TUP-37227):Issue building when items names too long
https://jira.talendforge.org/browse/TUP-37227
2022-12-19 12:00:44 +08:00
jzhao
6a813e2a73 chore(TDI-48946):Bump TCK connector 1.27.16 for 7.3.1-R2022-12 (#5839) 2022-11-28 16:33:54 +08:00
Chao MENG
4b7a59b497 fix(TUP-37228): Studio errors when attempting to create a Cloudera Dynamic Distro when repository.apache.org port 80 is blocked (#5823)
* fix(TUP-37228): Studio errors when attempting to create a Cloudera
Dynamic Distro when repository.apache.org port 80 is blocked
https://jira.talendforge.org/browse/TUP-37228

* fix(TUP-37228): Studio errors when attempting to create a Cloudera
Dynamic Distro when repository.apache.org port 80 is blocked
https://jira.talendforge.org/browse/TUP-37228
2022-11-25 14:49:33 +08:00
Jane Ding
d5490d2663 fix(TUP-36933):[7.3.1] import dependencies is not working when we do a copy from brunch (#5794)
* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from brunch
https://jira.talendforge.org/browse/TUP-36933

* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from brunch
https://jira.talendforge.org/browse/TUP-36933

Conflicts:
	main/plugins/org.talend.repository.items.importexport.ui/src/main/java/org/talend/repository/items/importexport/ui/wizard/imports/ImportItemsWizardPage.java

* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from brunch
https://jira.talendforge.org/browse/TUP-36933

Conflicts:
	main/plugins/org.talend.repository.items.importexport.ui/src/main/java/org/talend/repository/items/importexport/ui/wizard/imports/ImportItemsWizardPage.java

* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from brunch
https://jira.talendforge.org/browse/TUP-36933
2022-11-22 11:08:29 +08:00
zyuan-talend
fbfc3735ad fix(TUP-37016):CVE-2022-41853, upgrade org.hsqldb:hsqldb:2.3.1 to 2.7.1. (#5809) 2022-11-17 15:30:45 +08:00
zyuan-talend
76fbd6fd32 fix(TUP-37016):CVE-2022-41853, upgrade org.hsqldb:hsqldb:2.3.1 to 2.7.1. (#5766) (#5797) 2022-11-15 16:14:56 +08:00
sbliu
43ef7fa5e0 chore(TUP-36964) upgrade commons-text to 1.10.0 (#5770) 2022-11-09 14:39:39 +08:00
pyzhou
be28a0d122 fix(48799) Upgrade tck in studio 7.3 (#5771) 2022-11-03 23:13:04 +08:00
sbliu
e44522bb69 feat(TUP-35340) fix ui problem of show checkbox (#5755) 2022-10-31 16:59:10 +08:00
sbliu
67f04b7db3 feat(TUP-36340) oracle add support of orai18n.jar (#5703) 2022-10-31 16:04:37 +08:00
pyzhou
2502688e64 Pyzhou/tdi 48676 resume util oom 7.3 (#5742)
* fix(TDI-48676):ResumeUtil OOM

* replace duplicate variable
2022-10-27 14:51:53 +08:00
sbliu
6175aca630 chore(TUP-36930) upgrade jackson-databind to 2.13.4.2 (#5724)
upgrade jackson-databind to 2.13.4.2, jackson-core/jackson-annotations to 2.13.4
2022-10-27 11:06:25 +08:00
zyuan-talend
55f7531d68 fix(TUP-36820):Improve performance of importing large size metadata file (#5677)
xml.
2022-10-24 16:01:24 +08:00
Svitlana Anulich
d6c888e235 fix(TBD-14194): add knox session timeout variable (#5685) (#5718) 2022-10-24 10:24:32 +03:00
Svitlana Anulich
7db18b198f fix(TBD-14328): ClassNotFoundException when check service for CDP 7.x knox (#5715) 2022-10-24 10:20:15 +03:00
pyzhou
039ed90481 fix(TDI-48746) Upgrade tck in studio 7.3 (#5737) 2022-10-24 13:05:33 +08:00
apoltavtsev
eea9c85609 fix(APPINT-35054) Build type for child Jobs is corrected (#5675) 2022-10-10 11:54:35 +02:00
Jane Ding
ca9d09e04e fix(APPINT-35054) Add optional mechanism to align project models (#5671)
BUILD_TYPE
https://jira.talendforge.org/browse/APPINT-35054
2022-10-09 16:19:37 +08:00
jiezhang-tlnd
b5dadab5a5 chore(TUP-36715)CVE: xerces:xercesImpl:2.12.0 (#5609) 2022-10-08 11:07:28 +08:00
apoltavtsev
2add3ffc0e fix(APPINT-35054) Add optional mechanism to align project models BUILD_TYPE (#5661)
* Add files via upload

* Update MavenProjectSettingPage.java

* Update messages.properties

* Add files via upload

* Update MANIFEST.MF

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDsRestMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForSOAPServiceJobMigrationTask.java

* Update BuildTypeManager.java

* Update MavenProjectSettingPage.java

* Update MavenProjectSettingPage.java

* Add files via upload

* Update MavenProjectSettingPage.java

* Update BuildTypeManager.java

* Update AbstractCorrectBuildItemMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDsRestMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForSOAPServiceJobMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update BuildTypeManager.java

* Update BuildTypeManager.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update AbstractCorrectBuildItemMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDsRestMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForSOAPServiceJobMigrationTask.java
2022-10-07 09:43:15 +02:00
pyzhou
38b02ee746 fix(TDI-48597) Upgrade tck in studio 7.3 (#5644) 2022-09-26 23:38:59 +08:00
jiezhang-tlnd
d8daf75329 fix(TUP-36674)align the version of com.google.code.gson:gson with (#5601)
component to be 2.8.9 for mssql metadata part
2022-09-23 15:14:42 +08:00
Liu Xinquan
555a722ade fix(TDQ-20610) org.apache.lucene:lucene-core (#5569) (#5570) 2022-09-06 17:20:00 +08:00
hzhao
8a003e1a21 fix(TUP-36593): Nexus proxy password is stored in clear text in project (#5563) 2022-09-02 16:54:57 +08:00
117 changed files with 3429 additions and 452 deletions


@@ -92,7 +92,7 @@
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.0</version>
<version>2.12.2</version>
</dependency>
<dependency>
<groupId>ch.qos.reload4j</groupId>


@@ -12,16 +12,28 @@
// ============================================================================
package org.talend.commons.utils.workbench.extensions;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExtension;
import org.eclipse.core.runtime.IExtensionPoint;
import org.eclipse.core.runtime.IExtensionRegistry;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.preferences.ConfigurationScope;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.IllegalPluginConfigurationException;
import org.talend.commons.i18n.internal.Messages;
import org.talend.utils.json.JSONException;
import org.talend.utils.json.JSONObject;
/**
* Utilities class uses to get implementation of extension points defined by plug-ins. <br/>
@@ -36,6 +48,10 @@ public abstract class ExtensionImplementationProvider<I> {
private String plugInId;
public final static String FILE_FEATURES_INDEX = "extra_feature.index";
public final static String DROP_BUNDLE_INFO = "drop.bundle.info";
/**
* Default Constructor. Must not be used.
*/
@@ -166,9 +182,17 @@ public abstract class ExtensionImplementationProvider<I> {
}
IExtension[] extensions = pt.getExtensions();
Map<String, String> dropBundles = null;
try {
dropBundles = getDropBundleInfo();
} catch (IOException e) {
ExceptionHandler.process(e);
}
for (IExtension extension : extensions) {
if (dropBundles != null && dropBundles.containsKey(extension.getNamespaceIdentifier())
&& StringUtils.isEmpty(dropBundles.get(extension.getNamespaceIdentifier()))) {
continue;
}
if (plugInId == null || extension.getNamespaceIdentifier().equals(plugInId)) {
String configurationElementName = extensionPointLimiter.getConfigurationElementName();
if (configurationElementName != null) {
@@ -196,6 +220,32 @@ public abstract class ExtensionImplementationProvider<I> {
return toReturn;
}
/**********************************************************
* Copied from org.talend.commons.configurator
**********************************************************/
public Map<String, String> getDropBundleInfo() throws IOException {
File indexFile = new File(ConfigurationScope.INSTANCE.getLocation().toFile(), FILE_FEATURES_INDEX);
if (!indexFile.exists()) {
return Collections.emptyMap();
}
Map<String, String> dropInfoMap = new HashMap<>();
try {
String jsonStr = new String(Files.readAllBytes(indexFile.toPath()));
if (!jsonStr.isEmpty()) {
JSONObject obj = new JSONObject(jsonStr);
JSONObject dropInfo = obj.getJSONObject(DROP_BUNDLE_INFO);
Iterator<String> iterator = dropInfo.keys();
while (iterator.hasNext()) {
String key = iterator.next();
dropInfoMap.put(key, dropInfo.getString(key));
}
}
} catch (JSONException e) {
throw new IOException(e);
}
return dropInfoMap;
}
/**
* DOC amaumont Comment method "createAndAddImplementation".
*


@@ -16,6 +16,7 @@ import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -154,6 +155,8 @@ public interface ILibraryManagerService extends IService {
public boolean contains(String jarName);
public void clearCache();
public void deployLibsFromCustomComponents(File componentFolder, List<ModuleNeeded> modulesNeeded);
@Deprecated
public Set<String> list(boolean withComponent, IProgressMonitor... monitorWrap);


@@ -380,5 +380,6 @@ public class ConnParameterKeys {
public static final String CONN_PARA_KEY_KNOX_DIRECTORY="CONN_PARA_KEY_KNOX_DIRECTORY";
public static final String CONN_PARA_KEY_KNOX_TIMEOUT="CONN_PARA_KEY_KNOX_TIMEOUT";
}


@@ -25,8 +25,8 @@ import org.talend.core.database.conn.DatabaseConnConstants;
public enum EDatabaseVersion4Drivers {
// access
ACCESS_JDBC(new DbVersion4Drivers(EDatabaseTypeName.ACCESS, new String[] {
"jackcess-2.1.0.jar", "ucanaccess-2.0.9.5.jar", "commons-lang-2.6.jar", "commons-logging-1.1.1.jar", "hsqldb.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"jackcess-encrypt-2.1.0.jar", "bcprov-jdk15on-1.51.jar", "talend-ucanaccess-utils-1.0.0.jar" })),
"jackcess-2.1.12.jar", "ucanaccess-2.0.9.5.jar", "commons-lang-2.6.jar", "commons-logging-1.1.3.jar", "hsqldb.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"jackcess-encrypt-2.1.4.jar", "bcprov-jdk15on-1.69.jar", "talend-ucanaccess-utils-1.0.0.jar" })),
ACCESS_2003(new DbVersion4Drivers(EDatabaseTypeName.ACCESS, "Access 2003", "Access_2003")), //$NON-NLS-1$ //$NON-NLS-2$
ACCESS_2007(new DbVersion4Drivers(EDatabaseTypeName.ACCESS, "Access 2007", "Access_2007")), //$NON-NLS-1$ //$NON-NLS-2$
// oracle
@@ -87,7 +87,7 @@ public enum EDatabaseVersion4Drivers {
MSSQL_PROP(new DbVersion4Drivers(EDatabaseTypeName.MSSQL,
"Microsoft", "MSSQL_PROP", //$NON-NLS-1$ //$NON-NLS-2$
new String[] { "mssql-jdbc.jar", "slf4j-api-1.7.25.jar", "slf4j-log4j12-1.7.25.jar", "adal4j-1.6.7.jar", //$NON-NLS-1$
"commons-lang3-3.10.jar", "commons-codec-1.14.jar", "gson-2.8.6.jar", "oauth2-oidc-sdk-9.7.jar",
"commons-lang3-3.10.jar", "commons-codec-1.14.jar", "gson-2.8.9.jar", "oauth2-oidc-sdk-9.7.jar",
"json-smart-2.4.7.jar", "nimbus-jose-jwt-9.22.jar", "javax.mail-1.6.2.jar", "reload4j-1.2.19.jar",
"accessors-smart-2.4.7.jar", "asm-9.1.jar", "content-type-2.1.jar" })),
@@ -171,8 +171,10 @@ public enum EDatabaseVersion4Drivers {
REDSHIFT(new DbVersion4Drivers(EDatabaseTypeName.REDSHIFT, "redshift", "REDSHIFT", //$NON-NLS-1$ //$NON-NLS-2$
new String[]{ "redshift-jdbc42-no-awssdk-1.2.55.1083.jar", "antlr4-runtime-4.8-1.jar" })), //$NON-NLS-1$ //$NON-NLS-2$
REDSHIFT_SSO(new DbVersion4Drivers(EDatabaseTypeName.REDSHIFT_SSO, "redshift sso", "REDSHIFT_SSO", //$NON-NLS-1$ //$NON-NLS-2$
new String[] { "redshift-jdbc42-no-awssdk-1.2.55.1083.jar", "antlr4-runtime-4.8-1.jar", "aws-java-sdk-1.11.848.jar", "jackson-core-2.10.1.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
"jackson-databind-2.10.1.jar", "jackson-annotations-2.10.1.jar", "httpcore-4.4.11.jar", "httpclient-4.5.9.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$//$NON-NLS-4$
new String[] { "redshift-jdbc42-no-awssdk-1.2.55.1083.jar", "antlr4-runtime-4.8-1.jar", "aws-java-sdk-1.11.848.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"jackson-core-2.13.4.jar", //$NON-NLS-1$
"jackson-databind-2.13.4.2.jar", "jackson-annotations-2.13.4.jar", "httpcore-4.4.11.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"httpclient-4.5.9.jar", //$NON-NLS-1$
"joda-time-2.8.1.jar", "commons-logging-1.2.jar", "commons-codec-1.11.jar" })), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
AMAZON_AURORA(new DbVersion4Drivers(EDatabaseTypeName.AMAZON_AURORA, "mysql-connector-java-5.1.49.jar")); //$NON-NLS-1$


@@ -38,7 +38,10 @@ public class HadoopClassLoaderFactory2 {
public static ClassLoader getHDFSClassLoader(String relatedClusterId, String distribution, String version, boolean useKrb) {
return getClassLoader(relatedClusterId, EHadoopCategory.HDFS, distribution, version, useKrb);
}
public static ClassLoader getHDFSKnoxClassLoader(String relatedClusterId, String distribution, String version, boolean useKrb) {
return HadoopClassLoaderFactory2.getClassLoader(relatedClusterId, EHadoopCategory.HDFS, distribution, version, useKrb,
IHadoopArgs.HDFS_ARG_KNOX);
}
public static ClassLoader getMRClassLoader(String relatedClusterId, String distribution, String version, boolean useKrb) {
return getClassLoader(relatedClusterId, EHadoopCategory.MAP_REDUCE, distribution, version, useKrb);
}


@@ -22,4 +22,6 @@ public interface IHadoopArgs {
public static final String HIVE_ARG_STANDALONE = "STANDALONE"; //$NON-NLS-1$
public static final String HDFS_ARG_KNOX = "USE_KNOX"; //$NON-NLS-1$
}


@@ -194,6 +194,10 @@ public interface IMetadataConnection extends IMetadata {
public String getContextName();
public void setContextName(String contextName);
public boolean isSupportNLS();
public void setSupportNLS(boolean newSupportNLS);
/**
* Returns the value that you stored in the data collection by the key. Normally, it is like this key-value. For


@@ -254,6 +254,7 @@ public final class ConvertionHelper {
result.setContentModel(connection.isContextMode());
result.setContextId(sourceConnection.getContextId());
result.setContextName(sourceConnection.getContextName());
result.setSupportNLS(sourceConnection.isSupportNLS());
// handle oracle database connnection of general_jdbc.
result.setSchema(getMeataConnectionSchema(result));
convertOtherParameters(result, connection);


@@ -113,6 +113,7 @@ public class MetadataConnection implements IMetadataConnection {
private String contextName;
private boolean supportNLS = false;
// ~
private String comment;
@@ -729,6 +730,14 @@ public class MetadataConnection implements IMetadataConnection {
public void setContextName(String contextName) {
this.contextName = contextName;
}
public boolean isSupportNLS() {
return supportNLS;
}
public void setSupportNLS(boolean supportNLS) {
this.supportNLS = supportNLS;
}
/*
* (non-Javadoc)


@@ -1225,6 +1225,11 @@ public class RepositoryToComponentProperty {
return value2;
}
if(value.equals("SUPPORT_NLS")) {
return connection.isSupportNLS();
}
if (value.equals("CDC_TYPE_MODE")) { //$NON-NLS-1$
return new Boolean(CDCTypeMode.LOG_MODE.getName().equals(connection.getCdcTypeMode()));
}


@@ -79,4 +79,8 @@ public final class TalendPropertiesUtil {
public static String getProductApp() {
return System.getProperty(PROD_APP);
}
public static boolean isEnabledUseShortJobletName() {
return isEnabled("talend.job.build.useShortJobletName"); //$NON-NLS-1$
}
}


@@ -27,6 +27,7 @@ import org.talend.core.runtime.projectsetting.ProjectPreferenceManager;
import org.talend.core.service.IRemoteService;
import org.talend.repository.model.IProxyRepositoryFactory;
import org.talend.repository.model.RepositoryConstants;
import org.talend.utils.security.StudioEncryption;
/**
* created by wchen on 2015年6月16日 Detailled comment
@@ -255,7 +256,7 @@ public class TalendLibsServerManager {
if (enableProxyFlag) {
serverBean.setServer(prefManager.getValue(TalendLibsServerManager.NEXUS_PROXY_URL));
serverBean.setUserName(prefManager.getValue(TalendLibsServerManager.NEXUS_PROXY_USERNAME));
serverBean.setPassword(prefManager.getValue(TalendLibsServerManager.NEXUS_PROXY_PASSWORD));
serverBean.setPassword(StudioEncryption.getStudioEncryption(StudioEncryption.EncryptionKeyName.SYSTEM).decrypt(prefManager.getValue(TalendLibsServerManager.NEXUS_PROXY_PASSWORD)));
serverBean.setRepositoryId(prefManager.getValue(TalendLibsServerManager.NEXUS_PROXY_REPOSITORY_ID));
serverBean.setType(prefManager.getValue(TalendLibsServerManager.NEXUS_PROXY_TYPE));
}


@@ -78,6 +78,7 @@ public class RoutinesFunctionProposal implements IContentProposal {
message += Messages.getString("RoutinesFunctionProposal.CreatedBy");
message += Messages.getString("RoutinesFunctionProposal.ReturnType");
message += Messages.getString("RoutinesFunctionProposal.VariableName");
message = message.replaceAll("\n", System.getProperty("line.separator", "\n")); // for display on Windows platform
MessageFormat format = new MessageFormat(message);
Object[] args = new Object[] { function.getDescription(),


@@ -46,6 +46,8 @@ import org.talend.designer.maven.aether.util.TalendAetherProxySelector;
*/
public class RepositorySystemFactory {
private static Boolean ignoreArtifactDescriptorRepositories;
private static Map<LocalRepository, DefaultRepositorySystemSession> sessions = new HashMap<LocalRepository, DefaultRepositorySystemSession>();
private static DefaultRepositorySystemSession newRepositorySystemSession(String localRepositoryPath)
@@ -61,6 +63,8 @@ public class RepositorySystemFactory {
repositorySystemSession.setTransferListener(new ChainedTransferListener());
repositorySystemSession.setRepositoryListener(new ChainedRepositoryListener());
repositorySystemSession.setProxySelector(new TalendAetherProxySelector());
repositorySystemSession.setIgnoreArtifactDescriptorRepositories(
RepositorySystemFactory.isIgnoreArtifactDescriptorRepositories());
sessions.put(localRepo, repositorySystemSession);
}
@@ -157,4 +161,13 @@ public class RepositorySystemFactory {
doDeploy(content, pomFile, localRepository, repositoryId, repositoryUrl, userName, password, groupId, artifactId,
classifier, extension, version);
}
public static boolean isIgnoreArtifactDescriptorRepositories() {
if (ignoreArtifactDescriptorRepositories == null) {
ignoreArtifactDescriptorRepositories = Boolean.valueOf(
System.getProperty("talend.studio.aether.ignoreArtifactDescriptorRepositories", Boolean.TRUE.toString()));
}
return ignoreArtifactDescriptorRepositories;
}
}


@@ -63,6 +63,7 @@ import org.eclipse.m2e.core.MavenPlugin;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.designer.maven.aether.DummyDynamicMonitor;
import org.talend.designer.maven.aether.IDynamicMonitor;
import org.talend.designer.maven.aether.RepositorySystemFactory;
import org.talend.designer.maven.aether.node.DependencyNode;
import org.talend.designer.maven.aether.node.ExclusionNode;
import org.talend.designer.maven.aether.selector.DynamicDependencySelector;
@@ -513,6 +514,7 @@ public class DynamicDistributionAetherUtils {
LocalRepository localRepo = new LocalRepository(repositoryPath);
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
session.setProxySelector(new TalendAetherProxySelector());
session.setIgnoreArtifactDescriptorRepositories(RepositorySystemFactory.isIgnoreArtifactDescriptorRepositories());
updateDependencySelector(session, monitor);


@@ -34,7 +34,6 @@ import org.codehaus.plexus.PlexusContainerException;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
import org.eclipse.aether.impl.DefaultServiceLocator;
@@ -57,6 +56,7 @@ import org.talend.core.nexus.ArtifactRepositoryBean;
import org.talend.core.nexus.NexusConstants;
import org.talend.core.nexus.TalendLibsServerManager;
import org.talend.core.runtime.maven.MavenArtifact;
import org.talend.designer.maven.aether.RepositorySystemFactory;
public class MavenLibraryResolverProvider {
@@ -283,8 +283,9 @@ public class MavenLibraryResolverProvider {
LocalRepository localRepo = new LocalRepository( /* "target/local-repo" */target);
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
session.setProxySelector(new TalendAetherProxySelector());
session.setIgnoreArtifactDescriptorRepositories(RepositorySystemFactory.isIgnoreArtifactDescriptorRepositories());
return session;
return session;
}
private String getLocalMVNRepository() {


@@ -139,10 +139,15 @@
<version>1.21</version>
</dependency>
<dependency>
<groupId>org.apache-extras.beanshell</groupId>
<artifactId>bsh</artifactId>
<version>2.0b6</version>
</dependency>
<groupId>org.apache-extras.beanshell</groupId>
<artifactId>bsh</artifactId>
<version>2.0b6</version>
</dependency>
<dependency>
<groupId>org.jboss.marshalling</groupId>
<artifactId>jboss-marshalling</artifactId>
<version>2.0.12.Final</version>
</dependency>
</dependencies>
<build>
<plugins>


@@ -151,6 +151,12 @@
<groupId>org.talend.components</groupId>
<artifactId>components-marklogic-runtime</artifactId>
<version>${components.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.talend.components</groupId>
@@ -210,6 +216,11 @@
<artifactId>commons-beanutils</artifactId>
<version>1.9.4</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.10.0</version>
</dependency>
</dependencies>
<build>
<plugins>


@@ -20,7 +20,7 @@
<module>zip/pom.xml</module>
</modules>
<properties>
<m2.fasterxml.jackson.version>2.13.2</m2.fasterxml.jackson.version>
<m2.fasterxml.jackson.version>2.13.4</m2.fasterxml.jackson.version>
<jackson-codehaus.version>1.9.16-TALEND</jackson-codehaus.version>
</properties>
</project>


@@ -10,7 +10,7 @@
<artifactId>studio-tacokit-dependencies</artifactId>
<packaging>pom</packaging>
<properties>
<tacokit.components.version>1.27.12</tacokit.components.version>
<tacokit.components.version>1.27.21</tacokit.components.version>
</properties>
<repositories>
<repository>


@@ -11,7 +11,7 @@
<packaging>pom</packaging>
<properties>
<tcomp.version>1.38.6</tcomp.version>
<tcomp.version>1.38.8</tcomp.version>
<slf4j.version>1.7.32</slf4j.version>
<log4j2.version>2.17.1</log4j2.version>
<reload4j.version>1.2.19</reload4j.version>


@@ -10,10 +10,11 @@ ProjectPomProjectSettingPage_ConfirmMessage=Will apply and update for project PO
ProjectPomProjectSettingPage_FilterPomLabel=Filter to use to generate poms:
ProjectPomProjectSettingPage_FilterErrorMessage=Invalid filter: {0}
ProjectPomProjectSettingPage.syncAllPomsButtonText=Force full re-synchronize poms
ProjectPomProjectSettingPage.syncBuildTypesButtonText=Force full re-synchronize build types
AbstractPersistentProjectSettingPage.syncAllPoms=Do you want to update all poms? \n This operation might take long time depends on your project size.
MavenProjectSettingPage.filterExampleMessage=Filter examples:\nlabel=myJob \t\t\t\t=> Generate only the job named "myJob"\n!(label=myJob) \t\t\t\t=> Generate any job except the one named "myJob"\n(path=folder1/folder2) \t\t\t=> Generate any job in the folder "folder1/folder2"\n(path=folder1/folder2)or(label=myJob)\t=> Generate any job in the folder "folder1/folder2" or named "myJob"\n(label=myJob)and(version=0.2) \t=> Generate only the job named "myJob" with version 0.2\n!((label=myJob)and(version=0.1)) \t=> Generate every jobs except the "myJob" version 0.1
MavenProjectSettingPage.refModuleText=Set reference project modules in profile
MavenProjectSettingPage.excludeDeletedItems=Exclude deleted items
MavenProjectSettingPage.syncAllPomsWarning=Click the Force full re-synchronize poms button to apply the new settings.
MavenProjectSettingPage.skipFolders=Skip folders
BuildProjectSettingPage.allowRecursiveJobs=Allow recursive jobs (Not recommended)
BuildProjectSettingPage.allowRecursiveJobs=Allow recursive jobs (Not recommended)


@@ -34,6 +34,7 @@ import org.talend.core.runtime.projectsetting.AbstractProjectSettingPage;
import org.talend.core.runtime.services.IFilterService;
import org.talend.designer.maven.DesignerMavenPlugin;
import org.talend.designer.maven.tools.AggregatorPomsHelper;
import org.talend.designer.maven.tools.BuildTypeManager;
import org.talend.designer.maven.ui.i18n.Messages;
/**
@@ -169,7 +170,25 @@ public class MavenProjectSettingPage extends AbstractProjectSettingPage {
});
}
if (isSyncBuildTypeAllowed()) {
Button syncBuildTypes = new Button(parent, SWT.NONE);
syncBuildTypes.setText(Messages.getString("ProjectPomProjectSettingPage.syncBuildTypesButtonText")); //$NON-NLS-1$
syncBuildTypes.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent event) {
try {
new BuildTypeManager().syncBuildTypes(getCurrentPage());
} catch (Exception e) {
ExceptionHandler.process(e);
}
}
});
}
}
private void addSyncWarning() {
setMessage(Messages.getString("MavenProjectSettingPage.syncAllPomsWarning"), IMessage.WARNING); //$NON-NLS-1$
@@ -203,4 +222,12 @@ public class MavenProjectSettingPage extends AbstractProjectSettingPage {
return displayVersion;
}
private static boolean isSyncBuildTypeAllowed() {
return Boolean.getBoolean("talend.builtype.syncallowed");
}
private MavenProjectSettingPage getCurrentPage() {
return this;
}
}


@@ -23,7 +23,8 @@ Require-Bundle: org.eclipse.core.runtime,
org.talend.common.ui.runtime,
org.talend.core.runtime,
org.eclipse.jface,
org.talend.core.repository
org.talend.core.repository,
org.talend.core
Bundle-ActivationPolicy: lazy
Export-Package: org.talend.designer.maven,
org.talend.designer.maven.launch,


@@ -0,0 +1,149 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.Property;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.core.ui.ITestContainerCoreService;
/**
* DOC jding class global comment. Detailled comment
*/
public class ItemReportRecorder {
protected Item item;
protected String detailMessage;
public String getItemType() {
String type = "";
ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
if (itemType != null) {
if (ERepositoryObjectType.getAllTypesOfTestContainer().contains(itemType)) {
Item parentJobItem = getTestCaseParentJobItem(item);
if (parentJobItem != null) {
ERepositoryObjectType parentJobType = ERepositoryObjectType.getItemType(parentJobItem);
if (parentJobType != null) {
String parentTypePath = getCompleteObjectTypePath(parentJobType);
if (StringUtils.isNotBlank(parentTypePath)) {
type = parentTypePath + "/";
}
}
}
type += itemType;
} else {
type = getCompleteObjectTypePath(itemType);
}
}
return type;
}
public String getItemPath() {
String path = "";
StringBuffer buffer = new StringBuffer();
ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
if (ERepositoryObjectType.getAllTypesOfTestContainer().contains(itemType)) {
StringBuffer testcaseBuffer = new StringBuffer();
Item parentJobItem = getTestCaseParentJobItem(item);
if (parentJobItem != null) {
if (parentJobItem.getState() != null && StringUtils.isNotBlank(parentJobItem.getState().getPath())) {
testcaseBuffer.append(parentJobItem.getState().getPath()).append("/");
}
testcaseBuffer.append(parentJobItem.getProperty() != null ? parentJobItem.getProperty().getLabel() : "");
if (StringUtils.isNotBlank(testcaseBuffer.toString())) {
buffer.append(testcaseBuffer.toString()).append("/");
}
}
} else {
if (item.getState() != null && StringUtils.isNotBlank(item.getState().getPath())) {
buffer.append(item.getState().getPath()).append("/");
}
}
Property property = item.getProperty();
if (property != null) {
buffer.append(property.getLabel() + "_" + property.getVersion());
}
path = buffer.toString();
return path;
}
private Item getTestCaseParentJobItem(Item testcaseItem) {
Item parentJobItem = null;
if (GlobalServiceRegister.getDefault().isServiceRegistered(ITestContainerCoreService.class)) {
ITestContainerCoreService testcaseService = GlobalServiceRegister.getDefault()
.getService(ITestContainerCoreService.class);
if (testcaseService != null) {
try {
parentJobItem = testcaseService.getParentJobItem(item);
} catch (PersistenceException e) {
ExceptionHandler.process(e);
}
}
}
return parentJobItem;
}
private String getCompleteObjectTypePath(ERepositoryObjectType itemType) {
ERepositoryObjectType rootItemType = itemType;
if (ERepositoryObjectType.JDBC != null && ERepositoryObjectType.JDBC.equals(rootItemType)) {
rootItemType = ERepositoryObjectType.METADATA_CONNECTIONS;
}
List<String> typeLabels = new ArrayList<String>();
findOutCompleteTypePath(rootItemType, typeLabels);
StringBuffer buffer = new StringBuffer();
if (!typeLabels.isEmpty()) {
for (int i = 0; i < typeLabels.size(); i++) {
if (i != 0) {
buffer.append("/");
}
buffer.append(typeLabels.get(i));
}
}
return buffer.toString();
}
private void findOutCompleteTypePath(ERepositoryObjectType type, List<String> typeLabels) {
ERepositoryObjectType parentType = ERepositoryObjectType.findParentType(type);
if (parentType != null) {
findOutCompleteTypePath(parentType, typeLabels);
}
typeLabels.add(type.getLabel());
}
public Item getItem() {
return item;
}
public void setItem(Item item) {
this.item = item;
}
public String getDetailMessage() {
return detailMessage;
}
public void setDetailMessage(String detailMessage) {
this.detailMessage = detailMessage;
}
}


@@ -0,0 +1,81 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.eclipse.core.runtime.Path;
import org.talend.core.utils.TalendQuoteUtils;
/**
* DOC jding class global comment. Detailled comment
*/
public class ItemsReportUtil {
public static boolean generateReportFile(File reportFile, String header, List<String> records) throws Exception {
boolean generateDone = false;
BufferedWriter printWriter = null;
try {
File parentFolder = new Path(reportFile.getAbsolutePath()).removeLastSegments(1).toFile();
// File parentFolder = new File(parentPath);
if (!parentFolder.exists()) {
parentFolder.mkdirs();
}
if (!reportFile.exists()) {
reportFile.createNewFile();
}
FileOutputStream fos = new FileOutputStream(reportFile);
fos.write(new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF });
OutputStreamWriter outputWriter = new OutputStreamWriter(fos, "UTF-8");
printWriter = new BufferedWriter(outputWriter);
printWriter.write(header);
printWriter.newLine();
for (String recordStr : records) {
printWriter.write(recordStr);
printWriter.newLine();
}
printWriter.flush();
generateDone = true;
} finally {
if (printWriter != null) {
printWriter.close();
}
}
return generateDone;
}
public static String handleColumnQuotes(String text) {
String quoteMark = TalendQuoteUtils.QUOTATION_MARK;
text = StringUtils.isBlank(text) ? "" : text;
if (text.contains(quoteMark)) {
// replace to double quote surround
text = text.replace(quoteMark, quoteMark + quoteMark);
}
return quoteMark + text + quoteMark;
}
public static String getCurrentTimeString() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String time = dateFormat.format(new Date());
return time;
}
}


@@ -0,0 +1,177 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.preferences.ConfigurationScope;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import org.eclipse.core.runtime.preferences.IScopeContext;
import org.osgi.service.prefs.BackingStoreException;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.core.PluginChecker;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.Property;
import org.talend.migration.IProjectMigrationTask;
/**
* DOC jding class global comment. Detailled comment
*/
public class MigrationReportHelper {
private static final String COMMA = ",";
private static final String PLUGIN_ID = "org.talend.designer.maven";
private static final String DO_NOT_SHOW_PREF_KEY = "talend.migrationReportDialog.doNotShowAgain";
private static final String MIGRATION_REPORT_HEAD = "Task name,Task description,Item type,Path to migrated item,Migration details";
private static final MigrationReportHelper instance = new MigrationReportHelper();
public static MigrationReportHelper getInstance() {
return instance;
}
private String reportGeneratedPath = "";
private Set<String> taskItemRecords = new HashSet<String>();
private List<MigrationReportRecorder> migrationReportRecorders = new ArrayList<MigrationReportRecorder>();
public void generateMigrationReport(String projectTecName) {
if (migrationReportRecorders == null || migrationReportRecorders.isEmpty()) {
return;
}
if (!PluginChecker.isTIS()) {
clearRecorders();
return;
}
File exportFolder = null;
File reportFile = null;
try {
String currentTime = getCurrentTime();
String filePath = getReportExportFolder(currentTime) + "/" + getReportFileName(currentTime, projectTecName);
reportGeneratedPath = filePath;
reportFile = new File(filePath);
List<String> recordLines = new ArrayList<String>();
for (MigrationReportRecorder record : migrationReportRecorders) {
StringBuffer buffer = new StringBuffer();
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getTaskClassName())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getTaskDescription())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getItemType())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getItemPath())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getDetailMessage()));
recordLines.add(buffer.toString());
}
ItemsReportUtil.generateReportFile(reportFile, MIGRATION_REPORT_HEAD, recordLines);
} catch (Exception e) {
ExceptionHandler.process(e);
if (reportFile != null && reportFile.exists()) {
reportFile.delete();
}
if (exportFolder != null && exportFolder.exists()) {
exportFolder.delete();
}
} finally {
migrationReportRecorders.clear();
taskItemRecords.clear();
}
}
public boolean isRequireDefaultRecord(IProjectMigrationTask task, Item item) {
boolean require = true;
if (task.getOrder() != null) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(task.getOrder());
int year = calendar.get(Calendar.YEAR);
if (year <= 2016) {
return false;
}
}
if (item.getProperty() != null) {
Property property = item.getProperty();
String key = task.getId() + "_" + property.getId() + "_" + property.getVersion();
if (taskItemRecords.contains(key)) {
return false;
}
}
return require;
}
private String getCurrentTime() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String time = dateFormat.format(new Date());
return time;
}
private String getReportExportFolder(String time) {
String folderName = "migrationReport" + "_" + time;
String path = ResourcesPlugin.getWorkspace().getRoot().getLocation().toString() + "/report/" + folderName;
return path;
}
private String getReportFileName(String time, String projectTecName) {
String fileName = time + "_" + projectTecName + "_" + "Migration" + "_" + "Report.csv";
return fileName;
}
public static void storeDoNotShowAgainPref(boolean selected) {
IScopeContext scopeContext = ConfigurationScope.INSTANCE;
IEclipsePreferences pref = scopeContext.getNode(PLUGIN_ID);
pref.putBoolean(DO_NOT_SHOW_PREF_KEY, selected);
try {
pref.flush();
} catch (BackingStoreException e) {
ExceptionHandler.process(e);
}
}
public static boolean isReportDialogDisable() {
IScopeContext scopeContext = ConfigurationScope.INSTANCE;
IEclipsePreferences pref = scopeContext.getNode(PLUGIN_ID);
return pref.getBoolean(DO_NOT_SHOW_PREF_KEY, false);
}
public void addRecorder(MigrationReportRecorder recorder) {
if (recorder != null) {
migrationReportRecorders.add(recorder);
if (recorder.getItem() != null && recorder.getItem().getProperty() != null) {
Property property = recorder.getItem().getProperty();
taskItemRecords.add(recorder.getTask().getId() + "_" + property.getId() + "_" + property.getVersion());
}
}
}
public void clearRecorders() {
reportGeneratedPath = "";
migrationReportRecorders.clear();
taskItemRecords.clear();
}
public String getReportGeneratedPath() {
return reportGeneratedPath;
}
}


@@ -0,0 +1,197 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import org.apache.commons.lang.StringUtils;
import org.talend.core.model.components.ComponentUtilities;
import org.talend.core.model.properties.Item;
import org.talend.core.utils.TalendQuoteUtils;
import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.migration.IProjectMigrationTask;
/**
* DOC jding class global comment. Detailled comment
*/
public class MigrationReportRecorder extends ItemReportRecorder {
private IProjectMigrationTask task;
private MigrationOperationType operationType;
private NodeType node;
private String paramName;
private String oldValue;
private String newValue;
public enum MigrationOperationType {
ADD,
MODIFY,
DELETE
}
public MigrationReportRecorder(IProjectMigrationTask task, Item item) {
super();
this.task = task;
this.item = item;
}
public MigrationReportRecorder(IProjectMigrationTask task, MigrationOperationType operationType, Item item, NodeType node,
String paramName, String oldValue, String newValue) {
super();
this.task = task;
this.operationType = operationType;
this.item = item;
this.node = node;
this.paramName = paramName;
this.oldValue = oldValue;
this.newValue = newValue;
}
public MigrationReportRecorder(IProjectMigrationTask task, Item item, String detailMessage) {
super();
this.task = task;
this.item = item;
this.detailMessage = detailMessage;
}
public String getTaskClassName() {
return task.getClass().getSimpleName();
}
public String getTaskDescription() {
String description = "";
if (StringUtils.isNotBlank(task.getDescription())) {
description = task.getDescription();
}
return description;
}
public String getDetailMessage() {
String details = detailMessage;
if (StringUtils.isNotBlank(detailMessage)) {
return details;
}
if (operationType == null || StringUtils.isBlank(paramName)) {
details = getTaskClassName() + " task is applied";
return details;
}
StringBuffer detailBuffer = new StringBuffer();
if (node != null) {
// migration for node, e.g. tRESTClient component "tRESTClient_2":
detailBuffer.append(node.getComponentName()).append(" component ");
ElementParameterType uniqueName = ComponentUtilities.getNodeProperty(node, "UNIQUE_NAME");
detailBuffer.append(TalendQuoteUtils.addQuotes(uniqueName.getValue())).append(":");
} else {
// migration for item, e.g. context item "testContext":
detailBuffer.append(getItemType() + " item ")
.append(TalendQuoteUtils.addQuotes(item.getProperty().getLabel())).append(":");
}
detailBuffer.append(paramName).append(" was ");
switch (operationType) {
case ADD:
detailBuffer.append("added");
if (StringUtils.isNotBlank(newValue)) {
detailBuffer.append(" with ").append(newValue);
}
break;
case MODIFY:
detailBuffer.append("changed");
if (StringUtils.isNotBlank(oldValue)) {
detailBuffer.append(" from ").append(oldValue);
}
if (StringUtils.isNotBlank(newValue)) {
detailBuffer.append(" to ").append(newValue);
}
break;
case DELETE:
detailBuffer.append("deleted");
break;
default:
break;
}
details = detailBuffer.toString();
return details;
}
/**
* Getter for task.
*
* @return the task
*/
public IProjectMigrationTask getTask() {
return task;
}
/**
* Sets the task.
*
* @param task the task to set
*/
public void setTask(IProjectMigrationTask task) {
this.task = task;
}
/**
* Sets the operationType.
*
* @param operationType the operationType to set
*/
public void setOperationType(MigrationOperationType operationType) {
this.operationType = operationType;
}
/**
* Sets the node.
*
* @param node the node to set
*/
public void setNode(NodeType node) {
this.node = node;
}
/**
* Sets the paramName.
*
* @param paramName the paramName to set
*/
public void setParamName(String paramName) {
this.paramName = paramName;
}
/**
* Sets the oldValue.
*
* @param oldValue the oldValue to set
*/
public void setOldValue(String oldValue) {
this.oldValue = oldValue;
}
/**
* Sets the newValue.
*
* @param newValue the newValue to set
*/
public void setNewValue(String newValue) {
this.newValue = newValue;
}
}


@@ -0,0 +1,121 @@
package org.talend.designer.maven.migration.tasks;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.commons.exception.PersistenceException;
import org.talend.commons.runtime.model.emf.EmfHelper;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.migration.AbstractItemMigrationTask;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.JobletProcessItem;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.core.repository.utils.ConvertJobsUtil;
import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportHelper;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
import org.talend.repository.model.IProxyRepositoryFactory;
import org.talend.repository.model.IRepositoryService;
public abstract class AbstractCorrectBuildItemMigrationTask extends AbstractItemMigrationTask implements ICorrectBuildTypeMigrationTask {
protected static final String BUILD_TYPE_PROPERTY = "BUILD_TYPE";
protected static final String BUILD_TYPE_STANDALONE = "STANDALONE";
protected static final String BUILD_TYPE_OSGI = "OSGI";
protected static final String BUILD_TYPE_ROUTE = "ROUTE";
protected static final String BUILD_TYPE_ROUTE_MICROSERVICE = "ROUTE_MICROSERVICE";
protected static final String REST_MS = "REST_MS";
protected static Map<String, String> migratedJobs = new HashMap<String, String>();
protected static void clearMigratedJobs () {
migratedJobs.clear();
}
protected static void skipMigrationForJob (String jobName, String migrationTask) {
migratedJobs.put(jobName, migrationTask);
}
protected static void storeMigratedJob (String jobName, String migrationTask) {
migratedJobs.put(jobName, migrationTask);
}
protected static boolean isJobMigrated (String jobName) {
return migratedJobs.containsKey(jobName);
}
protected static String getStoredJobMigraionTask (String jobName) {
return migratedJobs.get(jobName);
}
public void generateReportRecord(MigrationReportRecorder recorder) {
MigrationReportHelper.getInstance().addRecorder(recorder);
}
public static List<NodeType> searchComponent(ProcessType processType, IComponentFilter filter) {
List<NodeType> list = new ArrayList<NodeType>();
if (filter == null || processType == null) {
return list;
}
for (Object o : processType.getNode()) {
if (filter.accept((NodeType) o)) {
list.add((NodeType) o);
}
}
return list;
}
public void save(Item item) throws PersistenceException {
IRepositoryService service = (IRepositoryService) GlobalServiceRegister.getDefault()
.getService(IRepositoryService.class);
IProxyRepositoryFactory factory = service.getProxyRepositoryFactory();
factory.save(item, true);
}
public ProcessType getProcessType(Item item) {
ProcessType processType = null;
if (item instanceof ProcessItem) {
processType = ((ProcessItem) item).getProcess();
}
if (item instanceof JobletProcessItem) {
processType = ((JobletProcessItem) item).getJobletProcess();
}
if (processType != null) {
EmfHelper.visitChilds(processType);
ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
if (itemType == ERepositoryObjectType.TEST_CONTAINER
&& !ConvertJobsUtil.JobType.STANDARD.getDisplayName().equalsIgnoreCase(processType.getJobType())) {
return null;
}
}
return processType;
}
/**
* Find element parameter with a given parameter name
*
* @param paramName
* @param elementParameterTypes
* @return
*/
public static final ElementParameterType findElementParameterByName(String paramName, NodeType node) {
for (Object obj : node.getElementParameter()) {
ElementParameterType cpType = (ElementParameterType) obj;
if (paramName.equals(cpType.getName())) {
return cpType;
}
}
return null;
}
abstract public void clear ();
}


@@ -0,0 +1,17 @@
package org.talend.designer.maven.migration.tasks;
import java.util.ArrayList;
import java.util.List;
import org.talend.core.model.repository.ERepositoryObjectType;
public abstract class AbstractDataServiceJobMigrationTask extends AbstractCorrectBuildItemMigrationTask {
@Override
public List<ERepositoryObjectType> getTypes() {
List<ERepositoryObjectType> toReturn = new ArrayList<ERepositoryObjectType>();
toReturn.add(ERepositoryObjectType.PROCESS);
return toReturn;
}
}


@@ -0,0 +1,18 @@
package org.talend.designer.maven.migration.tasks;
import java.util.ArrayList;
import java.util.List;
import org.talend.core.model.repository.ERepositoryObjectType;
public abstract class AbstractRouteMigrationTask extends AbstractCorrectBuildItemMigrationTask {
@Override
public List<ERepositoryObjectType> getTypes() {
List<ERepositoryObjectType> toReturn = new ArrayList<ERepositoryObjectType>();
toReturn.add(ERepositoryObjectType.PROCESS_ROUTE);
toReturn.add(ERepositoryObjectType.PROCESS_ROUTE_MICROSERVICE);
return toReturn;
}
}


@@ -0,0 +1,268 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.general.Project;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.ProcessItem;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
import org.talend.designer.runprocess.ItemCacheManager;
import org.talend.repository.ProjectManager;
/*
* If Job does not contain any of the following components: "tRouteInput", "tRESTClient", "tESBConsumer"
* then BUILD_TYPE must be STANDALONE
* Else (job contains one of "tRouteInput", "tRESTClient" or "tESBConsumer")
* If no BUILD_TYPE is set then default BUILD_TYPE must be STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
* If BUILD_TYPE is STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
* If BUILD_TYPE is ROUTE > EXCEPTION: need warning message! BUILD_TYPE was wrongly set to ROUTE from a previous migration task and has to be manually updated (all subjobs have to be checked manually). Value should be either STANDALONE (in most cases) or OSGI.
*/
public class CorrectBuildTypeForDIJobMigrationTask extends AbstractDataServiceJobMigrationTask {
private static final String[] ESB_COMPONENTS = { "tRouteInput", "tRESTClient", "tESBConsumer" };
private static final String T_RUB_JOB_COMPONENT = "tRunJob";
boolean failure = false;
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
String jobName = item.getProperty().getLabel();
/*
* Migrating remaining jobs only (which was not migrated previously)
*/
if (isJobMigrated(jobName)) {
return ExecutionResult.NOTHING_TO_DO;
}
Object originalBuildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
/*
* If BUILD_TYPE is ROUTE > EXCEPTION: need warning message! BUILD_TYPE was
* wrongly set to ROUTE from a previous migration task and has to be manually
* updated (all subjobs have to be checked manually). Value should be either
* STANDALONE (in most cases) or OSGI.
*/
if (originalBuildType != null && BUILD_TYPE_ROUTE.equalsIgnoreCase((String) originalBuildType)) {
ExceptionHandler.process(new RuntimeException("Job [" + jobName + "] has incorrect BUILD_TYPE ["
+ BUILD_TYPE_ROUTE
+ "] which has to be manually updated (all subjobs have to be checked manually). Value should be either STANDALONE (in most cases) or OSGI"));
return ExecutionResult.FAILURE;
}
for (String name : ESB_COMPONENTS) {
boolean modified = false;
IComponentFilter filter = new NameComponentFilter(name);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
/*
* job contains one of "tRouteInput", "tRESTClient" or "tESBConsumer") If no
* BUILD_TYPE is set then default BUILD_TYPE must be STANDALONE
*/
if (null == originalBuildType) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_STANDALONE);
try {
save(item);
modified |= true;
generateReportRecord(
new MigrationReportRecorder(this, MigrationReportRecorder.MigrationOperationType.MODIFY,
item, null, "Build Type", null, BUILD_TYPE_STANDALONE));
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
/*
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
String currentParentJobBuildType = (String) item.getProperty().getAdditionalProperties()
.get(BUILD_TYPE_PROPERTY);
if (BUILD_TYPE_STANDALONE.equalsIgnoreCase(currentParentJobBuildType)) {
updateBuildTypeForSubJobs(item, currentParentJobBuildType);
}
if (failure) {
return ExecutionResult.FAILURE;
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
return ExecutionResult.NOTHING_TO_DO;
}
}
/*
* If Job does not contain any of the following components: "tRouteInput",
* "tRESTClient", "tESBConsumer" then BUILD_TYPE must be STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
if (null == originalBuildType || !BUILD_TYPE_STANDALONE.equalsIgnoreCase(originalBuildType.toString())) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_STANDALONE);
boolean modified = false;
try {
save(item);
modified |= true;
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, item, null, "Build Type",
(null == originalBuildType) ? null : originalBuildType.toString(), BUILD_TYPE_STANDALONE));
updateBuildTypeForSubJobs(item, BUILD_TYPE_STANDALONE);
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
if (failure) {
return ExecutionResult.FAILURE;
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
}
/*
* If Job does not contain any of the following components: "tRouteInput",
* "tRESTClient", "tESBConsumer" and BUILD_TYPE is STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
if (BUILD_TYPE_STANDALONE.equalsIgnoreCase(originalBuildType.toString())) {
updateBuildTypeForSubJobs(item, BUILD_TYPE_STANDALONE);
if (failure) {
return ExecutionResult.FAILURE;
}
}
return ExecutionResult.NOTHING_TO_DO;
}
@Override
public String getDescription() {
return "Synchronize build types for DI jobs";
}
@Override
public void clear() {
clearMigratedJobs();
}
@SuppressWarnings("unchecked")
private void updateBuildTypeForSubJobs(Item parentJobItem, String parentJobBuiltType) {
IComponentFilter filter = new NameComponentFilter(T_RUB_JOB_COMPONENT);
ProcessType processType = getProcessType(parentJobItem);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
for (NodeType tRunJobComponent : c) {
String processID = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS",
tRunJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS", tRunJobComponent)
.getValue();
if (processID == null) {
processID = findElementParameterByName("PROCESS:PROCESS_TYPE_PROCESS",
tRunJobComponent) == null ? null
: findElementParameterByName("PROCESS:PROCESS_TYPE_PROCESS", tRunJobComponent)
.getValue();
}
String processVersion = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION",
tRunJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION", tRunJobComponent)
.getValue();
if (processVersion == null) {
processVersion = findElementParameterByName("PROCESS:PROCESS_TYPE_VERSION",
tRunJobComponent) == null ? null
: findElementParameterByName("PROCESS:PROCESS_TYPE_VERSION", tRunJobComponent)
.getValue();
}
if (processID != null && processVersion != null) {
ProcessItem childItem = ItemCacheManager.getProcessItem(processID, processVersion);
Project childItemProject = ProjectManager.getInstance().getCurrentProject();
if (childItem == null) {
for (Project refProject : ProjectManager.getInstance().getAllReferencedProjects()) {
childItem = ItemCacheManager.getRefProcessItem(getProject(), processID);
if (childItem != null) {
childItemProject = refProject;
break;
}
}
}
if (childItem != null) {
Object currentChildBuildType = childItem.getProperty().getAdditionalProperties()
.get(BUILD_TYPE_PROPERTY);
// String jobID = childItem.getProperty().getLabel();
String currentChildBuildTypeStr = (null == currentChildBuildType) ? null
: (String) currentChildBuildType;
if (BUILD_TYPE_STANDALONE.equalsIgnoreCase(parentJobBuiltType)
&& !BUILD_TYPE_STANDALONE.equalsIgnoreCase(currentChildBuildTypeStr)) {
childItem.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY,
BUILD_TYPE_STANDALONE);
try {
save(childItem);
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, childItem, null,
"Build Type", currentChildBuildTypeStr, BUILD_TYPE_STANDALONE));
} catch (PersistenceException e) {
ExceptionHandler.process(e);
failure = true;
}
}
updateBuildTypeForSubJobs(childItem, parentJobBuiltType);
}
}
}
}
}
}
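Note: the nested branches above implement the decision table from the header comment. As a plain-Java restatement of the per-job rules (leaving aside the tRunJob child propagation), a sketch with hypothetical helper names, not part of this changeset:

// Illustrative restatement of the BUILD_TYPE rules for DI jobs; not Talend API.
final class DiBuildTypeRules {
    static String decide(boolean containsEsbComponent, String currentBuildType) {
        if (!containsEsbComponent) {
            // No tRouteInput / tRESTClient / tESBConsumer: the job must build as STANDALONE.
            return "STANDALONE";
        }
        if (currentBuildType == null) {
            // ESB component present but no build type set: default to STANDALONE.
            return "STANDALONE";
        }
        if ("ROUTE".equalsIgnoreCase(currentBuildType)) {
            // Wrongly migrated value: must be corrected manually (STANDALONE in most cases, or OSGI).
            throw new IllegalStateException("BUILD_TYPE ROUTE must be corrected manually");
        }
        return currentBuildType; // an explicit STANDALONE (or other value) is kept as-is
    }

    public static void main(String[] args) {
        System.out.println(decide(false, "OSGI"));       // STANDALONE
        System.out.println(decide(true, null));          // STANDALONE
        System.out.println(decide(true, "STANDALONE"));  // STANDALONE
    }
}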


@@ -0,0 +1,107 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.properties.Item;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
/*
* (non-Javadoc)
* Data service REST (process) = Job with "tRESTRequest"
* If no BUILD_TYPE is set then default BUILD_TYPE must be OSGI
* If Job does not contain any of the following components: "tRouteInput", "tRESTClient", "tESBConsumer" then BUILD_TYPE must be STANDALONE
*/
public class CorrectBuildTypeForDsRestMigrationTask extends AbstractDataServiceJobMigrationTask {
private static final String T_REST_REQUEST = "tRESTRequest";
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
/*
* (non-Javadoc)
*
* @see
* org.talend.core.model.migration.AbstractDataserviceMigrationTask#execute(org
* .talend.core.model.properties.Item)
*/
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
boolean modified = false;
/*
* If no BUILD_TYPE is set then default BUILD_TYPE must be OSGI
*/
IComponentFilter filter = new NameComponentFilter(T_REST_REQUEST);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
Object buildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
if (null == buildType) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_OSGI);
try {
save(item);
modified |= true;
generateReportRecord(
new MigrationReportRecorder(this, MigrationReportRecorder.MigrationOperationType.MODIFY,
item, null, "Build Type", null, BUILD_TYPE_OSGI));
storeMigratedJob(item.getProperty().getLabel(), this.getClass().getName());
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
return ExecutionResult.SUCCESS_NO_ALERT;
} else if (BUILD_TYPE_OSGI.equalsIgnoreCase((String)buildType)){
// current job has correct build type
// skip this job during next migrations
skipMigrationForJob(item.getProperty().getLabel(), this.getClass().getName());
} else if (REST_MS.equalsIgnoreCase((String)buildType)){
// current job has correct build type
// skip this job during next migrations
skipMigrationForJob(item.getProperty().getLabel(), this.getClass().getName());
}
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
} else {
return ExecutionResult.NOTHING_TO_DO;
}
}
@Override
public String getDescription() {
return "Synchronize build types for DS Rest jobs";
}
@Override
public void clear () {
clearMigratedJobs();
}
}


@@ -0,0 +1,255 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.general.Project;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.core.runtime.process.TalendProcessArgumentConstant;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
import org.talend.designer.runprocess.ItemCacheManager;
import org.talend.repository.ProjectManager;
/*
* Routes
* If no BUILD_TYPE is set then Default BUILD_TYPE must be ROUTE
* - Manage child jobs for routes ( parent, target BUILD_TYPE = OSGI )
* Else if BUILD_TYPE is ROUTE
* - Manage child jobs for routes ( parent, target BUILD_TYPE = OSGI )
* Else if BUILD_TYPE is ROUTE_MICROSERVICE
* - Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
public class CorrectBuildTypeForRoutesMigrationTask extends AbstractRouteMigrationTask {
private static final String C_TALEND_JOB = "cTalendJob";
protected Map<String, String> migratedChildJobs = new HashMap<String, String>();
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
boolean modified = false;
boolean migrationFailure = false;
/*
* If no BUILD_TYPE is set then default BUILD_TYPE must be ROUTE
*/
Object buildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
if (null == buildType) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_ROUTE);
try {
save(item);
modified |= true;
generateReportRecord(
new MigrationReportRecorder(this, MigrationReportRecorder.MigrationOperationType.MODIFY, item,
null, "Build Type", null, BUILD_TYPE_ROUTE));
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
/*
* If no BUILD_TYPE is set then Default BUILD_TYPE must be ROUTE - Manage child
* jobs for routes ( parent, target BUILD_TYPE = OSGI ) Else if BUILD_TYPE is
* ROUTE - Manage child jobs for routes ( parent, target BUILD_TYPE = OSGI )
* Else if BUILD_TYPE is ROUTE_MICROSERVICE -Manage child jobs for jobs (
* parent, target BUILD_TYPE = STANDALONE )
*/
String currentRouteBuildType = (String) item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
String currentRouteID = item.getProperty().getLabel();
IComponentFilter filter = new NameComponentFilter(C_TALEND_JOB);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
for (NodeType cTalendJobComponent : c) {
String processID = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS",
cTalendJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS",
cTalendJobComponent).getValue();
String processVersion = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION",
cTalendJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION",
cTalendJobComponent).getValue();
if (processID != null && processVersion != null) {
ProcessItem childItem = ItemCacheManager.getProcessItem(processID, processVersion);
Project childItemProject = ProjectManager.getInstance().getCurrentProject();
if (childItem == null) {
for (Project refProject : ProjectManager.getInstance().getAllReferencedProjects()) {
childItem = ItemCacheManager.getRefProcessItem(getProject(), processID);
if (childItem != null) {
childItemProject = refProject;
break;
}
}
}
if (childItem != null) {
Object currentChildBuildType = childItem.getProperty().getAdditionalProperties()
.get(BUILD_TYPE_PROPERTY);
String jobID = childItem.getProperty().getLabel();
String currentChildBuildTypeStr = (null == currentChildBuildType) ? null
: (String) currentChildBuildType;
if (BUILD_TYPE_ROUTE.equalsIgnoreCase(currentRouteBuildType)
&& BUILD_TYPE_OSGI.equalsIgnoreCase(currentChildBuildTypeStr)) {
storeMigratedModel(jobID, currentRouteID);
}
if (BUILD_TYPE_ROUTE.equalsIgnoreCase(currentRouteBuildType)
&& !BUILD_TYPE_OSGI.equalsIgnoreCase(currentChildBuildTypeStr)) {
try {
if (isModelMigrated(jobID)) {
ExceptionHandler.process(new RuntimeException(
"Child Job is called by 2 or more different Routes which have different build types. Build type for child Job ["
+ jobID + "] was previously updated to [" + currentChildBuildTypeStr
+ "] to be compatible with parent Route ["
+ getStoredMigratedModelParentRoute(jobID)
+ "] which is not compatible with current parent Route ["
+ currentRouteID + "] with build type [" + currentRouteBuildType
+ "]. "));
migrationFailure = true;
continue;
} else {
childItem.getProperty().getAdditionalProperties()
.put(TalendProcessArgumentConstant.ARG_BUILD_TYPE, BUILD_TYPE_OSGI);
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, childItem, null,
"Build Type", currentChildBuildTypeStr, BUILD_TYPE_OSGI));
ProxyRepositoryFactory.getInstance().save(childItemProject, childItem, true);
}
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
if (BUILD_TYPE_ROUTE_MICROSERVICE.equalsIgnoreCase(currentRouteBuildType)
&& !BUILD_TYPE_STANDALONE.equalsIgnoreCase(currentChildBuildTypeStr)) {
try {
if (isModelMigrated(jobID)) {
ExceptionHandler.process(new RuntimeException(
"Child Job is called by 2 or more different Routes which have different build types. Build type for child Job ["
+ jobID + "] was previously updated to [" + currentChildBuildTypeStr
+ "] to be compatible with parent Route ["
+ getStoredMigratedModelParentRoute(jobID)
+ "] which is not compatible with current parent Route ["
+ currentRouteID + "] with build type [" + currentRouteBuildType
+ "]. "));
migrationFailure = true;
continue;
} else {
childItem.getProperty().getAdditionalProperties()
.put(TalendProcessArgumentConstant.ARG_BUILD_TYPE, BUILD_TYPE_STANDALONE);
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, childItem, null,
"Build Type", currentChildBuildTypeStr, BUILD_TYPE_STANDALONE));
ProxyRepositoryFactory.getInstance().save(childItemProject, childItem, true);
}
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
if(!isModelMigrated(jobID)) {
storeMigratedModel(jobID, currentRouteID);
}
}
}
}
}
if (migrationFailure) {
return ExecutionResult.FAILURE;
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
return ExecutionResult.NOTHING_TO_DO;
}
protected void clearMigratedChildJobs() {
migratedChildJobs.clear();
}
protected void storeMigratedModel(String jobName, String parentRouteName) {
migratedChildJobs.put(jobName, parentRouteName);
}
protected boolean isModelMigrated(String jobName) {
return migratedChildJobs.containsKey(jobName);
}
protected String getStoredMigratedModelParentRoute(String jobName) {
return migratedChildJobs.get(jobName);
}
@Override
public String getDescription() {
return "Synchronize build types for Routes (incuding child jobs)";
}
@Override
public void clear() {
clearMigratedChildJobs();
clearMigratedJobs();
}
}
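The conflict handling above (one child Job referenced by two Routes whose build types disagree) can be illustrated with similar map-based bookkeeping on made-up route and job names; this is a sketch, not Talend code:

import java.util.HashMap;
import java.util.Map;

final class RouteChildBuildTypeCheck {
    // jobName -> build type already forced on that child by an earlier route
    private final Map<String, String> migratedChildJobs = new HashMap<>();

    // ROUTE parents need OSGI children, ROUTE_MICROSERVICE parents need STANDALONE children.
    static String targetChildType(String routeBuildType) {
        return "ROUTE_MICROSERVICE".equalsIgnoreCase(routeBuildType) ? "STANDALONE" : "OSGI";
    }

    boolean align(String routeName, String routeBuildType, String jobName) {
        String wanted = targetChildType(routeBuildType);
        String already = migratedChildJobs.get(jobName);
        if (already != null && !already.equalsIgnoreCase(wanted)) {
            System.err.println("Job " + jobName + " already set to " + already
                    + " for another route; cannot switch to " + wanted + " for " + routeName);
            return false; // the same conflict the task reports as a migration failure
        }
        migratedChildJobs.put(jobName, wanted);
        return true;
    }

    public static void main(String[] args) {
        RouteChildBuildTypeCheck check = new RouteChildBuildTypeCheck();
        System.out.println(check.align("RouteA", "ROUTE", "ChildJob1"));              // true
        System.out.println(check.align("RouteB", "ROUTE_MICROSERVICE", "ChildJob1")); // false
    }
}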


@@ -0,0 +1,103 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.properties.Item;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
/*
* Data service SOAP = Job with "tESBProviderRequest"
* Set BUILD_TYPE as OSGI
* Manage child jobs for jobs ( parent job, target BUILD_TYPE = OSGI )
*/
public class CorrectBuildTypeForSOAPServiceJobMigrationTask extends AbstractDataServiceJobMigrationTask {
private static final String T_ESB_PROVIDER_REQUEST = "tESBProviderRequest";
private static final String BUILD_TYPE_PROPERTY = "BUILD_TYPE";
private static final String BUILD_TYPE_OSGI = "OSGI";
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
/*
* (non-Javadoc)
*
* @see
* org.talend.core.model.migration.AbstractDataserviceMigrationTask#execute(org
* .talend.core.model.properties.Item)
*/
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
boolean modified = false;
/*
* If no BUILD_TYPE is set then default BUILD_TYPE must be OSGI
*/
IComponentFilter filter = new NameComponentFilter(T_ESB_PROVIDER_REQUEST);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
Object originalBuildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
if (null == originalBuildType || !BUILD_TYPE_OSGI.equalsIgnoreCase(originalBuildType.toString())) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_OSGI);
try {
save(item);
modified |= true;
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, item, null, "Build Type",
(null == originalBuildType) ? null : originalBuildType.toString(), BUILD_TYPE_OSGI));
storeMigratedJob(item.getProperty().getLabel(), this.getClass().getName());
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
return ExecutionResult.SUCCESS_NO_ALERT;
} else if (BUILD_TYPE_OSGI.equalsIgnoreCase((String)originalBuildType)){
// current job has correct build type
// skip this job during next migrations
skipMigrationForJob(item.getProperty().getLabel(), this.getClass().getName());
}
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
return ExecutionResult.NOTHING_TO_DO;
}
@Override
public String getDescription() {
return "Synchronize build types for SOAP service Jobs";
}
@Override
public void clear () {
clearMigratedJobs();
}
}


@@ -0,0 +1,7 @@
package org.talend.designer.maven.migration.tasks;
import org.talend.migration.IProjectMigrationTask;
public interface ICorrectBuildTypeMigrationTask extends IProjectMigrationTask {
public void clear ();
}


@@ -0,0 +1,118 @@
package org.talend.designer.maven.tools;
import java.lang.reflect.InvocationTargetException;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.core.runtime.jobs.ISchedulingRule;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.eclipse.swt.widgets.Display;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.core.model.general.Project;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.designer.maven.migration.common.MigrationReportHelper;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForDIJobMigrationTask;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForDsRestMigrationTask;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForRoutesMigrationTask;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForSOAPServiceJobMigrationTask;
import org.talend.designer.maven.migration.tasks.ICorrectBuildTypeMigrationTask;
import org.talend.migration.IMigrationTask;
import org.talend.migration.IProjectMigrationTask;
import org.talend.repository.ProjectManager;
import org.talend.repository.RepositoryWorkUnit;
public class BuildTypeManager {
private ICorrectBuildTypeMigrationTask[] syncBuildTypeMigrationTasks = {
new CorrectBuildTypeForRoutesMigrationTask(), new CorrectBuildTypeForSOAPServiceJobMigrationTask(),
new CorrectBuildTypeForDsRestMigrationTask(), new CorrectBuildTypeForDIJobMigrationTask() };
private boolean hasErrors = false;
public void syncBuildTypes(FieldEditorPreferencePage page) throws Exception {
IRunnableWithProgress runnableWithProgress = new IRunnableWithProgress() {
@Override
public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
RepositoryWorkUnit<Object> workUnit = new RepositoryWorkUnit<Object>("Synchronize all build types") { //$NON-NLS-1$
@Override
protected void run() {
final IWorkspaceRunnable op = new IWorkspaceRunnable() {
@Override
public void run(final IProgressMonitor monitor) throws CoreException {
try {
syncAllBuildTypesWithProgress(monitor, page);
} catch (Exception e) {
ExceptionHandler.process(e);
}
}
};
IWorkspace workspace = ResourcesPlugin.getWorkspace();
try {
ISchedulingRule schedulingRule = workspace.getRoot();
workspace.run(op, schedulingRule, IWorkspace.AVOID_UPDATE, monitor);
} catch (CoreException e) {
ExceptionHandler.process(e);
}
}
};
workUnit.setAvoidUnloadResources(true);
ProxyRepositoryFactory.getInstance().executeRepositoryWorkUnit(workUnit);
}
};
hasErrors = false;
new ProgressMonitorDialog(Display.getDefault().getActiveShell()).run(true, true, runnableWithProgress);
if (hasErrors) {
page.setErrorMessage("Build types synchronization finished with errors. Check workspace logs for details.");
} else {
page.setErrorMessage(null);
}
}
public void syncAllBuildTypesWithProgress(IProgressMonitor monitor, FieldEditorPreferencePage page)
throws Exception {
Project project = ProjectManager.getInstance().getCurrentProject();
SubMonitor subMonitor = SubMonitor.convert(monitor, syncBuildTypeMigrationTasks.length);
for (ICorrectBuildTypeMigrationTask task : syncBuildTypeMigrationTasks) {
task.clear();
}
for (ICorrectBuildTypeMigrationTask task : syncBuildTypeMigrationTasks) {
subMonitor.beginTask(task.getDescription(), syncBuildTypeMigrationTasks.length);
IMigrationTask.ExecutionResult result = task.execute(project);
if (IMigrationTask.ExecutionResult.FAILURE.equals(result)) {
hasErrors = true;
}
subMonitor.worked(1);
}
subMonitor.beginTask("Generate migration report", syncBuildTypeMigrationTasks.length);
MigrationReportHelper.getInstance().generateMigrationReport(project.getTechnicalLabel());
monitor.done();
}
}
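For reference, the SubMonitor pattern used above usually converts the parent monitor once and then spends one tick per task; a minimal generic sketch using only the standard Eclipse progress API (the Runnable task list here is hypothetical):

import java.util.List;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.SubMonitor;

final class ProgressExample {
    static void runAll(List<Runnable> tasks, IProgressMonitor monitor) {
        // One tick per task plus one for the final report step.
        SubMonitor sub = SubMonitor.convert(monitor, tasks.size() + 1);
        for (Runnable task : tasks) {
            sub.subTask(task.toString());
            task.run();
            sub.worked(1);
        }
        sub.subTask("Generate migration report");
        sub.worked(1);
    }
}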


@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="lib/commons-text-1.10.0.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-pool2-2.4.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-validator-1.5.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-math3-3.3.jar"/>
@@ -8,6 +9,5 @@
<classpathentry exported="true" kind="lib" path="lib/commons-digester-2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-cli-2.0-SNAPSHOT.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-codec-1.15.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-text-1.1.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>


@@ -10,7 +10,7 @@ Bundle-ClassPath: .,
lib/commons-math3-3.3.jar,
lib/commons-validator-1.5.1.jar,
lib/commons-pool2-2.4.2.jar,
lib/commons-text-1.1.jar
lib/commons-text-1.10.0.jar
Export-Package: org.apache.commons.cli2,
org.apache.commons.cli2.builder,
org.apache.commons.cli2.commandline,


@@ -7,4 +7,4 @@ bin.includes = META-INF/,\
lib/commons-math3-3.3.jar,\
lib/commons-validator-1.5.1.jar,\
lib/commons-pool2-2.4.2.jar,\
lib/commons-text-1.1.jar
lib/commons-text-1.10.0.jar


@@ -62,6 +62,11 @@
<artifactId>commons-collections</artifactId>
<version>3.2.2</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.10.0</version>
</artifactItem>
<artifactItem>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>


@@ -6,23 +6,23 @@
<classpathentry exported="true" kind="lib" path="lib/wsdl4j-1.6.3.jar"/>
<classpathentry exported="true" kind="lib" path="lib/istack-commons-runtime-3.0.12.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jaxb-runtime-2.3.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/woodstox-core-6.2.6.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-core-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-soap-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-xml-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-databinding-jaxb-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-features-clustering-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxrs-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxws-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-simple-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-rs-client-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-saml-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-transports-http-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-addr-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-wsdl-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-policy-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-security-3.4.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/woodstox-core-6.4.0.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-core-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-soap-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-xml-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-databinding-jaxb-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-features-clustering-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxrs-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxws-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-simple-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-rs-client-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-saml-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-transports-http-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-addr-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-wsdl-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-policy-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-security-3.5.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.activation-1.2.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.activation-api-1.2.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.annotation-api-1.3.5.jar"/>


@@ -5,22 +5,22 @@ Bundle-SymbolicName: org.talend.libraries.apache.cxf;singleton:=true
Bundle-Version: 7.3.1.qualifier
Bundle-ActivationPolicy: lazy
Bundle-ClassPath: .,
lib/cxf-core-3.4.7.jar,
lib/cxf-rt-bindings-soap-3.4.7.jar,
lib/cxf-rt-bindings-xml-3.4.7.jar,
lib/cxf-rt-databinding-jaxb-3.4.7.jar,
lib/cxf-rt-features-clustering-3.4.7.jar,
lib/cxf-rt-frontend-jaxrs-3.4.7.jar,
lib/cxf-rt-frontend-jaxws-3.4.7.jar,
lib/cxf-rt-frontend-simple-3.4.7.jar,
lib/cxf-rt-rs-client-3.4.7.jar,
lib/cxf-rt-security-3.4.7.jar,
lib/cxf-rt-security-saml-3.4.7.jar,
lib/cxf-rt-transports-http-3.4.7.jar,
lib/cxf-rt-ws-addr-3.4.7.jar,
lib/cxf-rt-wsdl-3.4.7.jar,
lib/cxf-rt-ws-security-3.4.7.jar,
lib/cxf-rt-ws-policy-3.4.7.jar,
lib/cxf-core-3.5.5.jar,
lib/cxf-rt-bindings-soap-3.5.5.jar,
lib/cxf-rt-bindings-xml-3.5.5.jar,
lib/cxf-rt-databinding-jaxb-3.5.5.jar,
lib/cxf-rt-features-clustering-3.5.5.jar,
lib/cxf-rt-frontend-jaxrs-3.5.5.jar,
lib/cxf-rt-frontend-jaxws-3.5.5.jar,
lib/cxf-rt-frontend-simple-3.5.5.jar,
lib/cxf-rt-rs-client-3.5.5.jar,
lib/cxf-rt-security-3.5.5.jar,
lib/cxf-rt-security-saml-3.5.5.jar,
lib/cxf-rt-transports-http-3.5.5.jar,
lib/cxf-rt-ws-addr-3.5.5.jar,
lib/cxf-rt-wsdl-3.5.5.jar,
lib/cxf-rt-ws-security-3.5.5.jar,
lib/cxf-rt-ws-policy-3.5.5.jar,
lib/istack-commons-runtime-3.0.12.jar,
lib/jakarta.activation-1.2.2.jar,
lib/jakarta.activation-api-1.2.2.jar,
@@ -35,7 +35,7 @@ Bundle-ClassPath: .,
lib/stax2-api-4.2.1.jar,
lib/txw2-2.3.4.jar,
lib/xmlschema-core-2.2.5.jar,
lib/woodstox-core-6.2.6.jar,
lib/woodstox-core-6.4.0.jar,
lib/wsdl4j-1.6.3.jar
Export-Package: javax.jws,
javax.ws.rs,


@@ -11,7 +11,7 @@
<packaging>eclipse-plugin</packaging>
<properties>
<cxf.version>3.4.7</cxf.version>
<cxf.version>3.5.5</cxf.version>
</properties>
<repositories>
@@ -170,7 +170,7 @@
<dependency>
<groupId>com.fasterxml.woodstox</groupId>
<artifactId>woodstox-core</artifactId>
<version>6.2.6</version>
<version>6.4.0</version>
</dependency>
<dependency>
<groupId>org.apache.ws.xmlschema</groupId>


@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="lib/lucene-core-3.0.3.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-core-8.11.2.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="class"/>


@@ -4,22 +4,38 @@ Bundle-Name: Lucene plug-in
Bundle-SymbolicName: org.talend.libraries.apache.lucene
Bundle-Version: 7.3.1.qualifier
Bundle-Vendor: .Talend SA.
Bundle-ClassPath: lib/lucene-core-3.0.3.jar,
Bundle-ClassPath: lib/lucene-core-8.11.2.jar,
.
Export-Package: org.apache.lucene,
org.apache.lucene.analysis,
org.apache.lucene.analysis.standard,
org.apache.lucene.analysis.tokenattributes,
org.apache.lucene.codecs,
org.apache.lucene.codecs.blocktree,
org.apache.lucene.codecs.compressing,
org.apache.lucene.codecs.lucene50,
org.apache.lucene.codecs.lucene60,
org.apache.lucene.codecs.lucene80,
org.apache.lucene.codecs.lucene84,
org.apache.lucene.codecs.lucene86,
org.apache.lucene.codecs.lucene87,
org.apache.lucene.codecs.perfield,
org.apache.lucene.document,
org.apache.lucene.geo,
org.apache.lucene.index,
org.apache.lucene.messages,
org.apache.lucene.queryParser,
org.apache.lucene.search,
org.apache.lucene.search.function,
org.apache.lucene.search.payloads,
org.apache.lucene.search.comparators,
org.apache.lucene.search.similarities,
org.apache.lucene.search.spans,
org.apache.lucene.store,
org.apache.lucene.util,
org.apache.lucene.util.cache
org.apache.lucene.util.automaton,
org.apache.lucene.util.bkd,
org.apache.lucene.util.compress,
org.apache.lucene.util.fst,
org.apache.lucene.util.graph,
org.apache.lucene.util.hppc,
org.apache.lucene.util.mutable,
org.apache.lucene.util.packed
Bundle-ActivationPolicy: lazy
Eclipse-BundleShape: dir


@@ -1,5 +1,4 @@
output.. = class/
bin.includes = META-INF/,\
.,\
lib/lucene-core-2.9.3.jar,\
lib/lucene-core-3.0.3.jar
lib/lucene-core-8.11.2.jar


@@ -9,4 +9,36 @@
</parent>
<artifactId>org.talend.libraries.apache.lucene</artifactId>
<packaging>eclipse-plugin</packaging>
<properties>
<lucene.version>8.11.2</lucene.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${lucene.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>generate-sources</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.basedir}/lib</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>


@@ -1,9 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="lib/lucene-analyzers-common-8.3.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-core-8.3.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-queries-8.3.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-queryparser-8.3.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-analyzers-common-8.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-backward-codecs-8.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-core-8.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-queries-8.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-queryparser-8.11.2.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="bin"/>


@@ -5,10 +5,11 @@ Bundle-SymbolicName: org.talend.libraries.apache.lucene8
Bundle-Version: 7.3.1.qualifier
Bundle-Vendor: .Talend SA.
Bundle-ActivationPolicy: lazy
Bundle-ClassPath: lib/lucene-analyzers-common-8.3.1.jar,
lib/lucene-core-8.3.1.jar,
lib/lucene-queries-8.3.1.jar,
lib/lucene-queryparser-8.3.1.jar
Bundle-ClassPath: lib/lucene-analyzers-common-8.11.2.jar,
lib/lucene-backward-codecs-8.11.2.jar,
lib/lucene-core-8.11.2.jar,
lib/lucene-queries-8.11.2.jar,
lib/lucene-queryparser-8.11.2.jar
Export-Package: org.apache.lucene,
org.apache.lucene.analysis,
org.apache.lucene.analysis.ar,
@@ -78,6 +79,8 @@ Export-Package: org.apache.lucene,
org.apache.lucene.codecs.lucene60,
org.apache.lucene.codecs.lucene70,
org.apache.lucene.codecs.lucene80,
org.apache.lucene.codecs.lucene84,
org.apache.lucene.codecs.lucene86,
org.apache.lucene.codecs.perfield,
org.apache.lucene.collation,
org.apache.lucene.collation.tokenattributes,


@@ -1,7 +1,8 @@
bin.includes = META-INF/,\
.,\
lib/lucene-analyzers-common-8.3.1.jar,\
lib/lucene-core-8.3.1.jar,\
lib/lucene-queries-8.3.1.jar,\
lib/lucene-queryparser-8.3.1.jar
lib/lucene-analyzers-common-8.11.2.jar,\
lib/lucene-backward-codecs-8.11.2.jar,\
lib/lucene-core-8.11.2.jar,\
lib/lucene-queries-8.11.2.jar,\
lib/lucene-queryparser-8.11.2.jar


@@ -9,4 +9,57 @@
</parent>
<artifactId>org.talend.libraries.apache.lucene8</artifactId>
<packaging>eclipse-plugin</packaging>
<properties>
<lucene.version>8.11.2</lucene.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
<version>${lucene.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>generate-sources</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.basedir}/lib</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>


@@ -35,7 +35,7 @@
<!-- same as xercesImpl.jar-->
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.0</version>
<version>2.12.2</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.ws.xmlschema</groupId>


@@ -6,9 +6,42 @@ COPYRIGHTS AND LICENSES
ORIGINAL LICENSE (a.k.a. "hypersonic_lic.txt")
For content, code, and products originally developed by Thomas Mueller and the Hypersonic SQL Group:
For work developed by the HSQL Development Group:
Copyright (c) 1995-2000 by the Hypersonic SQL Group.
Copyright (c) 2001-2022, The HSQL Development Group
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
Neither the name of the HSQL Development Group nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
For work originally developed by the Hypersonic SQL Group:
Copyright (c) 1995-2000, The Hypersonic SQL Group.
All rights reserved.
Redistribution and use in source and binary forms, with or without
@@ -37,12 +70,12 @@ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This software consists of voluntary contributions made by many individuals on behalf of the
Hypersonic SQL Group.
This software consists of voluntary contributions made by many individuals
on behalf of the Hypersonic SQL Group.
For work added by the HSQL Development Group (a.k.a. hsqldb_lic.txt):
Copyright (c) 2001-2005, The HSQL Development Group
Copyright (c) 2001-2022, The HSQL Development Group
All rights reserved.
Redistribution and use in source and binary forms, with or without

main/plugins/org.talend.libraries.jdbc.hsql/pom.xml Normal file → Executable file

@@ -9,4 +9,33 @@
</parent>
<artifactId>org.talend.libraries.jdbc.hsql</artifactId>
<packaging>eclipse-plugin</packaging>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy</id>
<phase>generate-sources</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.7.1</version>
<classifier>jdk8</classifier>
<outputDirectory>${project.basedir}/lib</outputDirectory>
<destFileName>hsqldb.jar</destFileName>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>


@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="lib/advancedPersistentLookupLib-1.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/advancedPersistentLookupLib-1.4.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="bin"/>


@@ -3,8 +3,8 @@ Bundle-ManifestVersion: 2
Bundle-Name: org.talend.libraries.persist.lookup
Bundle-SymbolicName: org.talend.libraries.persist.lookup
Bundle-Version: 7.3.1.qualifier
Bundle-ClassPath: lib/advancedPersistentLookupLib-1.2.jar,
.
Bundle-ClassPath: .,
lib/advancedPersistentLookupLib-1.4.jar
Export-Package: org.talend.commons.utils.data.map,
org.talend.commons.utils.time,
org.talend.core.model.process,


@@ -2,4 +2,6 @@ source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
lib/advancedPersistentLookupLib-1.4.jar,\
lib/advancedPersistentLookupLib-1.3.jar,\
lib/advancedPersistentLookupLib-1.2.jar


@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<jardesc>
<jar path="D:/studio_code/tcommon-studio-se/main/plugins/org.talend.libraries.persist.lookup/lib/advancedPersistentLookupLib-1.2.jar"/>
<jar path="D:/studio_code/tcommon-studio-se/main/plugins/org.talend.libraries.persist.lookup/lib/advancedPersistentLookupLib-1.4.jar"/>
<options buildIfNeeded="true" compress="true" descriptionLocation="/org.talend.libraries.persist.lookup/export_advancedPersistentLookupLib.jardesc" exportErrors="true" exportWarnings="true" includeDirectoryEntries="false" overwrite="true" saveDescription="false" storeRefactorings="false" useSourceFolders="false"/>
<storedRefactorings deprecationInfo="true" structuralOnly="false"/>
<selectedProjects/>


@@ -2,6 +2,7 @@
<classpath>
<classpathentry kind="src" path="src/main/java"/>
<classpathentry kind="src" path="resources/java"/>
<classpathentry exported="true" kind="lib" path="lib/jboss-marshalling-2.0.12.Final.jar"/>
<classpathentry kind="lib" path="lib/crypto-utils.jar"/>
<classpathentry kind="lib" path="lib/slf4j-api-1.7.25.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>


@@ -28,7 +28,8 @@ Eclipse-LazyStart: true
Bundle-ClassPath: .,
lib/crypto-utils.jar,
lib/slf4j-api-1.7.25.jar
Export-Package: org.talend.librariesmanager.emf.librariesindex,
Export-Package: org.jboss.marshalling,
org.talend.librariesmanager.emf.librariesindex,
org.talend.librariesmanager.librarydata,
org.talend.librariesmanager.maven,
org.talend.librariesmanager.model,
@@ -40,3 +41,5 @@ Export-Package: org.talend.librariesmanager.emf.librariesindex,
Import-Package: org.eclipse.emf.ecore.xmi.impl,
org.talend.osgi.hook.notification
Eclipse-BundleShape: dir
Bundle-ClassPath: lib/jboss-marshalling-2.0.12.Final.jar,
.


@@ -69,6 +69,12 @@
name="crypto-utils-0.31.12.jar">
</library>
</systemRoutine>
<systemRoutine
name="IPersistableLookupRow">
<library
name="mvn:org.jboss.marshalling/jboss-marshalling/2.0.12.Final">
</library>
</systemRoutine>
</extension>
<extension
point="org.talend.core.runtime.artifact_handler">


@@ -51,6 +51,11 @@
<version>1.7.25</version>
<overWrite>true</overWrite>
</artifactItem>
<artifactItem>
<groupId>org.jboss.marshalling</groupId>
<artifactId>jboss-marshalling</artifactId>
<version>2.0.12.Final</version>
</artifactItem>
</artifactItems>
</configuration>
</execution>


@@ -467,13 +467,13 @@ public class TalendDate {
*
* {Category} TalendDate
*
* {param} String("") string : date represent in string
* {param} String("2008/11/24 12:15:25") string : date represent in string
*
* {param} String("yyyy-MM-dd") pattern : date pattern
* {param} String("yyyy/MM/dd HH:mm:ss") pattern : date pattern
*
* {param} int(addValue) nb : the added value
* {param} int(5) nb : the added value
*
* {param} date("MM") dateType : the part to add
* {param} String("dd") dateType : the part to add
*
* {examples}
*
@@ -1216,8 +1216,19 @@ public class TalendDate {
}
/**
* format date to mssql 2008 type datetimeoffset ISO 8601 string with local time zone format string : yyyy-MM-dd
* HH:mm:ss.SSSXXX(JDK7 support it)
* Format date to mssql 2008 type datetimeoffset ISO 8601 string with local time zone format string : yyyy-MM-dd
* HH:mm:ss.SSSXXX (JDK7 support it)
*
* @param date the time value to be formatted into a time string.
* @return the formatted time string.
*
* {talendTypes} String
*
* {Category} TalendDate
*
* {param} date(new Date()) date : the time value to be formatted into a time string
*
* {example} formatDatetimeoffset(new Date()) #
*/
public static String formatDatetimeoffset(Date date) {
String dateString = formatDate("yyyy-MM-dd HH:mm:ss.SSSZ", date);// keep the max precision in java
@@ -1346,14 +1357,28 @@ public class TalendDate {
}
/**
*
* Convert a formatted string to date
*
* @param string Must be a string datatype. Passes the values that you want to convert.
* @param format Enter a valid TO_DATE format string. The format string must match the parts of the string argument
* default formate is "MM/DD/yyyy HH:mm:ss.sss" if not specified.
* default format is "MM/DD/yyyy HH:mm:ss.sss" if not specified.
*
* @return Date
* @throws ParseException
* {example} TO_DATE("1464576463231", "J") #Mon May 30 10:47:43 CST 2016
* {example} TO_DATE("2015-11-21 13:23:45","yyyy-MM-dd HH:mm:ss") #Sat Nov 21 13:23:45 CST 2015
*
* {talendTypes} Date
*
* {Category} TalendDate
*
* {param} String("2015-11-21 13:23:45") string : string Must be a string datatype. Passes the values that you want
* to convert.
*
* {param} String("yyyy-MM-dd HH:mm:ss") format : Enter a valid TO_DATE format string. The format string must match
* the parts of the string argument default format is "MM/DD/yyyy HH:mm:ss.sss" if not specified.
*
*
* {example} TO_DATE("1464576463231", "J") #Mon May 30 10:47:43 CST 2016 {example} TO_DATE("2015-11-21
* 13:23:45","yyyy-MM-dd HH:mm:ss") #Sat Nov 21 13:23:45 CST 2015
*
*/
public static Date TO_DATE(String string, String format) throws ParseException {
@@ -1374,6 +1399,24 @@ public class TalendDate {
}
/**
* Convert a formatted string to date with default format "MM/DD/yyyy HH:mm:ss.sss"
*
* @param string Must be a string datatype. Passes the values that you want to convert.
* @return Date
* @throws ParseException
*
* {talendTypes} Date
*
* {Category} TalendDate
*
* {param} String("11/21/2015 13:23:45.111") string : string Must be a string datatype. Passes the values that you
* want to convert.
*
* {example} TO_DATE("11/21/2015 13:23:45.111") #Sat Nov 21 13:23:45.111 CST 2015
*
*/
public static Date TO_DATE(String string) throws ParseException {
return TO_DATE(string, null);
}
@@ -1410,13 +1453,25 @@ public class TalendDate {
}
/**
*
* @param date Passes the values you want to change
* Add values to the specified portion of the date
*
* @param date Passes the values you want to change
* @param format A format string specifying the portion of the date value you want to change.For example, 'mm'.
* @param amount An integer value specifying the amount of years, months, days, hours,
* and so on by which you want to change the date value.
* @return Date NULL if a null value is passed as an argument to the function.
* @param amount An integer value specifying the amount of years, months, days, hours, and so on by which you want
* to change the date value.
* @return Date NULL if a null value is passed as an argument to the function.
* @throws ParseException
*
* {talendTypes} Date
*
* {Category} TalendDate
*
* {param} Date(new Date()) date :
*
* {param} String("HH") format :
*
* {param} int(2) amount :
*
* {example} ADD_TO_DATE(new Date(1464576463231l), "HH",2) #Mon May 30 12:47:43 CST 2016
*/
public static Date ADD_TO_DATE(Date date, String format, int amount) throws ParseException{
@@ -1485,10 +1540,21 @@ public class TalendDate {
}
/**
* Convert a Date to a formatted character string.
*
* @param date Date/Time datatype. Passes the date values you want to convert to character strings.
* @param format Enter a valid TO_CHAR format string. The format string defines the format of the return value,
* @return String. NULL if a value passed to the function is NULL.
* @param date the date value you want to convert to character strings.
* @param format the format of the return value,
* @return String. NULL if a value passed to the function is NULL.
*
* {talendTypes} String
*
* {Category} TalendDate
*
* {param} Date(new Date()) date : the date value you want to convert to character strings.
*
* {param} String("MM/DD/YYYY HH24:MI:SS") format : the format of the return value,
*
* {example} TO_CHAR(new Date(),"MM/DD/YYYY HH24:MI:SS") #
*/
public static String TO_CHAR(Date date, String format) {
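A short usage sketch for the three routines whose javadoc is extended above, following their {example} lines (it assumes TalendDate is importable as routines.TalendDate, as in generated jobs):

import java.util.Date;
import routines.TalendDate; // package assumed; adjust to where the routine actually lives

public class TalendDateUsage {
    public static void main(String[] args) throws Exception {
        Date d = TalendDate.TO_DATE("2015-11-21 13:23:45", "yyyy-MM-dd HH:mm:ss");
        Date later = TalendDate.ADD_TO_DATE(d, "HH", 2); // add two hours
        String text = TalendDate.TO_CHAR(later, "MM/DD/YYYY HH24:MI:SS");
        System.out.println(text);
    }
}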


@@ -1,6 +1,6 @@
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
@@ -14,7 +14,6 @@ package routines.system;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.HashMap;
import java.util.List;
import org.dom4j.Element;
@@ -110,26 +109,31 @@ public class GetJarsToRegister {
private String addLibsPath(String line, java.util.Map<String, String> crcMap) {
for (java.util.Map.Entry<String, String> entry : crcMap.entrySet()) {
line = adaptLibPaths(line, entry);
if (new java.io.File(line).exists()) {
break;
}
}
return line;
}
private String adaptLibPaths(String line, java.util.Map.Entry<String, String> entry) {
line = line.replace("\\", "/");
String jarName = entry.getValue();
String crc = entry.getKey();
String libStringFinder = "../lib/" + jarName;
String libStringFinder2 = "./" + jarName; // for the job jar itself.
String replacement = "../../../cache/lib/" + crc + "/" + jarName;
if (line.contains(libStringFinder)) {
line = line.replace(libStringFinder, "../../../cache/lib/" + crc + "/" + jarName);
line = line.replace(libStringFinder, replacement);
} else if (line.toLowerCase().contains(libStringFinder2)) {
line = line.toLowerCase().replace(libStringFinder2, "../../../cache/lib/" + crc + "/" + jarName);
} else if (line.toLowerCase().equals(jarName)) {
line = "../../../cache/lib/" + crc + "/" + jarName;
line = line.toLowerCase().replace(libStringFinder2, replacement);
} else if (line.equalsIgnoreCase(jarName)) {
line = replacement;
} else if (line.contains(":$ROOT_PATH/" + jarName + ":")) {
line = line.replace(":$ROOT_PATH/" + jarName + ":", ":$ROOT_PATH/../../../cache/lib/" + crc + "/" + jarName + ":");
line = line.replace(":$ROOT_PATH/" + jarName + ":", ":$ROOT_PATH/" + replacement + ":");
} else if (line.contains(";" + jarName + ";")) {
line = line.replace(";" + jarName + ";", ";../../../cache/lib/" + crc + "/" + jarName + ";");
line = line.replace(";" + jarName + ";", ";" + replacement + ";");
}
return line;
}
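To make the substitution above concrete, a standalone sketch of the same rewrite on a sample classpath line (the jar name and CRC below are made up):

public class LibPathRewriteDemo {
    public static void main(String[] args) {
        String jarName = "mysql-connector-java.jar"; // hypothetical entry from the CRC map
        String crc = "1a2b3c4d";
        String replacement = "../../../cache/lib/" + crc + "/" + jarName;
        String line = "java -cp ../lib/" + jarName + ":$ROOT_PATH/job.jar MainClass";
        if (line.contains("../lib/" + jarName)) {
            line = line.replace("../lib/" + jarName, replacement);
        }
        // prints: java -cp ../../../cache/lib/1a2b3c4d/mysql-connector-java.jar:$ROOT_PATH/job.jar MainClass
        System.out.println(line);
    }
}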


@@ -5,6 +5,9 @@ import java.io.DataOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.jboss.marshalling.Marshaller;
import org.jboss.marshalling.Unmarshaller;
public interface IPersistableLookupRow<R> {
public void writeKeysData(ObjectOutputStream out);
@@ -19,4 +22,25 @@ public interface IPersistableLookupRow<R> {
public void copyKeysDataTo(R other);
default public void writeKeysData(Marshaller marshaller){
// sub-classes need to override this method
throw new UnsupportedOperationException("Method needs to be overridden");
}
default public void readKeysData(Unmarshaller in){
throw new UnsupportedOperationException("Method needs to be overridden");
}
default public void writeValuesData(DataOutputStream dataOut, Marshaller objectOut){
throw new UnsupportedOperationException("Method needs to be overridden");
}
default public void readValuesData(DataInputStream dataIn, Unmarshaller objectIn){
throw new UnsupportedOperationException("Method needs to be overridden");
}
default public boolean supportMarshaller(){
// Override this method to return true after implementing the JBoss methods above
return false;
}
}
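A sketch of what a row class might override once it reports supportMarshaller() = true (the class and field names are hypothetical; checked IOExceptions are wrapped for brevity and the remaining interface methods are left out):

import java.io.IOException;
import org.jboss.marshalling.Marshaller;
import org.jboss.marshalling.Unmarshaller;

// Hypothetical key-only row showing only the new Marshaller-based hooks.
class CustomerKeyRow {
    String customerId;

    void writeKeysData(Marshaller marshaller) {
        try {
            marshaller.writeUTF(customerId);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    void readKeysData(Unmarshaller in) {
        try {
            customerId = in.readUTF();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    boolean supportMarshaller() {
        return true; // opt in to the JBoss Marshalling path
    }
}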


@@ -3,10 +3,28 @@ package routines.system;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.jboss.marshalling.Marshaller;
import org.jboss.marshalling.Unmarshaller;
public interface IPersistableRow<R> {
public void writeData(ObjectOutputStream out);
public void readData(ObjectInputStream in);
default public void writeData(Marshaller marshaller){
// sub-classes need to override this method
throw new UnsupportedOperationException("Method needs to be overridden");
}
default public void readData(Unmarshaller in){
throw new UnsupportedOperationException("Method needs to be overridden");
}
default public boolean supportJboss(){
// Override this method to return true after implementing the JBoss methods above
return false;
}
}
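The supportJboss() flag suggests that calling code picks the serialization path per row; a hedged sketch of such a dispatch (the helper class is illustrative, not part of this changeset):

package routines.system;

import java.io.ObjectOutputStream;
import org.jboss.marshalling.Marshaller;

final class RowWriterDispatch {
    // Uses the JBoss Marshalling path only for rows that opt in via supportJboss().
    static <R> void write(IPersistableRow<R> row, ObjectOutputStream legacyOut, Marshaller marshaller) {
        if (row.supportJboss()) {
            row.writeData(marshaller); // new Marshaller-based method
        } else {
            row.writeData(legacyOut);  // original ObjectOutputStream method
        }
    }
}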


@@ -520,9 +520,13 @@ public class ResumeUtil {
private String lineSeparator = System.getProperty("line.separator");
private int capibility = 2 << 22; //8M
private final int capibility = 2 << 22; //8M
private final int FLUSH_FACTOR = 6 *1024 *1024; //6M
private final int SUBSTRING_SIZE = 2 << 20; //2M
private int FLUSH_FACTOR = 6 *1024 *1024; //6M
public SimpleCsvWriter(FileChannel channel) {
@@ -553,6 +557,16 @@ public class ResumeUtil {
content = replace(content, "" + TextQualifier, "" + TextQualifier + TextQualifier);
}
if (content.length() > SUBSTRING_SIZE) { //2M
int index = 0;
for (; content.length() - index > SUBSTRING_SIZE; index += SUBSTRING_SIZE) {
flush(true);
final String substring = content.substring(index, index + SUBSTRING_SIZE);
buf.put(substring.getBytes());
}
content = content.substring(index);
}
byte[] contentByte = content.getBytes();
if(contentByte.length > capibility - buf.position()) {
flush(true);


@@ -1319,11 +1319,81 @@ public class LocalLibraryManager implements ILibraryManagerService, IChangedLibr
saveMavenIndex(mavenURIMap, monitorWrap);
savePlatfromURLIndex(platformURLMap, monitorWrap);
if (service != null) {
deployLibsFromCustomComponents(service, platformURLMap);
}
return mavenURIMap;
}
public void deployLibsFromCustomComponents(File componentFolder, List<ModuleNeeded> modulesNeeded) {
if (modulesNeeded == null || modulesNeeded.isEmpty()) {
return;
}
Map<File, Set<MavenArtifact>> needToDeploy = new HashMap<File, Set<MavenArtifact>>();
modulesNeeded.forEach(module -> {
if (module != null) {
boolean needDeploy = false;
String mvnUri = module.getMavenUri();
String jarPathFromMaven = getJarPathFromMaven(StringUtils.isNotBlank(mvnUri) ? mvnUri : module.getModuleName());
if (StringUtils.isBlank(jarPathFromMaven)) {
needDeploy = true;
} else {
File jarFromMaven = new File(jarPathFromMaven);
if (!jarFromMaven.exists()) {
needDeploy = true;
}
}
if (needDeploy) {
File deployFile = getDeployJarFileByModule(componentFolder, module);
if (deployFile != null) {
install(deployFile, mvnUri, false, true, null);
if (needToDeploy.get(deployFile) == null) {
needToDeploy.put(deployFile, new HashSet<MavenArtifact>());
}
if (StringUtils.isNotBlank(mvnUri)) {
MavenArtifact mavenArtifact = MavenUrlHelper.parseMvnUrl(mvnUri);
needToDeploy.get(deployFile).add(mavenArtifact);
} else {
Map<String, String> sourceAndMavenUri = new HashMap<>();
guessMavenRUIFromIndex(deployFile, true, sourceAndMavenUri);
Set<MavenArtifact> MavenArtifactSet = new HashSet<MavenArtifact>();
sourceAndMavenUri.keySet().forEach(mavenUri -> {
if (StringUtils.isNotBlank(mavenUri)) {
MavenArtifactSet.add(MavenUrlHelper.parseMvnUrl(mavenUri));
}
});
needToDeploy.get(deployFile).addAll(MavenArtifactSet);
}
}
}
}
});
if (!needToDeploy.isEmpty()) {
ShareComponentsLibsJob shareJob = new ShareComponentsLibsJob(
Messages.getString("LocalLibraryManager.shareLibsForCustomponents"), needToDeploy, deployer);
shareJob.schedule();
}
}
private File getDeployJarFileByModule(File componentFolder, ModuleNeeded module) {
String mvnUri = module.getMavenUri();
if (StringUtils.isNotBlank(mvnUri)) {
MavenArtifact mavenArtifact = MavenUrlHelper.parseMvnUrl(mvnUri);
String fileName = mavenArtifact.getFileName();
File jarFile = new File(componentFolder, fileName);
if (jarFile.exists()) {
return jarFile;
}
}
// try module name
File jarFile = new File(componentFolder, module.getModuleName());
if (jarFile.exists()) {
return jarFile;
}
return null;
}
/**
*
@@ -1382,86 +1452,6 @@ public class LocalLibraryManager implements ILibraryManagerService, IChangedLibr
return false;
}
private void deployLibsFromCustomComponents(IComponentsService service, Map<String, String> platformURLMap) {
boolean deployToRemote = true;
if (!LibrariesManagerUtils.shareLibsAtStartup()) {
log.info("Skip deploying libs from custom components");
deployToRemote = false;
}
Map<File, Set<MavenArtifact>> needToDeploy = new HashMap<File, Set<MavenArtifact>>();
List<ComponentProviderInfo> componentsFolders = service.getComponentsFactory().getComponentsProvidersInfo();
for (ComponentProviderInfo providerInfo : componentsFolders) {
String id = providerInfo.getId();
try {
File file = new File(providerInfo.getLocation());
if (isExtComponentProvider(id)) {
if (file.isDirectory()) {
List<File> jarFiles = FilesUtils.getJarFilesFromFolder(file, null);
if (jarFiles.size() > 0) {
for (File jarFile : jarFiles) {
String name = jarFile.getName();
if (!canDeployFromCustomComponentFolder(name) || platformURLMap.get(name) != null) {
continue;
}
collectLibModules(jarFile, needToDeploy);
}
}
} else {
if (!canDeployFromCustomComponentFolder(file.getName()) || platformURLMap.get(file.getName()) != null) {
continue;
}
collectLibModules(file, needToDeploy);
}
}
} catch (Exception e) {
ExceptionHandler.process(e);
continue;
}
}
// first install them locally
needToDeploy.forEach((k, v) -> {
try {
// install as a release version if the mvn url can't be found in the index
install(k, null, false, true);
} catch (Exception e) {
ExceptionHandler.process(e);
}
});
if (!deployToRemote) {
return;
}
ShareComponentsLibsJob shareJob = new ShareComponentsLibsJob(
Messages.getString("LocalLibraryManager.shareLibsForCustomponents"), needToDeploy, deployer);
shareJob.schedule();
}
private void collectLibModules(File jarFile, Map<File, Set<MavenArtifact>> needToDeploy) {
Map<String,String> mavenUris = new HashMap<String,String>();
guessMavenRUIFromIndex(jarFile, true, mavenUris);
Set<MavenArtifact> artifacts = new HashSet<MavenArtifact>();
for(String uri: mavenUris.keySet()) {
MavenArtifact art = MavenUrlHelper.parseMvnUrl(uri);
if(art!=null) {
artifacts.add(art);
}
}
needToDeploy.put(jarFile, artifacts);
}
private boolean canDeployFromCustomComponentFolder(String fileName) {
if (isSystemCacheFile(fileName) || isComponentDefinitionFileType(fileName)) {
return false;
}
return true;
}
private void warnDuplicated(List<ModuleNeeded> modules, Set<String> duplicates, String type) {
for (String lib : duplicates) {
Set<String> components = new HashSet<>();

View File

@@ -893,6 +893,7 @@ public final class DBConnectionContextUtils {
managerConnection.setValue(0, dbType, urlConnection, server, username, password, sidOrDatabase, port, filePath,
datasource, schemaOracle, additionParam, driverClassName, driverJarPath, dbVersionString);
managerConnection.setDbRootPath(dbRootPath);
managerConnection.setSupportNLS(dbConn.isSupportNLS());
return urlConnection;
}
@@ -1058,6 +1059,12 @@ public final class DBConnectionContextUtils {
cloneConn.setSQLMode(true);
}
if(dbConn.isSetSupportNLS()) {
cloneConn.setSupportNLS(dbConn.isSupportNLS());
} else {
cloneConn.setSupportNLS(false);
}
// cloneConn.setProperties(dbConn.getProperties());
// cloneConn.setCdcConns(dbConn.getCdcConns());
// cloneConn.setQueries(dbConn.getQueries());

View File

@@ -166,7 +166,8 @@ public class ExtendedNodeConnectionContextUtils {
KnoxUrl,
KnoxUsername,
KnoxPassword,
KnoxDirectory
KnoxDirectory,
KnoxTimeout
}
static List<IContextParameter> getContextVariables(final String prefixName, Connection conn, Set<IConnParamName> paramSet) {

View File

@@ -132,6 +132,22 @@
required="true"
uripath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-lang-2.4.jar">
</libraryNeeded>
<libraryNeeded
context="plugin:org.talend.libraries.jdbc.oracle"
language="java"
message="Needed for Oracle jdbc plugin National Language Support (NLS)."
mvn_uri="mvn:com.oracle.database.nls/orai18n/19.3.0.0/jar"
name="orai18n-19.3.0.0.jar"
required="true">
</libraryNeeded>
<libraryNeeded
context="plugin:org.talend.metadata.managment"
language="java"
message="Needed for plugin org.talend.metadata.managment"
name="hsqldb.jar" mvn_uri="mvn:org.hsqldb/hsqldb/2.7.1"
required="true"
uripath="platform:/plugin/org.talend.libraries.jdbc.hsql/lib/hsqldb.jar">
</libraryNeeded>
</extension>
<extension
point="org.talend.core.migrationTask">

View File

@@ -19,8 +19,6 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import metadata.managment.i18n.Messages;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.talend.cwm.helper.ColumnSetHelper;
@@ -29,6 +27,7 @@ import org.talend.metadata.managment.utils.MetadataConnectionUtils;
import org.talend.utils.sql.metadata.constants.GetTable;
import org.talend.utils.sql.metadata.constants.TableType;
import metadata.managment.i18n.Messages;
import orgomg.cwm.resource.relational.NamedColumnSet;
/**
@@ -177,9 +176,9 @@ public abstract class AbstractTableBuilder<T extends NamedColumnSet> extends Cwm
String tableComment = tablesSet.getString(GetTable.REMARKS.name());
if (StringUtils.isBlank(tableComment)) {
String dbProductName = getConnectionMetadata(connection).getDatabaseProductName();
String selectRemarkOnTable = MetadataConnectionUtils.getCommonQueryStr(dbProductName, tableName);
String selectRemarkOnTable = MetadataConnectionUtils.getCommonQueryStr(dbProductName);
if (selectRemarkOnTable != null) {
tableComment = executeGetCommentStatement(selectRemarkOnTable);
tableComment = executeGetCommentStatement(selectRemarkOnTable, tableName);
}
}
return tableComment;

View File

@@ -14,14 +14,14 @@ package org.talend.core.model.metadata.builder.database;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import metadata.managment.i18n.Messages;
import org.apache.log4j.Logger;
import metadata.managment.i18n.Messages;
/**
* @author scorreia
*
@@ -73,14 +73,48 @@ abstract class CwmBuilder {
*/
protected String executeGetCommentStatement(String queryStmt) {
String comment = null;
Statement statement = null;
PreparedStatement statement = null;
ResultSet resultSet = null;
try {
statement = connection.createStatement();
statement.execute(queryStmt);
statement = connection.prepareStatement(queryStmt);
resultSet = statement.executeQuery();
if (resultSet != null) {
while (resultSet.next()) {
comment = (String) resultSet.getObject(1);
}
}
} catch (SQLException e) {
// do nothing here
} finally {
// -- release resources
if (resultSet != null) {
try {
resultSet.close();
} catch (SQLException e) {
log.error(e, e);
}
}
if (statement != null) {
try {
statement.close();
} catch (SQLException e) {
log.error(e, e);
}
}
}
return comment;
}
protected String executeGetCommentStatement(String queryStmt, String tableName) {
String comment = null;
PreparedStatement statement = null;
ResultSet resultSet = null;
try {
statement = connection.prepareStatement(queryStmt);
statement.setString(1, tableName);
resultSet = statement.executeQuery();
// get the results
resultSet = statement.getResultSet();
if (resultSet != null) {
while (resultSet.next()) {
comment = (String) resultSet.getObject(1);

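The change above is the core of the TUP-37874 SQL-injection fix: the table name is bound as a PreparedStatement parameter instead of being concatenated into the query text. Below is a minimal standalone sketch of that pattern, with hypothetical names and try-with-resources in place of the explicit finally block used in CwmBuilder.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

final class CommentQuerySketch {

    // Fetch a single-column value with a bound parameter; the caller supplies a
    // query containing one '?' placeholder, e.g. the strings built by getCommonQueryStr().
    static String selectComment(Connection connection, String queryWithPlaceholder, String tableName) {
        try (PreparedStatement ps = connection.prepareStatement(queryWithPlaceholder)) {
            ps.setString(1, tableName); // bound, never concatenated into the SQL text
            try (ResultSet rs = ps.executeQuery()) {
                return rs.next() ? rs.getString(1) : null;
            }
        } catch (SQLException e) {
            return null; // mirrors the "do nothing here" behavior of the method above
        }
    }
}

For example, selectComment(conn, "SELECT COMMENTS FROM USER_TAB_COMMENTS WHERE TABLE_NAME= ? ", tableName) would reproduce the Oracle case.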
View File

@@ -291,6 +291,7 @@ public class ExtractMetaDataFromDataBase {
* DOC cantoine. Method to test DataBaseConnection.
*
* @param dbVersionString
* @param supportNLS
*
* @param String driverClass
* @param String urlString pwd
@@ -299,14 +300,14 @@ public class ExtractMetaDataFromDataBase {
* @return ConnectionStatus : the result of connection(boolean Result, String messageException)
*/
public static ConnectionStatus testConnection(String dbType, String url, String username, String pwd, String schema,
final String driverClassName, final String driverJarPath, String dbVersionString, String additionalParam) {
final String driverClassName, final String driverJarPath, String dbVersionString, String additionalParam, boolean supportNLS) {
return testConnection(dbType, url, username, pwd, schema, driverClassName, driverJarPath, dbVersionString,
additionalParam, null, null);
additionalParam, supportNLS, null, null);
}
public static ConnectionStatus testConnection(String dbType, String url, String username, String pwd, String schema,
final String driverClassName, final String driverJarPath, String dbVersionString, String additionalParam,
StringBuffer retProposedSchema, String sidOrDatabase) {
boolean supportNLS, StringBuffer retProposedSchema, String sidOrDatabase) {
Connection connection = null;
ConnectionStatus connectionStatus = new ConnectionStatus();
connectionStatus.setResult(false);
@@ -315,7 +316,7 @@ public class ExtractMetaDataFromDataBase {
List list = new ArrayList();
list = ExtractMetaDataUtils.getInstance().connect(dbType, url, username, pwd, driverClassName, driverJarPath,
dbVersionString, additionalParam);
dbVersionString, additionalParam, supportNLS);
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof Connection) {
@@ -498,7 +499,7 @@ public class ExtractMetaDataFromDataBase {
List list = metaData.getConnection(iMetadataConnection.getDbType(), url, iMetadataConnection.getUsername(),
iMetadataConnection.getPassword(), iMetadataConnection.getDatabase(), iMetadataConnection.getSchema(),
iMetadataConnection.getDriverClass(), iMetadataConnection.getDriverJarPath(),
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams());
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams(), iMetadataConnection.isSupportNLS());
Connection conn = null;
DriverShim wapperDriver = null;
@@ -582,7 +583,7 @@ public class ExtractMetaDataFromDataBase {
List list = extractMeta.getConnection(iMetadataConnection.getDbType(), iMetadataConnection.getUrl(),
iMetadataConnection.getUsername(), iMetadataConnection.getPassword(), iMetadataConnection.getDatabase(),
iMetadataConnection.getSchema(), iMetadataConnection.getDriverClass(), iMetadataConnection.getDriverJarPath(),
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams());
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams(), iMetadataConnection.isSupportNLS());
DriverShim wapperDriver = null;
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {

View File

@@ -121,6 +121,8 @@ public class ExtractMetaDataUtils {
private String[] ORACLE_SSL_JARS = new String[] { "oraclepki-12.2.0.1.jar", "osdt_cert-12.2.0.1.jar", //$NON-NLS-1$//$NON-NLS-2$
"osdt_core-12.2.0.1.jar" }; //$NON-NLS-1$
private String ORACLE_NLS_JARS = "orai18n-19.3.0.0.jar";
public static final String SNOWFLAKE = "Snowflake"; //$NON-NLS-1$
@@ -829,6 +831,11 @@ public class ExtractMetaDataUtils {
*/
public List getConnection(String dbType, String url, String username, String pwd, String dataBase, String schemaBase,
final String driverClassName, final String driverJarPath, String dbVersion, String additionalParams) {
return getConnection(dbType, url, username, pwd, dataBase, schemaBase, driverClassName, driverJarPath, dbVersion, additionalParams, false);
}
public List getConnection(String dbType, String url, String username, String pwd, String dataBase, String schemaBase,
final String driverClassName, final String driverJarPath, String dbVersion, String additionalParams, boolean supportNLS) {
boolean isColsed = false;
List conList = new ArrayList();
try {
@@ -846,7 +853,7 @@ public class ExtractMetaDataUtils {
closeConnection(true); // close before connecting.
checkDBConnectionTimeout();
list = connect(dbType, url, username, pwd, driverClassName, driverJarPath, dbVersion, additionalParams);
list = connect(dbType, url, username, pwd, driverClassName, driverJarPath, dbVersion, additionalParams, supportNLS);
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof Connection) {
@@ -938,7 +945,7 @@ public class ExtractMetaDataUtils {
* @throws Exception
*/
public List connect(String dbType, String url, String username, String pwd, final String driverClassNameArg,
final String driverJarPathArg, String dbVersion, String additionalParams) throws Exception {
final String driverJarPathArg, String dbVersion, String additionalParams, boolean supportNLS) throws Exception {
Connection connection = null;
DriverShim wapperDriver = null;
List conList = new ArrayList();
@@ -953,11 +960,18 @@ public class ExtractMetaDataUtils {
if ((driverJarPathArg == null || driverJarPathArg.equals(""))) { //$NON-NLS-1$
List<String> driverNames = EDatabaseVersion4Drivers.getDrivers(dbType, dbVersion);
if (driverNames != null) {
if(EDatabaseTypeName.ORACLEFORSID.getProduct().equals(EDatabaseTypeName.getTypeFromDbType(dbType).getProduct())) {
if(supportNLS){
driverNames.add(ORACLE_NLS_JARS);
}
}
if (EDatabaseTypeName.ORACLE_CUSTOM.getDisplayName().equals(dbType)
&& StringUtils.isNotEmpty(additionalParams)) {
if (additionalParams.contains(SSLPreferenceConstants.TRUSTSTORE_TYPE)) {
driverNames.addAll(Arrays.asList(ORACLE_SSL_JARS));
}
} else if (SNOWFLAKE.equals(dbType)) { // $NON-NLS-1$
// TDQ-17294 msjian Support of Snowflake for DQ Datamart
driverNames.add(SNOWFLAKE_DRIVER_JAR);
@@ -1285,7 +1299,7 @@ public class ExtractMetaDataUtils {
List list = getConnection(metadataConnection.getDbType(), metadataConnection.getUrl(), metadataConnection.getUsername(),
metadataConnection.getPassword(), metadataConnection.getDatabase(), metadataConnection.getSchema(),
metadataConnection.getDriverClass(), metadataConnection.getDriverJarPath(),
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams());
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
return list;
}

View File

@@ -17,6 +17,7 @@ import java.nio.charset.Charset;
import java.security.Provider;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -166,6 +167,11 @@ public class JDBCDriverLoader {
info.put("charSet", systemCharset.displayName()); //$NON-NLS-1$
}
}
//TUP-37016:Upgrade hsqldb to 2.7.1
if (dbType.equals(EDatabaseTypeName.ACCESS.getXmlName()) || ConnectionUtils.isHsql(url)) {
System.setProperty("hsqldb.method_class_names", "net.ucanaccess.converters.*");
}
if (additionalParams != null && !"".equals(additionalParams) && dbType.toUpperCase().contains("ORACLE")) {//$NON-NLS-1$//$NON-NLS-2$
if (additionalParams.contains(SSLPreferenceConstants.TRUSTSTORE_TYPE)) {
@@ -198,10 +204,20 @@ public class JDBCDriverLoader {
}
connection = wapperDriver.connect(url, info);
}
try {
ResultSet schemas = connection.getMetaData().getSchemas();
if(schemas.next()) {
schemas.getString(1);
}
} catch (Exception e) {
// ignore failures from this metadata probe
}
// }
// DriverManager.deregisterDriver(wapperDriver);
// bug 9162
list.add(connection);
list.add(wapperDriver);
return list;
} catch (Throwable e) {

View File

@@ -470,7 +470,7 @@ public class ExtractManager {
metadataConnection.getUsername(), metadataConnection.getPassword(), metadataConnection.getDatabase(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(),
metadataConnection.getDriverJarPath(), metadataConnection.getDbVersionString(),
metadataConnection.getAdditionalParams());
metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof Driver) {
@@ -574,7 +574,7 @@ public class ExtractManager {
metadataConnection.getUsername(), metadataConnection.getPassword(), metadataConnection.getDatabase(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(),
metadataConnection.getDriverJarPath(), metadataConnection.getDbVersionString(),
metadataConnection.getAdditionalParams());
metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof DriverShim) {
@@ -1064,7 +1064,7 @@ public class ExtractManager {
List connList = extractMeta.getConnection(metadataConnection.getDbType(), metadataConnection.getUrl(),
metadataConnection.getUsername(), metadataConnection.getPassword(), metadataConnection.getDatabase(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(), metadataConnection.getDriverJarPath(),
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams());
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
try {
if (!tableInfoParameters.isUsedName()) {
if (tableInfoParameters.getSqlFiter() != null && !"".equals(tableInfoParameters.getSqlFiter())) { //$NON-NLS-1$

View File

@@ -40,7 +40,7 @@ public enum EHiveWithTezJars {
"api-asn1-api-1.0.0-M20.jar", "api-util-1.0.0-M20.jar", "asm-3.1.jar", "avro-1.7.4.jar",
"commons-beanutils-1.7.0.jar", "commons-beanutils-core-1.8.0.jar", "commons-compress-1.4.1.jar",
"commons-configuration-1.6.jar", "commons-digester-1.8.jar", "commons-net-3.1.jar", "curator-client-2.6.0.jar",
"curator-framework-2.6.0.jar", "curator-recipes-2.6.0.jar", "gson-2.2.4.jar", "guice-3.0.jar",
"curator-framework-2.6.0.jar", "curator-recipes-2.6.0.jar", "gson-2.9.0.jar", "guice-3.0.jar",
"guice-servlet-3.0.jar", "hadoop-auth-2.6.0.2.2.0.0-2041.jar", "hadoop-common-2.6.0.2.2.0.0-2041.jar",
"hadoop-hdfs-2.6.0.2.2.0.0-2041.jar", "hadoop-yarn-api-2.6.0.2.2.0.0-2041.jar",
"hadoop-yarn-client-2.6.0.2.2.0.0-2041.jar", "hadoop-yarn-common-2.6.0.2.2.0.0-2041.jar", "htrace-core-3.0.4.jar",
@@ -56,7 +56,7 @@ public enum EHiveWithTezJars {
"api-asn1-api-1.0.0-M20.jar", "api-util-1.0.0-M20.jar", "asm-3.2.jar", "avro-1.7.5.jar",
"commons-beanutils-1.7.0.jar", "commons-beanutils-core-1.8.0.jar", "commons-compress-1.4.1.jar",
"commons-configuration-1.6.jar", "commons-digester-1.8.jar", "commons-net-3.1.jar", "curator-client-2.7.1.jar",
"curator-framework-2.7.1.jar", "curator-recipes-2.7.1.jar", "gson-2.2.4.jar", "guice-3.0.jar",
"curator-framework-2.7.1.jar", "curator-recipes-2.7.1.jar", "gson-2.9.0.jar", "guice-3.0.jar",
"guice-servlet-3.0.jar", "hadoop-auth-2.7.1.2.3.2.0-2950.jar", "hadoop-common-2.7.1.2.3.2.0-2950.jar",
"hadoop-hdfs-2.7.1.2.3.2.0-2950.jar", "hadoop-yarn-api-2.7.1.2.3.2.0-2950.jar",
"hadoop-yarn-client-2.7.1.2.3.2.0-2950.jar", "hadoop-yarn-common-2.7.1.2.3.2.0-2950.jar",

View File

@@ -83,6 +83,7 @@ import org.talend.metadata.managment.connection.manager.HiveConnectionManager;
import org.talend.metadata.managment.hive.EmbeddedHiveDataBaseMetadata;
import org.talend.metadata.managment.repository.ManagerConnection;
import org.talend.metadata.managment.utils.DatabaseConstant;
import org.talend.metadata.managment.utils.EDataBaseType;
import org.talend.metadata.managment.utils.ManagementTextUtils;
import org.talend.metadata.managment.utils.MetadataConnectionUtils;
import org.talend.utils.sql.ConnectionUtils;
@@ -1038,8 +1039,12 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
// for CDH4 HIVE2 , the table type are MANAGED_TABLE and EXTERNAL_TABLE ......
// tableType = null;
}
Map<String,String> tableComments = null;
if (!isOracle8i) {
tableComments = this.getTableComments(dbJDBCMetadata, catalogName, schemaPattern);
}
ResultSet tables = dbJDBCMetadata.getTables(catalogName, schemaPattern, tablePattern, tableType);
boolean hasRemarksCol = hasRemarksColumn(tables);
while (tables.next()) {
String coloumnName = GetTable.TABLE_SCHEM.name();
if (schemaPattern != null) {
@@ -1064,8 +1069,13 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
// if (!isOracle && !isOracle8i && !isOracleJdbc && tableName.startsWith("/")) { //$NON-NLS-1$
// continue;
// }
if (!isOracle8i) {
tableComment = getTableComment(dbJDBCMetadata, tables, tableName, catalogName, schemaPattern);
if (hasRemarksCol) {
tableComment = getRemarksFromResultSet(tables);
}
if (tableComments != null) {
if (StringUtils.isEmpty(tableComment)) {
tableComment = tableComments.get(tableName);
}
}
MetadataTable metadatatable = null;
if (TableType.VIEW.toString().equals(temptableType) || ETableTypes.VIRTUAL_VIEW.getName().equals(temptableType)) {
@@ -1209,6 +1219,69 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
}
return tableComment;
}
private Map<String, String> getTableComments(DatabaseMetaData dbJDBCMetadata, String catalogName, String schemaPattern) {
Map<String, String> ret = new HashMap<String, String>();
PreparedStatement ps = null;
ResultSet rs = null;
try {
String productName = dbJDBCMetadata.getDatabaseProductName();
if (StringUtils.isEmpty(productName)) {
return ret;
}
productName = productName.replaceAll(" ", "_"); //$NON-NLS-1$ //$NON-NLS-2$
EDataBaseType eDataBaseType = null;
try {
eDataBaseType = EDataBaseType.valueOf(productName);
} catch (Exception e) {
eDataBaseType = EDataBaseType.Microsoft_SQL_Server;
}
String sqlStr = ""; //$NON-NLS-1$
switch (eDataBaseType) {
case Oracle:
sqlStr = "SELECT TABLE_NAME,COMMENTS FROM ALL_TAB_COMMENTS WHERE OWNER=?";
ps = dbJDBCMetadata.getConnection().prepareStatement(sqlStr);
ps.setString(1, schemaPattern.toUpperCase());
break;
case MySQL:
sqlStr = "SELECT TABLE_NAME,TABLE_COMMENT FROM information_schema.TABLES WHERE TABLE_SCHEMA=?";
ps = dbJDBCMetadata.getConnection().prepareStatement(sqlStr);
ps.setString(1, catalogName);
break;
default:
break;
}
if (ps != null) {
rs = ps.executeQuery();
while (rs != null && rs.next()) {
String comment = rs.getString(2);
if (!StringUtils.isEmpty(comment)) {
ret.put(rs.getString(1), comment);
}
}
}
} catch (SQLException e) {
log.error(e, e);
} finally {
if (ps != null) {
try {
ps.close();
} catch (SQLException e) {
CommonExceptionHandler.process(e);
}
}
if (rs != null) {
try {
rs.close();
} catch (SQLException e) {
CommonExceptionHandler.process(e);
}
}
}
return ret;
}
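For orientation, the point of getTableComments() above (part of the TUP-37425 performance work) is to replace one comment query per table with a single query per catalog/schema; the per-row code then only does a map lookup, preferring the standard REMARKS column when the driver provides it. A compact sketch of that call-site shape, with hypothetical names, not the actual DBConnectionFillerImpl code:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Map;

final class TableCommentLookupSketch {

    // 'tables' is the ResultSet returned by DatabaseMetaData.getTables(...),
    // 'commentsForSchema' the map produced once per schema by the bulk query.
    static String resolveComment(ResultSet tables, boolean hasRemarksCol,
            Map<String, String> commentsForSchema, String tableName) throws SQLException {
        String comment = hasRemarksCol ? tables.getString("REMARKS") : null;
        if ((comment == null || comment.isEmpty()) && commentsForSchema != null) {
            comment = commentsForSchema.get(tableName);
        }
        return comment;
    }
}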
/**
* get the Column Comment especially for oracle type.
@@ -1295,9 +1368,12 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
}
}
}
Map<String, String> tableComments = null;
if (!flag) {
tableComments = this.getTableComments(dbJDBCMetadata, catalogName, schemaPattern);
}
ResultSet tables = dbJDBCMetadata.getTables(catalogName, schemaPattern, tablePattern, tableType);
boolean hasRemarksCol = hasRemarksColumn(tables);
while (tables.next()) {
String tableName = getStringFromResultSet(tables, GetTable.TABLE_NAME.name());
String temptableType = getStringFromResultSet(tables, GetTable.TABLE_TYPE.name());
@@ -1316,8 +1392,13 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
if (tableName == null || tablesToFilter.contains(tableName) || tableName.startsWith("/")) { //$NON-NLS-1$
continue;
}
if (!flag) {
tableComment = getTableComment(dbJDBCMetadata, tables, tableName, catalogName, schemaPattern);
if (hasRemarksCol) {
tableComment = getRemarksFromResultSet(tables);
}
if (tableComments != null) {
if (StringUtils.isEmpty(tableComment)) {
tableComment = tableComments.get(tableName);
}
}
// create table
TdTable table = RelationalFactory.eINSTANCE.createTdTable();
@@ -1367,8 +1448,17 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
}
}
try {
boolean flag = true;
if (pack != null) {
Connection c = ConnectionHelper.getConnection(pack);
flag = MetadataConnectionUtils.isOracle8i(c);
}
Map<String, String> tableComments = null;
if (!flag) {
tableComments = this.getTableComments(dbJDBCMetadata, catalogName, schemaPattern);
}
ResultSet tables = dbJDBCMetadata.getTables(catalogName, schemaPattern, viewPattern, tableType);
boolean hasRemarksCol = hasRemarksColumn(tables);
while (tables.next()) {
String tableName = getStringFromResultSet(tables, GetTable.TABLE_NAME.name());
@@ -1379,14 +1469,14 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
continue;
}
// common
boolean flag = true;
String tableComment = null;
if (pack != null) {
Connection c = ConnectionHelper.getConnection(pack);
flag = MetadataConnectionUtils.isOracle8i(c);
if (hasRemarksCol) {
tableComment = getRemarksFromResultSet(tables);
}
if (!flag) {
tableComment = getTableComment(dbJDBCMetadata, tables, tableName, catalogName, schemaPattern);
if (tableComments != null) {
if (StringUtils.isEmpty(tableComment)) {
tableComment = tableComments.get(tableName);
}
}
// create table
TdView table = RelationalFactory.eINSTANCE.createTdView();
@@ -1425,6 +1515,24 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
return valueOfString;
}
private boolean hasRemarksColumn(ResultSet resultSet) {
try {
if (resultSet == null || resultSet.getMetaData() == null) {
return false;
}
int numOfCols = resultSet.getMetaData().getColumnCount();
for (int i = 1; i < numOfCols + 1; i++) {
String colName = resultSet.getMetaData().getColumnLabel(i);
if (StringUtils.equals(colName, GetColumn.REMARKS.name())) {
return true;
}
}
} catch (SQLException e) {
CommonExceptionHandler.process(e);
}
return false;
}
private String getRemarksFromResultSet(ResultSet resultSet) {
String valueOfString = null;
try {

View File

@@ -90,6 +90,8 @@ public class ManagerConnection {
Integer id = null;
String additionalParams;
private boolean supportNLS;
private String schemaOracle;
@@ -288,7 +290,7 @@ public class ManagerConnection {
}
// test the connection
testConnection = ExtractMetaDataFromDataBase.testConnection(dbTypeString, urlConnectionString, username, password,
schemaName, driverClassName, driverJarPath, dbVersionString, additionalParams, retProposedSchema,
schemaName, driverClassName, driverJarPath, dbVersionString, additionalParams, supportNLS, retProposedSchema,
sidOrDatabase);
isValide = testConnection.getResult();
messageException = testConnection.getMessageException();
@@ -388,7 +390,7 @@ public class ManagerConnection {
metadataConnection.getUrl(), metadataConnection.getUsername(), metadataConnection.getPassword(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(),
metadataConnection.getDriverJarPath(), metadataConnection.getDbVersionString(),
metadataConnection.getAdditionalParams(), retProposedSchema, metadataConnection.getDatabase());
metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS(), retProposedSchema, metadataConnection.getDatabase());
}
// qli
// record this metadataConnection as old connection.
@@ -475,4 +477,13 @@ public class ManagerConnection {
this.isValide = isValide;
}
/**
* Sets the supportNLS.
* @param supportNLS the supportNLS to set
*/
public void setSupportNLS(boolean supportNLS) {
this.supportNLS = supportNLS;
}
}

View File

@@ -188,7 +188,7 @@ public class MetadataConnectionUtils {
}
list = ExtractMetaDataUtils.getInstance().connect(metadataBean.getDbType(), metadataBean.getUrl(),
metadataBean.getUsername(), metadataBean.getPassword(), metadataBean.getDriverClass(),
metadataBean.getDriverJarPath(), metadataBean.getDbVersionString(), metadataBean.getAdditionalParams());
metadataBean.getDriverJarPath(), metadataBean.getDbVersionString(), metadataBean.getAdditionalParams(), metadataBean.isSupportNLS());
} catch (Exception e) {
rc.setMessage("fail to connect database!"); //$NON-NLS-1$
CommonExceptionHandler.process(e);
@@ -274,6 +274,7 @@ public class MetadataConnectionUtils {
String dataBase = databaseConnection.getSID();
String dbVersionString = databaseConnection.getDbVersionString();
String additionalParams = databaseConnection.getAdditionalParams();
boolean supportNLS = databaseConnection.isSupportNLS();
// MOD qiongli 2011-9-6,TDQ 3317.handle context mode
if (databaseConnection.isContextMode()) {
@@ -313,6 +314,7 @@ public class MetadataConnectionUtils {
metadataConnection.setUsername(userName);
metadataConnection.setPassword(password);
metadataConnection.setUrl(dbUrl);
metadataConnection.setSupportNLS(supportNLS);
// TDQ-12299: transfer the OtherParameters to metadataConnection, because create impala connection use that
// values
@@ -803,6 +805,33 @@ public class MetadataConnectionUtils {
}
public static String getCommonQueryStr(String productName) {
if (productName == null) {
return null;
}
productName = productName.replaceAll(" ", "_"); //$NON-NLS-1$ //$NON-NLS-2$
EDataBaseType eDataBaseType = null;
try {
eDataBaseType = EDataBaseType.valueOf(productName);
} catch (Exception e) {
eDataBaseType = EDataBaseType.Microsoft_SQL_Server;
}
String sqlStr = ""; //$NON-NLS-1$
switch (eDataBaseType) {
case Oracle:
sqlStr = "SELECT COMMENTS FROM USER_TAB_COMMENTS WHERE TABLE_NAME= ? "; //$NON-NLS-1$
break;
case MySQL:
sqlStr = "SELECT TABLE_COMMENT FROM information_schema.TABLES WHERE TABLE_NAME= ? "; //$NON-NLS-1$
break;
default:
sqlStr = null;
}
return sqlStr;
}
/**
* get Comment Query Str.
*
@@ -1312,7 +1341,7 @@ public class MetadataConnectionUtils {
return ExtractMetaDataUtils.getInstance().getConnection(metadataBean.getDbType(), metadataBean.getUrl(),
metadataBean.getUsername(), metadataBean.getPassword(), metadataBean.getDatabase(), metadataBean.getSchema(),
metadataBean.getDriverClass(), metadataBean.getDriverJarPath(), metadataBean.getDbVersionString(),
metadataBean.getAdditionalParams());
metadataBean.getAdditionalParams(), metadataBean.isSupportNLS());
}
/**

View File

@@ -80,6 +80,7 @@ public class DatabaseConnectionItemProvider extends ConnectionItemProvider imple
addCdcTypeModePropertyDescriptor(object);
addSQLModePropertyDescriptor(object);
addUiSchemaPropertyDescriptor(object);
addSupportNLSPropertyDescriptor(object);
}
return itemPropertyDescriptors;
}
@@ -468,6 +469,22 @@ public class DatabaseConnectionItemProvider extends ConnectionItemProvider imple
false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null));
}
/**
* This adds a property descriptor for the Support NLS feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSupportNLSPropertyDescriptor(Object object) {
itemPropertyDescriptors
.add(createItemPropertyDescriptor(((ComposeableAdapterFactory) adapterFactory).getRootAdapterFactory(),
getResourceLocator(), getString("_UI_DatabaseConnection_supportNLS_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_DatabaseConnection_supportNLS_feature",
"_UI_DatabaseConnection_type"),
ConnectionPackage.Literals.DATABASE_CONNECTION__SUPPORT_NLS, true, false, false,
ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE, null, null));
}
/**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
@@ -558,6 +575,7 @@ public class DatabaseConnectionItemProvider extends ConnectionItemProvider imple
case ConnectionPackage.DATABASE_CONNECTION__CDC_TYPE_MODE:
case ConnectionPackage.DATABASE_CONNECTION__SQL_MODE:
case ConnectionPackage.DATABASE_CONNECTION__UI_SCHEMA:
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
return;
case ConnectionPackage.DATABASE_CONNECTION__CDC_CONNS:

View File

@@ -348,6 +348,8 @@
<eStructuralFeatures xsi:type="ecore:EAttribute" name="UiSchema" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"/>
<eStructuralFeatures xsi:type="ecore:EReference" name="parameters" upperBound="-1"
eType="#//AdditionalProperties" containment="true"/>
<eStructuralFeatures xsi:type="ecore:EAttribute" name="supportNLS" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EBoolean"
defaultValueLiteral="false" unsettable="true"/>
</eClassifiers>
<eClassifiers xsi:type="ecore:EClass" name="SAPConnection" eSuperTypes="#//Connection">
<eStructuralFeatures xsi:type="ecore:EAttribute" name="Host" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"/>

View File

@@ -187,6 +187,7 @@
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//DatabaseConnection/SQLMode"/>
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//DatabaseConnection/UiSchema"/>
<genFeatures property="None" children="true" createChild="true" ecoreFeature="ecore:EReference metadata.ecore#//DatabaseConnection/parameters"/>
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//DatabaseConnection/supportNLS"/>
</genClasses>
<genClasses ecoreClass="metadata.ecore#//SAPConnection">
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//SAPConnection/Host"/>

View File

@@ -5238,6 +5238,15 @@ public interface ConnectionPackage extends EPackage {
*/
int DATABASE_CONNECTION__PARAMETERS = CONNECTION_FEATURE_COUNT + 25;
/**
* The feature id for the '<em><b>Support NLS</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int DATABASE_CONNECTION__SUPPORT_NLS = CONNECTION_FEATURE_COUNT + 26;
/**
* The number of structural features of the '<em>Database Connection</em>' class.
* <!-- begin-user-doc --> <!--
@@ -5245,7 +5254,7 @@ public interface ConnectionPackage extends EPackage {
* @generated
* @ordered
*/
int DATABASE_CONNECTION_FEATURE_COUNT = CONNECTION_FEATURE_COUNT + 26;
int DATABASE_CONNECTION_FEATURE_COUNT = CONNECTION_FEATURE_COUNT + 27;
/**
* The meta object id for the '{@link org.talend.core.model.metadata.builder.connection.impl.SAPConnectionImpl <em>SAP Connection</em>}' class.
@@ -21887,6 +21896,17 @@ public interface ConnectionPackage extends EPackage {
*/
EReference getDatabaseConnection_Parameters();
/**
* Returns the meta object for the attribute '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Support NLS</em>'.
* @see org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS()
* @see #getDatabaseConnection()
* @generated
*/
EAttribute getDatabaseConnection_SupportNLS();
/**
* Returns the meta object for class '{@link org.talend.core.model.metadata.builder.connection.SAPConnection <em>SAP Connection</em>}'.
* <!-- begin-user-doc --> <!-- end-user-doc -->
@@ -26439,6 +26459,14 @@ public interface ConnectionPackage extends EPackage {
*/
EReference DATABASE_CONNECTION__PARAMETERS = eINSTANCE.getDatabaseConnection_Parameters();
/**
* The meta object literal for the '<em><b>Support NLS</b></em>' attribute feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EAttribute DATABASE_CONNECTION__SUPPORT_NLS = eINSTANCE.getDatabaseConnection_SupportNLS();
/**
* The meta object literal for the '{@link org.talend.core.model.metadata.builder.connection.impl.SAPConnectionImpl <em>SAP Connection</em>}' class.
* <!-- begin-user-doc --> <!-- end-user-doc -->

View File

@@ -755,4 +755,54 @@ public interface DatabaseConnection extends Connection {
*/
EMap<String, String> getParameters();
/**
* Returns the value of the '<em><b>Support NLS</b></em>' attribute.
* The default value is <code>"false"</code>.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the value of the '<em>Support NLS</em>' attribute.
* @see #isSetSupportNLS()
* @see #unsetSupportNLS()
* @see #setSupportNLS(boolean)
* @see org.talend.core.model.metadata.builder.connection.ConnectionPackage#getDatabaseConnection_SupportNLS()
* @model default="false" unsettable="true"
* @generated
*/
boolean isSupportNLS();
/**
* Sets the value of the '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Support NLS</em>' attribute.
* @see #isSetSupportNLS()
* @see #unsetSupportNLS()
* @see #isSupportNLS()
* @generated
*/
void setSupportNLS(boolean value);
/**
* Unsets the value of the '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetSupportNLS()
* @see #isSupportNLS()
* @see #setSupportNLS(boolean)
* @generated
*/
void unsetSupportNLS();
/**
* Returns whether the value of the '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Support NLS</em>' attribute is set.
* @see #unsetSupportNLS()
* @see #isSupportNLS()
* @see #setSupportNLS(boolean)
* @generated
*/
boolean isSetSupportNLS();
} // DatabaseConnection
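
Because supportNLS is generated as an unsettable attribute, callers can distinguish an explicit value from "never set", which is what the DBConnectionContextUtils cloning code earlier in this diff checks with isSetSupportNLS(). A small hedged sketch follows, assuming the package's generated ConnectionFactory follows the standard EMF factory pattern; the helper names are hypothetical.

import org.talend.core.model.metadata.builder.connection.ConnectionFactory; // assumed standard generated EMF factory
import org.talend.core.model.metadata.builder.connection.DatabaseConnection;

final class SupportNlsSketch {

    // Explicit value wins; an unset attribute falls back to the default (false).
    static boolean effectiveSupportNls(DatabaseConnection conn) {
        return conn.isSetSupportNLS() ? conn.isSupportNLS() : false;
    }

    static void example() {
        DatabaseConnection conn = ConnectionFactory.eINSTANCE.createDatabaseConnection();
        conn.setSupportNLS(true);
        boolean nls = effectiveSupportNls(conn); // true
        conn.unsetSupportNLS();                  // clears the set flag and restores the default
    }
}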

View File

@@ -1562,6 +1562,15 @@ public class ConnectionPackageImpl extends EPackageImpl implements ConnectionPac
return (EReference) databaseConnectionEClass.getEStructuralFeatures().get(25);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getDatabaseConnection_SupportNLS() {
return (EAttribute) databaseConnectionEClass.getEStructuralFeatures().get(26);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
* @generated
@@ -4516,6 +4525,7 @@ public class ConnectionPackageImpl extends EPackageImpl implements ConnectionPac
createEAttribute(databaseConnectionEClass, DATABASE_CONNECTION__SQL_MODE);
createEAttribute(databaseConnectionEClass, DATABASE_CONNECTION__UI_SCHEMA);
createEReference(databaseConnectionEClass, DATABASE_CONNECTION__PARAMETERS);
createEAttribute(databaseConnectionEClass, DATABASE_CONNECTION__SUPPORT_NLS);
sapConnectionEClass = createEClass(SAP_CONNECTION);
createEAttribute(sapConnectionEClass, SAP_CONNECTION__HOST);
@@ -5294,6 +5304,9 @@ public class ConnectionPackageImpl extends EPackageImpl implements ConnectionPac
initEReference(getDatabaseConnection_Parameters(), this.getAdditionalProperties(), null, "parameters", null, 0, -1,
DatabaseConnection.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, IS_RESOLVE_PROXIES,
!IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getDatabaseConnection_SupportNLS(), ecorePackage.getEBoolean(), "supportNLS", "false", 0, 1,
DatabaseConnection.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE,
!IS_DERIVED, IS_ORDERED);
initEClass(sapConnectionEClass, SAPConnection.class, "SAPConnection", !IS_ABSTRACT, !IS_INTERFACE,
IS_GENERATED_INSTANCE_CLASS);

View File

@@ -51,6 +51,7 @@ import org.talend.core.model.metadata.builder.connection.DatabaseConnection;
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#isSQLMode <em>SQL Mode</em>}</li>
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#getUiSchema <em>Ui Schema</em>}</li>
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#getParameters <em>Parameters</em>}</li>
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#isSupportNLS <em>Support NLS</em>}</li>
* </ul>
*
* @generated
@@ -563,6 +564,35 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
*/
protected EMap<String, String> parameters;
/**
* The default value of the '{@link #isSupportNLS() <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSupportNLS()
* @generated
* @ordered
*/
protected static final boolean SUPPORT_NLS_EDEFAULT = false;
/**
* The cached value of the '{@link #isSupportNLS() <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSupportNLS()
* @generated
* @ordered
*/
protected boolean supportNLS = SUPPORT_NLS_EDEFAULT;
/**
* This is true if the Support NLS attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean supportNLSESet = true;
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
* @generated
@@ -1173,6 +1203,54 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return parameters;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSupportNLS() {
return supportNLS;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setSupportNLS(boolean newSupportNLS) {
boolean oldSupportNLS = supportNLS;
supportNLS = newSupportNLS;
boolean oldSupportNLSESet = supportNLSESet;
supportNLSESet = true;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS,
oldSupportNLS, supportNLS, !oldSupportNLSESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void unsetSupportNLS() {
boolean oldSupportNLS = supportNLS;
boolean oldSupportNLSESet = supportNLSESet;
supportNLS = SUPPORT_NLS_EDEFAULT;
supportNLSESet = false;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.UNSET, ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS,
oldSupportNLS, SUPPORT_NLS_EDEFAULT, oldSupportNLSESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSetSupportNLS() {
return supportNLSESet;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
* @generated
@@ -1267,6 +1345,8 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return getParameters();
else
return getParameters().map();
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
return isSupportNLS();
}
return super.eGet(featureID, resolve, coreType);
}
@@ -1335,6 +1415,8 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return getParameters();
else
return getParameters().map();
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
return isSupportNLS();
}
return super.eGet(featureID, resolve, coreType);
}
@@ -1424,6 +1506,9 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
case ConnectionPackage.DATABASE_CONNECTION__PARAMETERS:
((EStructuralFeature.Setting) getParameters()).set(newValue);
return;
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
setSupportNLS((Boolean) newValue);
return;
}
super.eSet(featureID, newValue);
}
@@ -1513,6 +1598,9 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
case ConnectionPackage.DATABASE_CONNECTION__PARAMETERS:
getParameters().clear();
return;
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
unsetSupportNLS();
return;
}
super.eUnset(featureID);
}
@@ -1578,6 +1666,8 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return UI_SCHEMA_EDEFAULT == null ? uiSchema != null : !UI_SCHEMA_EDEFAULT.equals(uiSchema);
case ConnectionPackage.DATABASE_CONNECTION__PARAMETERS:
return parameters != null && !parameters.isEmpty();
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
return isSetSupportNLS();
}
return super.eIsSet(featureID);
}
@@ -1643,6 +1733,11 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
result.append("<unset>");
result.append(", UiSchema: ");
result.append(uiSchema);
result.append(", supportNLS: ");
if (supportNLSESet)
result.append(supportNLS);
else
result.append("<unset>");
result.append(')');
return result.toString();
}

Some files were not shown because too many files have changed in this diff.