Compare commits

...

19 Commits

Author SHA1 Message Date
jzhao
6a813e2a73 chore(TDI-48946):Bump TCK connector 1.27.16 for 7.3.1-R2022-12 (#5839) 2022-11-28 16:33:54 +08:00
Chao MENG
4b7a59b497 fix(TUP-37228): Studio errors when attempting to create a Cloudera Dynamic Distro when repository.apache.org port 80 is blocked (#5823)
* fix(TUP-37228): Studio errors when attempting to create a Cloudera
Dynamic Distro when repository.apache.org port 80 is blocked
https://jira.talendforge.org/browse/TUP-37228

* fix(TUP-37228): Studio errors when attempting to create a Cloudera
Dynamic Distro when repository.apache.org port 80 is blocked
https://jira.talendforge.org/browse/TUP-37228
2022-11-25 14:49:33 +08:00
Jane Ding
d5490d2663 fix(TUP-36933):[7.3.1] import dependencies is not working when we do a copy from branch (#5794)
* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from branch
https://jira.talendforge.org/browse/TUP-36933

* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from branch
https://jira.talendforge.org/browse/TUP-36933

Conflicts:
	main/plugins/org.talend.repository.items.importexport.ui/src/main/java/org/talend/repository/items/importexport/ui/wizard/imports/ImportItemsWizardPage.java

* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from branch
https://jira.talendforge.org/browse/TUP-36933

Conflicts:
	main/plugins/org.talend.repository.items.importexport.ui/src/main/java/org/talend/repository/items/importexport/ui/wizard/imports/ImportItemsWizardPage.java

* fix(TUP-36933):[7.3.1] import dependencies is not working when we do a
copy from branch
https://jira.talendforge.org/browse/TUP-36933
2022-11-22 11:08:29 +08:00
zyuan-talend
fbfc3735ad fix(TUP-37016):CVE-2022-41853, upgrade org.hsqldb:hsqldb:2.3.1 to 2.7.1. (#5809) 2022-11-17 15:30:45 +08:00
zyuan-talend
76fbd6fd32 fix(TUP-37016):CVE-2022-41853, upgrade org.hsqldb:hsqldb:2.3.1 to 2.7.1. (#5766) (#5797) 2022-11-15 16:14:56 +08:00
sbliu
43ef7fa5e0 chore(TUP-36964) upgrade commons-text to 1.10.0 (#5770) 2022-11-09 14:39:39 +08:00
pyzhou
be28a0d122 fix(48799) Upgrade tck in studio 7.3 (#5771) 2022-11-03 23:13:04 +08:00
sbliu
e44522bb69 feat(TUP-35340) fix ui problem of show checkbox (#5755) 2022-10-31 16:59:10 +08:00
sbliu
67f04b7db3 feat(TUP-36340) oracle add support of orai18n.jar (#5703) 2022-10-31 16:04:37 +08:00
pyzhou
2502688e64 Pyzhou/tdi 48676 resume util oom 7.3 (#5742)
* fix(TDI-48676):ResumeUtil OOM

* replace duplicate variable
2022-10-27 14:51:53 +08:00
sbliu
6175aca630 chore(TUP-36930) upgrade jackson-databind to 2.13.4.2 (#5724)
upgrade jackson-databind to 2.13.4.2, jackson-core/jackson-annotations to 2.13.4
2022-10-27 11:06:25 +08:00
zyuan-talend
55f7531d68 fix(TUP-36820):Improve performance of importing large size metadata file (#5677)
xml.
2022-10-24 16:01:24 +08:00
Svitlana Anulich
d6c888e235 fix(TBD-14194): add knox session timeout variable (#5685) (#5718) 2022-10-24 10:24:32 +03:00
Svitlana Anulich
7db18b198f fix(TBD-14328): ClassNotFoundException when check service for CDP 7.x knox (#5715) 2022-10-24 10:20:15 +03:00
pyzhou
039ed90481 fix(TDI-48746) Upgrade tck in studio 7.3 (#5737) 2022-10-24 13:05:33 +08:00
apoltavtsev
eea9c85609 fix(APPINT-35054) Build type for child Jobs is corrected (#5675) 2022-10-10 11:54:35 +02:00
Jane Ding
ca9d09e04e fix(APPINT-35054) Add optional mechanism to align project models (#5671)
BUILD_TYPE
https://jira.talendforge.org/browse/APPINT-35054
2022-10-09 16:19:37 +08:00
jiezhang-tlnd
b5dadab5a5 chore(TUP-36715)CVE: xerces:xercesImpl:2.12.0 (#5609) 2022-10-08 11:07:28 +08:00
apoltavtsev
2add3ffc0e fix(APPINT-35054) Add optional mechanism to align project models BUILD_TYPE (#5661)
* Add files via upload

* Update MavenProjectSettingPage.java

* Update messages.properties

* Add files via upload

* Update MANIFEST.MF

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDsRestMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForSOAPServiceJobMigrationTask.java

* Update BuildTypeManager.java

* Update MavenProjectSettingPage.java

* Update MavenProjectSettingPage.java

* Add files via upload

* Update MavenProjectSettingPage.java

* Update BuildTypeManager.java

* Update AbstractCorrectBuildItemMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDsRestMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForSOAPServiceJobMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update BuildTypeManager.java

* Update BuildTypeManager.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update AbstractCorrectBuildItemMigrationTask.java

* Update CorrectBuildTypeForDIJobMigrationTask.java

* Update CorrectBuildTypeForDsRestMigrationTask.java

* Update CorrectBuildTypeForRoutesMigrationTask.java

* Update CorrectBuildTypeForSOAPServiceJobMigrationTask.java
2022-10-07 09:43:15 +02:00
69 changed files with 2350 additions and 68 deletions

View File

@@ -92,7 +92,7 @@
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.0</version>
<version>2.12.2</version>
</dependency>
<dependency>
<groupId>ch.qos.reload4j</groupId>

View File

@@ -380,5 +380,6 @@ public class ConnParameterKeys {
public static final String CONN_PARA_KEY_KNOX_DIRECTORY="CONN_PARA_KEY_KNOX_DIRECTORY";
public static final String CONN_PARA_KEY_KNOX_TIMEOUT="CONN_PARA_KEY_KNOX_TIMEOUT";
}

View File

@@ -25,8 +25,8 @@ import org.talend.core.database.conn.DatabaseConnConstants;
public enum EDatabaseVersion4Drivers {
// access
ACCESS_JDBC(new DbVersion4Drivers(EDatabaseTypeName.ACCESS, new String[] {
"jackcess-2.1.0.jar", "ucanaccess-2.0.9.5.jar", "commons-lang-2.6.jar", "commons-logging-1.1.1.jar", "hsqldb.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"jackcess-encrypt-2.1.0.jar", "bcprov-jdk15on-1.51.jar", "talend-ucanaccess-utils-1.0.0.jar" })),
"jackcess-2.1.12.jar", "ucanaccess-2.0.9.5.jar", "commons-lang-2.6.jar", "commons-logging-1.1.3.jar", "hsqldb.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"jackcess-encrypt-2.1.4.jar", "bcprov-jdk15on-1.69.jar", "talend-ucanaccess-utils-1.0.0.jar" })),
ACCESS_2003(new DbVersion4Drivers(EDatabaseTypeName.ACCESS, "Access 2003", "Access_2003")), //$NON-NLS-1$ //$NON-NLS-2$
ACCESS_2007(new DbVersion4Drivers(EDatabaseTypeName.ACCESS, "Access 2007", "Access_2007")), //$NON-NLS-1$ //$NON-NLS-2$
// oracle
@@ -171,8 +171,10 @@ public enum EDatabaseVersion4Drivers {
REDSHIFT(new DbVersion4Drivers(EDatabaseTypeName.REDSHIFT, "redshift", "REDSHIFT", //$NON-NLS-1$ //$NON-NLS-2$
new String[]{ "redshift-jdbc42-no-awssdk-1.2.55.1083.jar", "antlr4-runtime-4.8-1.jar" })), //$NON-NLS-1$ //$NON-NLS-2$
REDSHIFT_SSO(new DbVersion4Drivers(EDatabaseTypeName.REDSHIFT_SSO, "redshift sso", "REDSHIFT_SSO", //$NON-NLS-1$ //$NON-NLS-2$
new String[] { "redshift-jdbc42-no-awssdk-1.2.55.1083.jar", "antlr4-runtime-4.8-1.jar", "aws-java-sdk-1.11.848.jar", "jackson-core-2.10.1.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
"jackson-databind-2.10.1.jar", "jackson-annotations-2.10.1.jar", "httpcore-4.4.11.jar", "httpclient-4.5.9.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$//$NON-NLS-4$
new String[] { "redshift-jdbc42-no-awssdk-1.2.55.1083.jar", "antlr4-runtime-4.8-1.jar", "aws-java-sdk-1.11.848.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"jackson-core-2.13.4.jar", //$NON-NLS-1$
"jackson-databind-2.13.4.2.jar", "jackson-annotations-2.13.4.jar", "httpcore-4.4.11.jar", //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"httpclient-4.5.9.jar", //$NON-NLS-1$
"joda-time-2.8.1.jar", "commons-logging-1.2.jar", "commons-codec-1.11.jar" })), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
AMAZON_AURORA(new DbVersion4Drivers(EDatabaseTypeName.AMAZON_AURORA, "mysql-connector-java-5.1.49.jar")); //$NON-NLS-1$

View File

@@ -38,7 +38,10 @@ public class HadoopClassLoaderFactory2 {
public static ClassLoader getHDFSClassLoader(String relatedClusterId, String distribution, String version, boolean useKrb) {
return getClassLoader(relatedClusterId, EHadoopCategory.HDFS, distribution, version, useKrb);
}
public static ClassLoader getHDFSKnoxClassLoader(String relatedClusterId, String distribution, String version, boolean useKrb) {
return HadoopClassLoaderFactory2.getClassLoader(relatedClusterId, EHadoopCategory.HDFS, distribution, version, useKrb,
IHadoopArgs.HDFS_ARG_KNOX);
}
public static ClassLoader getMRClassLoader(String relatedClusterId, String distribution, String version, boolean useKrb) {
return getClassLoader(relatedClusterId, EHadoopCategory.MAP_REDUCE, distribution, version, useKrb);
}

View File

@@ -22,4 +22,6 @@ public interface IHadoopArgs {
public static final String HIVE_ARG_STANDALONE = "STANDALONE"; //$NON-NLS-1$
public static final String HDFS_ARG_KNOX = "USE_KNOX"; //$NON-NLS-1$
}

View File

@@ -194,6 +194,10 @@ public interface IMetadataConnection extends IMetadata {
public String getContextName();
public void setContextName(String contextName);
public boolean isSupportNLS();
public void setSupportNLS(boolean newSupportNLS);
/**
* Returns the value that you stored in the data collection by the key. Normally, it is like this key-value. For

View File

@@ -254,6 +254,7 @@ public final class ConvertionHelper {
result.setContentModel(connection.isContextMode());
result.setContextId(sourceConnection.getContextId());
result.setContextName(sourceConnection.getContextName());
result.setSupportNLS(sourceConnection.isSupportNLS());
// handle oracle database connnection of general_jdbc.
result.setSchema(getMeataConnectionSchema(result));
convertOtherParameters(result, connection);

View File

@@ -113,6 +113,7 @@ public class MetadataConnection implements IMetadataConnection {
private String contextName;
private boolean supportNLS = false;
// ~
private String comment;
@@ -729,6 +730,14 @@ public class MetadataConnection implements IMetadataConnection {
public void setContextName(String contextName) {
this.contextName = contextName;
}
public boolean isSupportNLS() {
return supportNLS;
}
public void setSupportNLS(boolean supportNLS) {
this.supportNLS = supportNLS;
}
/*
* (non-Javadoc)

View File

@@ -1225,6 +1225,11 @@ public class RepositoryToComponentProperty {
return value2;
}
if(value.equals("SUPPORT_NLS")) {
return connection.isSupportNLS();
}
if (value.equals("CDC_TYPE_MODE")) { //$NON-NLS-1$
return new Boolean(CDCTypeMode.LOG_MODE.getName().equals(connection.getCdcTypeMode()));
}

View File

@@ -46,6 +46,8 @@ import org.talend.designer.maven.aether.util.TalendAetherProxySelector;
*/
public class RepositorySystemFactory {
private static Boolean ignoreArtifactDescriptorRepositories;
private static Map<LocalRepository, DefaultRepositorySystemSession> sessions = new HashMap<LocalRepository, DefaultRepositorySystemSession>();
private static DefaultRepositorySystemSession newRepositorySystemSession(String localRepositoryPath)
@@ -61,6 +63,8 @@ public class RepositorySystemFactory {
repositorySystemSession.setTransferListener(new ChainedTransferListener());
repositorySystemSession.setRepositoryListener(new ChainedRepositoryListener());
repositorySystemSession.setProxySelector(new TalendAetherProxySelector());
repositorySystemSession.setIgnoreArtifactDescriptorRepositories(
RepositorySystemFactory.isIgnoreArtifactDescriptorRepositories());
sessions.put(localRepo, repositorySystemSession);
}
@@ -157,4 +161,13 @@ public class RepositorySystemFactory {
doDeploy(content, pomFile, localRepository, repositoryId, repositoryUrl, userName, password, groupId, artifactId,
classifier, extension, version);
}
public static boolean isIgnoreArtifactDescriptorRepositories() {
if (ignoreArtifactDescriptorRepositories == null) {
ignoreArtifactDescriptorRepositories = Boolean.valueOf(
System.getProperty("talend.studio.aether.ignoreArtifactDescriptorRepositories", Boolean.TRUE.toString()));
}
return ignoreArtifactDescriptorRepositories;
}
}
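
The TUP-37228 fix works by telling Aether to ignore the remote repositories declared inside artifact descriptors (such as repository.apache.org on port 80), and the new isIgnoreArtifactDescriptorRepositories() method makes that behaviour switchable. A minimal sketch of turning it back off, assuming the property is passed as a JVM argument to the Studio (for example after -vmargs in the Studio .ini file); the property name is taken from the code above:

    -Dtalend.studio.aether.ignoreArtifactDescriptorRepositories=false

By default the property is unset, so the flag resolves to true and descriptor repositories are ignored.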

View File

@@ -63,6 +63,7 @@ import org.eclipse.m2e.core.MavenPlugin;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.designer.maven.aether.DummyDynamicMonitor;
import org.talend.designer.maven.aether.IDynamicMonitor;
import org.talend.designer.maven.aether.RepositorySystemFactory;
import org.talend.designer.maven.aether.node.DependencyNode;
import org.talend.designer.maven.aether.node.ExclusionNode;
import org.talend.designer.maven.aether.selector.DynamicDependencySelector;
@@ -513,6 +514,7 @@ public class DynamicDistributionAetherUtils {
LocalRepository localRepo = new LocalRepository(repositoryPath);
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
session.setProxySelector(new TalendAetherProxySelector());
session.setIgnoreArtifactDescriptorRepositories(RepositorySystemFactory.isIgnoreArtifactDescriptorRepositories());
updateDependencySelector(session, monitor);

View File

@@ -34,7 +34,6 @@ import org.codehaus.plexus.PlexusContainerException;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
import org.eclipse.aether.impl.DefaultServiceLocator;
@@ -57,6 +56,7 @@ import org.talend.core.nexus.ArtifactRepositoryBean;
import org.talend.core.nexus.NexusConstants;
import org.talend.core.nexus.TalendLibsServerManager;
import org.talend.core.runtime.maven.MavenArtifact;
import org.talend.designer.maven.aether.RepositorySystemFactory;
public class MavenLibraryResolverProvider {
@@ -283,8 +283,9 @@ public class MavenLibraryResolverProvider {
LocalRepository localRepo = new LocalRepository( /* "target/local-repo" */target);
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
session.setProxySelector(new TalendAetherProxySelector());
session.setIgnoreArtifactDescriptorRepositories(RepositorySystemFactory.isIgnoreArtifactDescriptorRepositories());
return session;
return session;
}
private String getLocalMVNRepository() {

View File

@@ -151,6 +151,12 @@
<groupId>org.talend.components</groupId>
<artifactId>components-marklogic-runtime</artifactId>
<version>${components.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.talend.components</groupId>
@@ -210,6 +216,11 @@
<artifactId>commons-beanutils</artifactId>
<version>1.9.4</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.10.0</version>
</dependency>
</dependencies>
<build>
<plugins>

View File

@@ -20,7 +20,7 @@
<module>zip/pom.xml</module>
</modules>
<properties>
<m2.fasterxml.jackson.version>2.13.2</m2.fasterxml.jackson.version>
<m2.fasterxml.jackson.version>2.13.4</m2.fasterxml.jackson.version>
<jackson-codehaus.version>1.9.16-TALEND</jackson-codehaus.version>
</properties>
</project>

View File

@@ -10,7 +10,7 @@
<artifactId>studio-tacokit-dependencies</artifactId>
<packaging>pom</packaging>
<properties>
<tacokit.components.version>1.27.13</tacokit.components.version>
<tacokit.components.version>1.27.16</tacokit.components.version>
</properties>
<repositories>
<repository>

View File

@@ -10,10 +10,11 @@ ProjectPomProjectSettingPage_ConfirmMessage=Will apply and update for project PO
ProjectPomProjectSettingPage_FilterPomLabel=Filter to use to generate poms:
ProjectPomProjectSettingPage_FilterErrorMessage=Invalid filter: {0}
ProjectPomProjectSettingPage.syncAllPomsButtonText=Force full re-synchronize poms
ProjectPomProjectSettingPage.syncBuildTypesButtonText=Force full re-synchronize build types
AbstractPersistentProjectSettingPage.syncAllPoms=Do you want to update all poms? \n This operation might take long time depends on your project size.
MavenProjectSettingPage.filterExampleMessage=Filter examples:\nlabel=myJob \t\t\t\t=> Generate only the job named "myJob"\n!(label=myJob) \t\t\t\t=> Generate any job except the one named "myJob"\n(path=folder1/folder2) \t\t\t=> Generate any job in the folder "folder1/folder2"\n(path=folder1/folder2)or(label=myJob)\t=> Generate any job in the folder "folder1/folder2" or named "myJob"\n(label=myJob)and(version=0.2) \t=> Generate only the job named "myJob" with version 0.2\n!((label=myJob)and(version=0.1)) \t=> Generate every jobs except the "myJob" version 0.1
MavenProjectSettingPage.refModuleText=Set reference project modules in profile
MavenProjectSettingPage.excludeDeletedItems=Exclude deleted items
MavenProjectSettingPage.syncAllPomsWarning=Click the Force full re-synchronize poms button to apply the new settings.
MavenProjectSettingPage.skipFolders=Skip folders
BuildProjectSettingPage.allowRecursiveJobs=Allow recursive jobs (Not recommended)
BuildProjectSettingPage.allowRecursiveJobs=Allow recursive jobs (Not recommended)

View File

@@ -34,6 +34,7 @@ import org.talend.core.runtime.projectsetting.AbstractProjectSettingPage;
import org.talend.core.runtime.services.IFilterService;
import org.talend.designer.maven.DesignerMavenPlugin;
import org.talend.designer.maven.tools.AggregatorPomsHelper;
import org.talend.designer.maven.tools.BuildTypeManager;
import org.talend.designer.maven.ui.i18n.Messages;
/**
@@ -169,7 +170,25 @@ public class MavenProjectSettingPage extends AbstractProjectSettingPage {
});
}
if (isSyncBuildTypeAllowed()) {
Button syncBuildTypes = new Button(parent, SWT.NONE);
syncBuildTypes.setText(Messages.getString("ProjectPomProjectSettingPage.syncBuildTypesButtonText")); //$NON-NLS-1$
syncBuildTypes.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent event) {
try {
new BuildTypeManager().syncBuildTypes(getCurrentPage());
} catch (Exception e) {
ExceptionHandler.process(e);
}
}
});
}
}
private void addSyncWarning() {
setMessage(Messages.getString("MavenProjectSettingPage.syncAllPomsWarning"), IMessage.WARNING); //$NON-NLS-1$
@@ -203,4 +222,12 @@ public class MavenProjectSettingPage extends AbstractProjectSettingPage {
return displayVersion;
}
private static boolean isSyncBuildTypeAllowed() {
return Boolean.getBoolean("talend.builtype.syncallowed");
}
private MavenProjectSettingPage getCurrentPage() {
return this;
}
}
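
The "Force full re-synchronize build types" button added here is the optional part of the APPINT-35054 mechanism: isSyncBuildTypeAllowed() only returns true when the corresponding JVM property is set, so the button stays hidden by default. A minimal sketch of enabling it, assuming the property is passed as a Studio JVM argument; the property name comes from the code above:

    -Dtalend.builtype.syncallowed=true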

View File

@@ -23,7 +23,8 @@ Require-Bundle: org.eclipse.core.runtime,
org.talend.common.ui.runtime,
org.talend.core.runtime,
org.eclipse.jface,
org.talend.core.repository
org.talend.core.repository,
org.talend.core
Bundle-ActivationPolicy: lazy
Export-Package: org.talend.designer.maven,
org.talend.designer.maven.launch,

View File

@@ -0,0 +1,149 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.Property;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.core.ui.ITestContainerCoreService;
/**
* DOC jding class global comment. Detailled comment
*/
public class ItemReportRecorder {
protected Item item;
protected String detailMessage;
public String getItemType() {
String type = "";
ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
if (itemType != null) {
if (ERepositoryObjectType.getAllTypesOfTestContainer().contains(itemType)) {
Item parentJobItem = getTestCaseParentJobItem(item);
if (parentJobItem != null) {
ERepositoryObjectType parentJobType = ERepositoryObjectType.getItemType(parentJobItem);
if (parentJobType != null) {
String parentTypePath = getCompleteObjectTypePath(parentJobType);
if (StringUtils.isNotBlank(parentTypePath)) {
type = parentTypePath + "/";
}
}
}
type += itemType;
} else {
type = getCompleteObjectTypePath(itemType);
}
}
return type;
}
public String getItemPath() {
String path = "";
StringBuffer buffer = new StringBuffer();
ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
if (ERepositoryObjectType.getAllTypesOfTestContainer().contains(itemType)) {
StringBuffer testcaseBuffer = new StringBuffer();
Item parentJobItem = getTestCaseParentJobItem(item);
if (parentJobItem != null) {
if (parentJobItem.getState() != null && StringUtils.isNotBlank(parentJobItem.getState().getPath())) {
testcaseBuffer.append(parentJobItem.getState().getPath()).append("/");
}
testcaseBuffer.append(parentJobItem.getProperty() != null ? parentJobItem.getProperty().getLabel() : "");
if (StringUtils.isNotBlank(testcaseBuffer.toString())) {
buffer.append(testcaseBuffer.toString()).append("/");
}
}
} else {
if (item.getState() != null && StringUtils.isNotBlank(item.getState().getPath())) {
buffer.append(item.getState().getPath()).append("/");
}
}
Property property = item.getProperty();
if (property != null) {
buffer.append(property.getLabel() + "_" + property.getVersion());
}
path = buffer.toString();
return path;
}
private Item getTestCaseParentJobItem(Item testcaseItem) {
Item parentJobItem = null;
if (GlobalServiceRegister.getDefault().isServiceRegistered(ITestContainerCoreService.class)) {
ITestContainerCoreService testcaseService = GlobalServiceRegister.getDefault()
.getService(ITestContainerCoreService.class);
if (testcaseService != null) {
try {
parentJobItem = testcaseService.getParentJobItem(item);
} catch (PersistenceException e) {
ExceptionHandler.process(e);
}
}
}
return parentJobItem;
}
private String getCompleteObjectTypePath(ERepositoryObjectType itemType) {
ERepositoryObjectType rootItemType = itemType;
if (ERepositoryObjectType.JDBC != null && ERepositoryObjectType.JDBC.equals(rootItemType)) {
rootItemType = ERepositoryObjectType.METADATA_CONNECTIONS;
}
List<String> typeLabels = new ArrayList<String>();
findOutCompleteTypePath(rootItemType, typeLabels);
StringBuffer buffer = new StringBuffer();
if (!typeLabels.isEmpty()) {
for (int i = 0; i < typeLabels.size(); i++) {
if (i != 0) {
buffer.append("/");
}
buffer.append(typeLabels.get(i));
}
}
return buffer.toString();
}
private void findOutCompleteTypePath(ERepositoryObjectType type, List<String> typeLabels) {
ERepositoryObjectType parentType = ERepositoryObjectType.findParentType(type);
if (parentType != null) {
findOutCompleteTypePath(parentType, typeLabels);
}
typeLabels.add(type.getLabel());
}
public Item getItem() {
return item;
}
public void setItem(Item item) {
this.item = item;
}
public String getDetailMessage() {
return detailMessage;
}
public void setDetailMessage(String detailMessage) {
this.detailMessage = detailMessage;
}
}

View File

@@ -0,0 +1,81 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.eclipse.core.runtime.Path;
import org.talend.core.utils.TalendQuoteUtils;
/**
* DOC jding class global comment. Detailled comment
*/
public class ItemsReportUtil {
public static boolean generateReportFile(File reportFile, String header, List<String> records) throws Exception {
boolean generateDone = false;
BufferedWriter printWriter = null;
try {
File parentFolder = new Path(reportFile.getAbsolutePath()).removeLastSegments(1).toFile();
// File parentFolder = new File(parentPath);
if (!parentFolder.exists()) {
parentFolder.mkdirs();
}
if (!reportFile.exists()) {
reportFile.createNewFile();
}
FileOutputStream fos = new FileOutputStream(reportFile);
fos.write(new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF });
OutputStreamWriter outputWriter = new OutputStreamWriter(fos, "UTF-8");
printWriter = new BufferedWriter(outputWriter);
printWriter.write(header);
printWriter.newLine();
for (String recordStr : records) {
printWriter.write(recordStr);
printWriter.newLine();
}
printWriter.flush();
generateDone = true;
} finally {
if (printWriter != null) {
printWriter.close();
}
}
return generateDone;
}
public static String handleColumnQuotes(String text) {
String quoteMark = TalendQuoteUtils.QUOTATION_MARK;
text = StringUtils.isBlank(text) ? "" : text;
if (text.contains(quoteMark)) {
// replace to double quote surround
text = text.replace(quoteMark, quoteMark + quoteMark);
}
return quoteMark + text + quoteMark;
}
public static String getCurrentTimeString() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String time = dateFormat.format(new Date());
return time;
}
}
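
handleColumnQuotes() prepares a single CSV column for the migration report: blank values become an empty quoted field, embedded quotation marks are doubled, and the result is wrapped in quotes. A small illustration, assuming TalendQuoteUtils.QUOTATION_MARK is the double-quote character:

    ItemsReportUtil.handleColumnQuotes("my \"copy\" job");  // -> "my ""copy"" job"
    ItemsReportUtil.handleColumnQuotes(null);               // -> "" (empty quoted field)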

View File

@@ -0,0 +1,177 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.preferences.ConfigurationScope;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import org.eclipse.core.runtime.preferences.IScopeContext;
import org.osgi.service.prefs.BackingStoreException;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.core.PluginChecker;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.Property;
import org.talend.migration.IProjectMigrationTask;
/**
* DOC jding class global comment. Detailled comment
*/
public class MigrationReportHelper {
private static final String COMMA = ",";
private static final String PLUGIN_ID = "org.talend.designer.maven";
private static final String DO_NOT_SHOW_PREF_KEY = "talend.migrationReportDialog.doNotShowAgain";
private static final String MIGRATION_REPORT_HEAD = "Task name,Task description,Item type,Path to migrated item,Migration details";
private static final MigrationReportHelper instance = new MigrationReportHelper();
public static MigrationReportHelper getInstance() {
return instance;
}
private String reportGeneratedPath = "";
private Set<String> taskItemRecords = new HashSet<String>();
private List<MigrationReportRecorder> migrationReportRecorders = new ArrayList<MigrationReportRecorder>();
public void generateMigrationReport(String projectTecName) {
if (migrationReportRecorders == null || migrationReportRecorders.isEmpty()) {
return;
}
if (!PluginChecker.isTIS()) {
clearRecorders();
return;
}
File exportFolder = null;
File reportFile = null;
try {
String currentTime = getCurrentTime();
String filePath = getReportExportFolder(currentTime) + "/" + getReportFileName(currentTime, projectTecName);
reportGeneratedPath = filePath;
reportFile = new File(filePath);
List<String> recordLines = new ArrayList<String>();
for (MigrationReportRecorder record : migrationReportRecorders) {
StringBuffer buffer = new StringBuffer();
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getTaskClassName())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getTaskDescription())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getItemType())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getItemPath())).append(COMMA);
buffer.append(ItemsReportUtil.handleColumnQuotes(record.getDetailMessage()));
recordLines.add(buffer.toString());
}
ItemsReportUtil.generateReportFile(reportFile, MIGRATION_REPORT_HEAD, recordLines);
} catch (Exception e) {
ExceptionHandler.process(e);
if (reportFile != null && reportFile.exists()) {
reportFile.delete();
}
if (exportFolder != null && exportFolder.exists()) {
exportFolder.delete();
}
} finally {
migrationReportRecorders.clear();
taskItemRecords.clear();
}
}
public boolean isRequireDefaultRecord(IProjectMigrationTask task, Item item) {
boolean require = true;
if (task.getOrder() != null) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(task.getOrder());
int year = calendar.get(Calendar.YEAR);
if (year <= 2016) {
return false;
}
}
if (item.getProperty() != null) {
Property property = item.getProperty();
String key = task.getId() + "_" + property.getId() + "_" + property.getVersion();
if (taskItemRecords.contains(key)) {
return false;
}
}
return require;
}
private String getCurrentTime() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String time = dateFormat.format(new Date());
return time;
}
private String getReportExportFolder(String time) {
String folderName = "migrationReport" + "_" + time;
String path = ResourcesPlugin.getWorkspace().getRoot().getLocation().toString() + "/report/" + folderName;
return path;
}
private String getReportFileName(String time, String projectTecName) {
String fileName = time + "_" + projectTecName + "_" + "Migration" + "_" + "Report.csv";
return fileName;
}
public static void storeDoNotShowAgainPref(boolean selected) {
IScopeContext scopeContext = ConfigurationScope.INSTANCE;
IEclipsePreferences pref = scopeContext.getNode(PLUGIN_ID);
pref.putBoolean(DO_NOT_SHOW_PREF_KEY, selected);
try {
pref.flush();
} catch (BackingStoreException e) {
ExceptionHandler.process(e);
}
}
public static boolean isReportDialogDisable() {
IScopeContext scopeContext = ConfigurationScope.INSTANCE;
IEclipsePreferences pref = scopeContext.getNode(PLUGIN_ID);
return pref.getBoolean(DO_NOT_SHOW_PREF_KEY, false);
}
public void addRecorder(MigrationReportRecorder recorder) {
if (recorder != null) {
migrationReportRecorders.add(recorder);
if (recorder.getItem() != null && recorder.getItem().getProperty() != null) {
Property property = recorder.getItem().getProperty();
taskItemRecords.add(recorder.getTask().getId() + "_" + property.getId() + "_" + property.getVersion());
}
}
}
public void clearRecorders() {
reportGeneratedPath = "";
migrationReportRecorders.clear();
taskItemRecords.clear();
}
public String getReportGeneratedPath() {
return reportGeneratedPath;
}
}
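
Each MigrationReportRecorder becomes one line of the generated CSV, using the columns from MIGRATION_REPORT_HEAD with every value run through ItemsReportUtil.handleColumnQuotes(). A hypothetical row (all values invented for illustration; only the header line is taken from the code above) could look like:

    Task name,Task description,Item type,Path to migrated item,Migration details
    "CorrectBuildTypeForDsRestMigrationTask","Synchronize build types for DS Rest jobs","Job Designs","folder1/myJob_0.1","Build Type was changed to OSGI"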

View File

@@ -0,0 +1,197 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.maven.migration.common;
import org.apache.commons.lang.StringUtils;
import org.talend.core.model.components.ComponentUtilities;
import org.talend.core.model.properties.Item;
import org.talend.core.utils.TalendQuoteUtils;
import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.migration.IProjectMigrationTask;
/**
* DOC jding class global comment. Detailled comment
*/
public class MigrationReportRecorder extends ItemReportRecorder {
private IProjectMigrationTask task;
private MigrationOperationType operationType;
private NodeType node;
private String paramName;
private String oldValue;
private String newValue;
public enum MigrationOperationType {
ADD,
MODIFY,
DELETE
}
public MigrationReportRecorder(IProjectMigrationTask task, Item item) {
super();
this.task = task;
this.item = item;
}
public MigrationReportRecorder(IProjectMigrationTask task, MigrationOperationType operationType, Item item, NodeType node,
String paramName, String oldValue, String newValue) {
super();
this.task = task;
this.operationType = operationType;
this.item = item;
this.node = node;
this.paramName = paramName;
this.oldValue = oldValue;
this.newValue = newValue;
}
public MigrationReportRecorder(IProjectMigrationTask task, Item item, String detailMessage) {
super();
this.task = task;
this.item = item;
this.detailMessage = detailMessage;
}
public String getTaskClassName() {
return task.getClass().getSimpleName();
}
public String getTaskDescription() {
String description = "";
if (StringUtils.isNotBlank(task.getDescription())) {
description = task.getDescription();
}
return description;
}
public String getDetailMessage() {
String details = detailMessage;
if (StringUtils.isNotBlank(detailMessage)) {
return details;
}
if (operationType == null || StringUtils.isBlank(paramName)) {
details = getTaskClassName() + " task is applied";
return details;
}
StringBuffer detailBuffer = new StringBuffer();
if (node != null) {
// migration for node, e.g. tRESTClient component "tRESTClient_2":
detailBuffer.append(node.getComponentName()).append(" component ");
ElementParameterType uniqueName = ComponentUtilities.getNodeProperty(node, "UNIQUE_NAME");
detailBuffer.append(TalendQuoteUtils.addQuotes(uniqueName.getValue())).append(":");
} else {
// migration for item, e.g. context item "testContext":
detailBuffer.append(getItemType() + " item ")
.append(TalendQuoteUtils.addQuotes(item.getProperty().getLabel())).append(":");
}
detailBuffer.append(paramName).append(" was ");
switch (operationType) {
case ADD:
detailBuffer.append("added");
if (StringUtils.isNotBlank(newValue)) {
detailBuffer.append(" with ").append(newValue);
}
break;
case MODIFY:
detailBuffer.append("changed");
if (StringUtils.isNotBlank(oldValue)) {
detailBuffer.append(" from ").append(oldValue);
}
if (StringUtils.isNotBlank(newValue)) {
detailBuffer.append(" to ").append(newValue);
}
break;
case DELETE:
detailBuffer.append("deleted");
break;
default:
break;
}
details = detailBuffer.toString();
return details;
}
/**
* Getter for task.
*
* @return the task
*/
public IProjectMigrationTask getTask() {
return task;
}
/**
* Sets the task.
*
* @param task the task to set
*/
public void setTask(IProjectMigrationTask task) {
this.task = task;
}
/**
* Sets the operationType.
*
* @param operationType the operationType to set
*/
public void setOperationType(MigrationOperationType operationType) {
this.operationType = operationType;
}
/**
* Sets the node.
*
* @param node the node to set
*/
public void setNode(NodeType node) {
this.node = node;
}
/**
* Sets the paramName.
*
* @param paramName the paramName to set
*/
public void setParamName(String paramName) {
this.paramName = paramName;
}
/**
* Sets the oldValue.
*
* @param oldValue the oldValue to set
*/
public void setOldValue(String oldValue) {
this.oldValue = oldValue;
}
/**
* Sets the newValue.
*
* @param newValue the newValue to set
*/
public void setNewValue(String newValue) {
this.newValue = newValue;
}
}

View File

@@ -0,0 +1,121 @@
package org.talend.designer.maven.migration.tasks;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.commons.exception.PersistenceException;
import org.talend.commons.runtime.model.emf.EmfHelper;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.migration.AbstractItemMigrationTask;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.JobletProcessItem;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.core.repository.utils.ConvertJobsUtil;
import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportHelper;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
import org.talend.repository.model.IProxyRepositoryFactory;
import org.talend.repository.model.IRepositoryService;
public abstract class AbstractCorrectBuildItemMigrationTask extends AbstractItemMigrationTask implements ICorrectBuildTypeMigrationTask {
protected static final String BUILD_TYPE_PROPERTY = "BUILD_TYPE";
protected static final String BUILD_TYPE_STANDALONE = "STANDALONE";
protected static final String BUILD_TYPE_OSGI = "OSGI";
protected static final String BUILD_TYPE_ROUTE = "ROUTE";
protected static final String BUILD_TYPE_ROUTE_MICROSERVICE = "ROUTE_MICROSERVICE";
protected static final String REST_MS = "REST_MS";
protected static Map<String, String> migratedJobs = new HashMap<String, String>();
protected static void clearMigratedJobs () {
migratedJobs.clear();
}
protected static void skipMigrationForJob (String jobName, String migrationTask) {
migratedJobs.put(jobName, migrationTask);
}
protected static void storeMigratedJob (String jobName, String migrationTask) {
migratedJobs.put(jobName, migrationTask);
}
protected static boolean isJobMigrated (String jobName) {
return migratedJobs.containsKey(jobName);
}
protected static String getStoredJobMigraionTask (String jobName) {
return migratedJobs.get(jobName);
}
public void generateReportRecord(MigrationReportRecorder recorder) {
MigrationReportHelper.getInstance().addRecorder(recorder);
}
public static List<NodeType> searchComponent(ProcessType processType, IComponentFilter filter) {
List<NodeType> list = new ArrayList<NodeType>();
if (filter == null || processType == null) {
return list;
}
for (Object o : processType.getNode()) {
if (filter.accept((NodeType) o)) {
list.add((NodeType) o);
}
}
return list;
}
public void save(Item item) throws PersistenceException {
IRepositoryService service = (IRepositoryService) GlobalServiceRegister.getDefault()
.getService(IRepositoryService.class);
IProxyRepositoryFactory factory = service.getProxyRepositoryFactory();
factory.save(item, true);
}
public ProcessType getProcessType(Item item) {
ProcessType processType = null;
if (item instanceof ProcessItem) {
processType = ((ProcessItem) item).getProcess();
}
if (item instanceof JobletProcessItem) {
processType = ((JobletProcessItem) item).getJobletProcess();
}
if (processType != null) {
EmfHelper.visitChilds(processType);
ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
if (itemType == ERepositoryObjectType.TEST_CONTAINER
&& !ConvertJobsUtil.JobType.STANDARD.getDisplayName().equalsIgnoreCase(processType.getJobType())) {
return null;
}
}
return processType;
}
/**
* Find element parameter with a given parameter name
*
* @param paramName
* @param elementParameterTypes
* @return
*/
public static final ElementParameterType findElementParameterByName(String paramName, NodeType node) {
for (Object obj : node.getElementParameter()) {
ElementParameterType cpType = (ElementParameterType) obj;
if (paramName.equals(cpType.getName())) {
return cpType;
}
}
return null;
}
abstract public void clear ();
}

View File

@@ -0,0 +1,17 @@
package org.talend.designer.maven.migration.tasks;
import java.util.ArrayList;
import java.util.List;
import org.talend.core.model.repository.ERepositoryObjectType;
public abstract class AbstractDataServiceJobMigrationTask extends AbstractCorrectBuildItemMigrationTask {
@Override
public List<ERepositoryObjectType> getTypes() {
List<ERepositoryObjectType> toReturn = new ArrayList<ERepositoryObjectType>();
toReturn.add(ERepositoryObjectType.PROCESS);
return toReturn;
}
}

View File

@@ -0,0 +1,18 @@
package org.talend.designer.maven.migration.tasks;
import java.util.ArrayList;
import java.util.List;
import org.talend.core.model.repository.ERepositoryObjectType;
public abstract class AbstractRouteMigrationTask extends AbstractCorrectBuildItemMigrationTask {
@Override
public List<ERepositoryObjectType> getTypes() {
List<ERepositoryObjectType> toReturn = new ArrayList<ERepositoryObjectType>();
toReturn.add(ERepositoryObjectType.PROCESS_ROUTE);
toReturn.add(ERepositoryObjectType.PROCESS_ROUTE_MICROSERVICE);
return toReturn;
}
}

View File

@@ -0,0 +1,268 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.general.Project;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.ProcessItem;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
import org.talend.designer.runprocess.ItemCacheManager;
import org.talend.repository.ProjectManager;
/*
* If Job does not contain any of the following components: "tRouteInput", "tRESTClient", "tESBConsumer"
* then BUILD_TYPE must be STANDALONE
* Else (job contains one of "tRouteInput", "tRESTClient" or "tESBConsumer")
* If no BUILD_TYPE is set then default BUILD_TYPE must be STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
* If BUILD_TYPE is STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
* If BUILD_TYPE is ROUTE > EXCEPTION: need warning message! BUILD_TYPE was wrongly set to ROUTE from a previous migration task and has to be manually updated (all subjobs have to be checked manually). Value should be either STANDALONE (in most cases) or OSGI.
*/
public class CorrectBuildTypeForDIJobMigrationTask extends AbstractDataServiceJobMigrationTask {
private static final String[] ESB_COMPONENTS = { "tRouteInput", "tRESTClient", "tESBConsumer" };
private static final String T_RUB_JOB_COMPONENT = "tRunJob";
boolean failure = false;
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
String jobName = item.getProperty().getLabel();
/*
* Migrating remaining jobs only (which was not migrated previously)
*/
if (isJobMigrated(jobName)) {
return ExecutionResult.NOTHING_TO_DO;
}
Object originalBuildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
/*
* If BUILD_TYPE is ROUTE > EXCEPTION: need warning message! BUILD_TYPE was
* wrongly set to ROUTE from a previous migration task and has to be manually
* updated (all subjobs have to be checked manually). Value should be either
* STANDALONE (in most cases) or OSGI.
*/
if (originalBuildType != null && BUILD_TYPE_ROUTE.equalsIgnoreCase((String) originalBuildType)) {
ExceptionHandler.process(new RuntimeException("Job [" + jobName + "] has incorrect BUILD_TYPE ["
+ BUILD_TYPE_ROUTE
+ "] which has to be manually updated (all subjobs have to be checked manually). Value should be either STANDALONE (in most cases) or OSGI"));
return ExecutionResult.FAILURE;
}
for (String name : ESB_COMPONENTS) {
boolean modified = false;
IComponentFilter filter = new NameComponentFilter(name);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
/*
* job contains one of "tRouteInput", "tRESTClient" or "tESBConsumer") If no
* BUILD_TYPE is set then default BUILD_TYPE must be STANDALONE
*/
if (null == originalBuildType) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_STANDALONE);
try {
save(item);
modified |= true;
generateReportRecord(
new MigrationReportRecorder(this, MigrationReportRecorder.MigrationOperationType.MODIFY,
item, null, "Build Type", null, BUILD_TYPE_STANDALONE));
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
/*
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
String currentParentJobBuildType = (String) item.getProperty().getAdditionalProperties()
.get(BUILD_TYPE_PROPERTY);
if (BUILD_TYPE_STANDALONE.equalsIgnoreCase(currentParentJobBuildType)) {
updateBuildTypeForSubJobs(item, currentParentJobBuildType);
}
if (failure) {
return ExecutionResult.FAILURE;
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
return ExecutionResult.NOTHING_TO_DO;
}
}
/*
* If Job does not contain any of the following components: "tRouteInput",
* "tRESTClient", "tESBConsumer" then BUILD_TYPE must be STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
if (null == originalBuildType || !BUILD_TYPE_STANDALONE.equalsIgnoreCase(originalBuildType.toString())) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_STANDALONE);
boolean modified = false;
try {
save(item);
modified |= true;
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, item, null, "Build Type",
(null == originalBuildType) ? null : originalBuildType.toString(), BUILD_TYPE_STANDALONE));
updateBuildTypeForSubJobs(item, BUILD_TYPE_STANDALONE);
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
if (failure) {
return ExecutionResult.FAILURE;
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
}
/*
* If Job does not contain any of the following components: "tRouteInput",
* "tRESTClient", "tESBConsumer" and BUILD_TYPE is STANDALONE
* Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
if (BUILD_TYPE_STANDALONE.equalsIgnoreCase(originalBuildType.toString())) {
updateBuildTypeForSubJobs(item, BUILD_TYPE_STANDALONE);
if (failure) {
return ExecutionResult.FAILURE;
}
}
return ExecutionResult.NOTHING_TO_DO;
}
@Override
public String getDescription() {
return "Synchronize build types for DI jobs";
}
@Override
public void clear() {
clearMigratedJobs();
}
@SuppressWarnings("unchecked")
private void updateBuildTypeForSubJobs(Item parentJobItem, String parentJobBuiltType) {
IComponentFilter filter = new NameComponentFilter(T_RUB_JOB_COMPONENT);
ProcessType processType = getProcessType(parentJobItem);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
for (NodeType tRunJobComponent : c) {
String processID = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS",
tRunJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS", tRunJobComponent)
.getValue();
if (processID == null) {
processID = findElementParameterByName("PROCESS:PROCESS_TYPE_PROCESS",
tRunJobComponent) == null ? null
: findElementParameterByName("PROCESS:PROCESS_TYPE_PROCESS", tRunJobComponent)
.getValue();
}
String processVersion = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION",
tRunJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION", tRunJobComponent)
.getValue();
if (processVersion == null) {
processVersion = findElementParameterByName("PROCESS:PROCESS_TYPE_VERSION",
tRunJobComponent) == null ? null
: findElementParameterByName("PROCESS:PROCESS_TYPE_VERSION", tRunJobComponent)
.getValue();
}
if (processID != null && processVersion != null) {
ProcessItem childItem = ItemCacheManager.getProcessItem(processID, processVersion);
Project childItemProject = ProjectManager.getInstance().getCurrentProject();
if (childItem == null) {
for (Project refProject : ProjectManager.getInstance().getAllReferencedProjects()) {
childItem = ItemCacheManager.getRefProcessItem(getProject(), processID);
if (childItem != null) {
childItemProject = refProject;
break;
}
}
}
if (childItem != null) {
Object currentChildBuildType = childItem.getProperty().getAdditionalProperties()
.get(BUILD_TYPE_PROPERTY);
// String jobID = childItem.getProperty().getLabel();
String currentChildBuildTypeStr = (null == currentChildBuildType) ? null
: (String) currentChildBuildType;
if (BUILD_TYPE_STANDALONE.equalsIgnoreCase(parentJobBuiltType)
&& !BUILD_TYPE_STANDALONE.equalsIgnoreCase(currentChildBuildTypeStr)) {
childItem.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY,
BUILD_TYPE_STANDALONE);
try {
save(childItem);
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, childItem, null,
"Build Type", currentChildBuildTypeStr, BUILD_TYPE_STANDALONE));
} catch (PersistenceException e) {
ExceptionHandler.process(e);
failure = true;
}
}
updateBuildTypeForSubJobs(childItem, parentJobBuiltType);
}
}
}
}
}
}
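
A condensed restatement of the header-comment rules applied by CorrectBuildTypeForDIJobMigrationTask, written as a standalone sketch for readability; this class does not exist in the change and the names are illustrative:

    // Illustrative only: summarizes the BUILD_TYPE decision for a plain DI Job.
    class DiJobBuildTypeRules {
        static String targetBuildType(boolean hasEsbComponent, String currentBuildType) {
            if ("ROUTE".equalsIgnoreCase(currentBuildType)) {
                // wrongly set by a previous migration; must be corrected manually (STANDALONE or OSGI)
                throw new IllegalStateException("BUILD_TYPE ROUTE must be corrected manually");
            }
            if (!hasEsbComponent || currentBuildType == null) {
                // no tRouteInput/tRESTClient/tESBConsumer, or no build type set yet
                return "STANDALONE";
            }
            // keep the existing value; STANDALONE parents propagate STANDALONE to child jobs (tRunJob)
            return currentBuildType;
        }
    }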

View File

@@ -0,0 +1,107 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.properties.Item;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
/*
* (non-Javadoc)
* Data service REST (process) = Job with "tRESTRequest"
* If no BUILD_TYPE is set then default BUILD_TYPE must be OSGI
* If Job does not contain any of the following components: "tRouteInput", "tRESTClient", "tESBConsumer" then BUILD_TYPE must be STANDALONE
*/
public class CorrectBuildTypeForDsRestMigrationTask extends AbstractDataServiceJobMigrationTask {
private static final String T_REST_REQUEST = "tRESTRequest";
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
/*
* (non-Javadoc)
*
* @see
* org.talend.core.model.migration.AbstractDataserviceMigrationTask#execute(org
* .talend.core.model.properties.Item)
*/
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
boolean modified = false;
/*
* If no BUILD_TYPE is set then default BUILD_TYPE must be OSGI
*/
IComponentFilter filter = new NameComponentFilter(T_REST_REQUEST);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
Object buildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
if (null == buildType) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_OSGI);
try {
save(item);
modified |= true;
generateReportRecord(
new MigrationReportRecorder(this, MigrationReportRecorder.MigrationOperationType.MODIFY,
item, null, "Build Type", null, BUILD_TYPE_OSGI));
storeMigratedJob(item.getProperty().getLabel(), this.getClass().getName());
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
return ExecutionResult.SUCCESS_NO_ALERT;
} else if (BUILD_TYPE_OSGI.equalsIgnoreCase((String)buildType)){
// current job has correct build type
// skip this job during next migrations
skipMigrationForJob(item.getProperty().getLabel(), this.getClass().getName());
} else if (REST_MS.equalsIgnoreCase((String)buildType)){
// current job has correct build type
// skip this job during next migrations
skipMigrationForJob(item.getProperty().getLabel(), this.getClass().getName());
}
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
} else {
return ExecutionResult.NOTHING_TO_DO;
}
}
@Override
public String getDescription() {
return "Synchronize build types for DS Rest jobs";
}
@Override
public void clear () {
clearMigratedJobs();
}
}
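
The DS REST rule is narrower: a Job containing tRESTRequest with no BUILD_TYPE defaults to OSGI, and Jobs already set to OSGI or REST_MS are only remembered so that the later DI task skips them. A one-line sketch of the default, with an illustrative method name:

    // Illustrative only: the default applied by CorrectBuildTypeForDsRestMigrationTask.
    static String defaultDsRestBuildType(String currentBuildType) {
        return currentBuildType == null ? "OSGI" : currentBuildType;
    }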

View File

@@ -0,0 +1,255 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.general.Project;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.core.runtime.process.TalendProcessArgumentConstant;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
import org.talend.designer.runprocess.ItemCacheManager;
import org.talend.repository.ProjectManager;
/*
* Routes
* If no BUILD_TYPE is set then Default BUILD_TYPE must be ROUTE
* - Manage child jobs for routes ( parent, target BUILD_TYPE = OSGI )
* Else if BUILD_TYPE is ROUTE
* - Manage child jobs for routes ( parent, target BUILD_TYPE = OSGI )
* Else if BUILD_TYPE is ROUTE_MICROSERVICE
* -Manage child jobs for jobs ( parent, target BUILD_TYPE = STANDALONE )
*/
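// Illustrative summary of the rules above; this helper is hypothetical and not part of the task:
//   static String childJobTargetBuildType(String routeBuildType) {
//       if (routeBuildType == null || "ROUTE".equalsIgnoreCase(routeBuildType)) {
//           return "OSGI";           // Route, or unset (which defaults to ROUTE)
//       }
//       if ("ROUTE_MICROSERVICE".equalsIgnoreCase(routeBuildType)) {
//           return "STANDALONE";     // microservice Routes align child Jobs to STANDALONE
//       }
//       return null;                 // other build types are not handled by this task
//   }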
public class CorrectBuildTypeForRoutesMigrationTask extends AbstractRouteMigrationTask {
private static final String C_TALEND_JOB = "cTalendJob";
protected Map<String, String> migratedChildJobs = new HashMap<String, String>();
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
boolean modified = false;
boolean migrationFailure = false;
/*
* If no BUILD_TYPE is set then default BUILD_TYPE must be ROUTE
*/
Object buildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
if (null == buildType) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_ROUTE);
try {
save(item);
modified |= true;
generateReportRecord(
new MigrationReportRecorder(this, MigrationReportRecorder.MigrationOperationType.MODIFY, item,
null, "Build Type", "null", BUILD_TYPE_OSGI));
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
/*
* If no BUILD_TYPE is set then Default BUILD_TYPE must be ROUTE - Manage child
* jobs for routes ( parent, target BUILD_TYPE = OSGI ) Else if BUILD_TYPE is
* ROUTE - Manage child jobs for routes ( parent, target BUILD_TYPE = OSGI )
* Else if BUILD_TYPE is ROUTE_MICROSERVICE -Manage child jobs for jobs (
* parent, target BUILD_TYPE = STANDALONE )
*/
String currentRouteBuildType = (String) item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
String currentRouteID = item.getProperty().getLabel();
IComponentFilter filter = new NameComponentFilter(C_TALEND_JOB);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
for (NodeType cTalendJobComponent : c) {
String processID = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS",
cTalendJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_PROCESS",
cTalendJobComponent).getValue();
String processVersion = findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION",
cTalendJobComponent) == null ? null
: findElementParameterByName("SELECTED_JOB_NAME:PROCESS_TYPE_VERSION",
cTalendJobComponent).getValue();
if (processID != null && processVersion != null) {
ProcessItem childItem = ItemCacheManager.getProcessItem(processID, processVersion);
Project childItemProject = ProjectManager.getInstance().getCurrentProject();
if (childItem == null) {
for (Project refProject : ProjectManager.getInstance().getAllReferencedProjects()) {
childItem = ItemCacheManager.getRefProcessItem(getProject(), processID);
if (childItem != null) {
childItemProject = refProject;
break;
}
}
}
if (childItem != null) {
Object currentChildBuildType = childItem.getProperty().getAdditionalProperties()
.get(BUILD_TYPE_PROPERTY);
String jobID = childItem.getProperty().getLabel();
String currentChildBuildTypeStr = (null == currentChildBuildType) ? null
: (String) currentChildBuildType;
if (BUILD_TYPE_ROUTE.equalsIgnoreCase(currentRouteBuildType)
&& BUILD_TYPE_OSGI.equalsIgnoreCase(currentChildBuildTypeStr)) {
storeMigratedModel(jobID, currentRouteID);
}
if (BUILD_TYPE_ROUTE.equalsIgnoreCase(currentRouteBuildType)
&& !BUILD_TYPE_OSGI.equalsIgnoreCase(currentChildBuildTypeStr)) {
try {
if (isModelMigrated(jobID)) {
ExceptionHandler.process(new RuntimeException(
"Child Job is called by 2 or more different Routes which have different build types. Build type for child Job ["
+ jobID + "] was previously updated to [" + currentChildBuildTypeStr
+ "] to be compatible with parent Route ["
+ getStoredMigratedModelParentRoute(jobID)
+ "] which is not compatible with current parent Route ["
+ currentRouteID + "] with build type [" + currentRouteBuildType
+ "]. "));
migrationFailure = true;
continue;
} else {
childItem.getProperty().getAdditionalProperties()
.put(TalendProcessArgumentConstant.ARG_BUILD_TYPE, BUILD_TYPE_OSGI);
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, childItem, null,
"Build Type", currentChildBuildTypeStr, BUILD_TYPE_OSGI));
ProxyRepositoryFactory.getInstance().save(childItemProject, childItem, true);
}
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
if (BUILD_TYPE_ROUTE_MICROSERVICE.equalsIgnoreCase(currentRouteBuildType)
&& !BUILD_TYPE_STANDALONE.equalsIgnoreCase(currentChildBuildTypeStr)) {
try {
if (isModelMigrated(jobID)) {
ExceptionHandler.process(new RuntimeException(
"Child Job is called by 2 or more different Routes which have different build types. Build type for child Job ["
+ jobID + "] was previously updated to [" + currentChildBuildTypeStr
+ "] to be compatible with parent Route ["
+ getStoredMigratedModelParentRoute(jobID)
+ "] which is not compatible with current parent Route ["
+ currentRouteID + "] with build type [" + currentRouteBuildType
+ "]. "));
migrationFailure = true;
continue;
} else {
childItem.getProperty().getAdditionalProperties()
.put(TalendProcessArgumentConstant.ARG_BUILD_TYPE, BUILD_TYPE_STANDALONE);
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, childItem, null,
"Build Type", currentChildBuildTypeStr, BUILD_TYPE_STANDALONE));
ProxyRepositoryFactory.getInstance().save(childItemProject, childItem, true);
}
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
}
if(!isModelMigrated(jobID)) {
storeMigratedModel(jobID, currentRouteID);
}
}
}
}
}
if (migrationFailure) {
return ExecutionResult.FAILURE;
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
return ExecutionResult.NOTHING_TO_DO;
}
protected void clearMigratedChildJobs() {
migratedChildJobs.clear();
}
protected void storeMigratedModel(String jobName, String parentRouteName) {
migratedChildJobs.put(jobName, parentRouteName);
}
protected boolean isModelMigrated(String jobName) {
return migratedChildJobs.containsKey(jobName);
}
protected String getStoredMigratedModelParentRoute(String jobName) {
return migratedChildJobs.get(jobName);
}
@Override
public String getDescription() {
return "Synchronize build types for Routes (incuding child jobs)";
}
@Override
public void clear() {
clearMigratedChildJobs();
clearMigratedJobs();
}
}
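
The conflict handling above rests on the migratedChildJobs map: the first Route to touch a child Job records which parent migrated it, and a later Route that needs a different build type raises an error instead of silently overwriting the value. A minimal sketch of that first-writer-wins check, detached from the repository and migration APIs (class and method names are illustrative, not the Studio code):

import java.util.HashMap;
import java.util.Map;

// Illustrative only: the first parent Route to claim a child Job wins; a later Route
// asking for an incompatible build type is reported as a conflict.
class ChildJobBuildTypeRegistry {

    private final Map<String, String> parentByChildJob = new HashMap<>();
    private final Map<String, String> buildTypeByChildJob = new HashMap<>();

    /** Returns null on success, or the name of the conflicting parent Route. */
    String tryAssign(String childJob, String parentRoute, String targetBuildType) {
        String assigned = buildTypeByChildJob.get(childJob);
        if (assigned == null) {
            buildTypeByChildJob.put(childJob, targetBuildType);
            parentByChildJob.put(childJob, parentRoute);
            return null;
        }
        // Same target from another Route is harmless; a different target is the failure case above.
        return assigned.equalsIgnoreCase(targetBuildType) ? null : parentByChildJob.get(childJob);
    }
}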

View File

@@ -0,0 +1,103 @@
package org.talend.designer.maven.migration.tasks;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.core.model.components.filters.IComponentFilter;
import org.talend.core.model.components.filters.NameComponentFilter;
import org.talend.core.model.properties.Item;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.maven.migration.common.MigrationReportRecorder;
/*
* Data service SOAP = Job with "tESBProviderRequest"
* Set BUILD_TYPE as OSGI
* Manage child jobs for jobs ( parent job, target BUILD_TYPE = OSGI )
*/
public class CorrectBuildTypeForSOAPServiceJobMigrationTask extends AbstractDataServiceJobMigrationTask {
private static final String T_ESB_PROVIDER_REQUEST = "tESBProviderRequest";
private static final String BUILD_TYPE_PROPERTY = "BUILD_TYPE";
private static final String BUILD_TYPE_OSGI = "OSGI";
/*
* (non-Javadoc)
*
* @see org.talend.migration.IMigrationTask#getOrder()
*/
@Override
public Date getOrder() {
GregorianCalendar gc = new GregorianCalendar(2021, 7, 25, 12, 0, 0);
return gc.getTime();
}
/*
* (non-Javadoc)
*
* @see
* org.talend.core.model.migration.AbstractDataserviceMigrationTask#execute(org
* .talend.core.model.properties.Item)
*/
@SuppressWarnings("unchecked")
@Override
public ExecutionResult execute(Item item) {
final ProcessType processType = getProcessType(item);
boolean modified = false;
/*
* If no BUILD_TYPE is set then default BUILD_TYPE must be OSGI
*/
IComponentFilter filter = new NameComponentFilter(T_ESB_PROVIDER_REQUEST);
List<NodeType> c = searchComponent(processType, filter);
if (!c.isEmpty()) {
Object originalBuildType = item.getProperty().getAdditionalProperties().get(BUILD_TYPE_PROPERTY);
if (null == originalBuildType || !BUILD_TYPE_OSGI.equalsIgnoreCase(originalBuildType.toString())) {
item.getProperty().getAdditionalProperties().put(BUILD_TYPE_PROPERTY, BUILD_TYPE_OSGI);
try {
save(item);
modified |= true;
generateReportRecord(new MigrationReportRecorder(this,
MigrationReportRecorder.MigrationOperationType.MODIFY, item, null, "Build Type",
(null == originalBuildType) ? null : originalBuildType.toString(), BUILD_TYPE_OSGI));
storeMigratedJob(item.getProperty().getLabel(), this.getClass().getName());
} catch (PersistenceException e) {
ExceptionHandler.process(e);
return ExecutionResult.FAILURE;
}
return ExecutionResult.SUCCESS_NO_ALERT;
} else if (BUILD_TYPE_OSGI.equalsIgnoreCase((String)originalBuildType)){
// current job has correct build type
// skip this job during next migrations
skipMigrationForJob(item.getProperty().getLabel(), this.getClass().getName());
}
}
if (modified) {
return ExecutionResult.SUCCESS_NO_ALERT;
}
return ExecutionResult.NOTHING_TO_DO;
}
@Override
public String getDescription() {
return "Synchronize build types for SOAP service Jobs";
}
@Override
public void clear () {
clearMigratedJobs();
}
}
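
The trigger for this task is simply the presence of a tESBProviderRequest component: such a Job is a SOAP data service and must be built as OSGI. A one-line sketch of that decision, outside the component-filter API (only the component name shown in the diff is assumed; everything else is left to the other tasks in the set):

import java.util.Set;

// Illustrative decision only, not the Studio implementation.
class SoapServiceBuildType {
    /** OSGI if the Job contains tESBProviderRequest, otherwise no opinion (null). */
    static String defaultBuildTypeFor(Set<String> componentNames) {
        return componentNames.contains("tESBProviderRequest") ? "OSGI" : null;
    }
}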

View File

@@ -0,0 +1,7 @@
package org.talend.designer.maven.migration.tasks;
import org.talend.migration.IProjectMigrationTask;
public interface ICorrectBuildTypeMigrationTask extends IProjectMigrationTask {
public void clear ();
}

View File

@@ -0,0 +1,118 @@
package org.talend.designer.maven.tools;
import java.lang.reflect.InvocationTargetException;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.core.runtime.jobs.ISchedulingRule;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.eclipse.swt.widgets.Display;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.core.model.general.Project;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.designer.maven.migration.common.MigrationReportHelper;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForDIJobMigrationTask;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForDsRestMigrationTask;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForRoutesMigrationTask;
import org.talend.designer.maven.migration.tasks.CorrectBuildTypeForSOAPServiceJobMigrationTask;
import org.talend.designer.maven.migration.tasks.ICorrectBuildTypeMigrationTask;
import org.talend.migration.IMigrationTask;
import org.talend.migration.IProjectMigrationTask;
import org.talend.repository.ProjectManager;
import org.talend.repository.RepositoryWorkUnit;
public class BuildTypeManager {
private ICorrectBuildTypeMigrationTask[] syncBuildTypeMigrationTasks = {
new CorrectBuildTypeForRoutesMigrationTask(), new CorrectBuildTypeForSOAPServiceJobMigrationTask(),
new CorrectBuildTypeForDsRestMigrationTask(), new CorrectBuildTypeForDIJobMigrationTask() };
private boolean hasErrors = false;
public void syncBuildTypes(FieldEditorPreferencePage page) throws Exception {
IRunnableWithProgress runnableWithProgress = new IRunnableWithProgress() {
@Override
public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
RepositoryWorkUnit<Object> workUnit = new RepositoryWorkUnit<Object>("Synchronize all build types") { //$NON-NLS-1$
@Override
protected void run() {
final IWorkspaceRunnable op = new IWorkspaceRunnable() {
@Override
public void run(final IProgressMonitor monitor) throws CoreException {
try {
syncAllBuildTypesWithProgress(monitor, page);
} catch (Exception e) {
ExceptionHandler.process(e);
}
}
};
IWorkspace workspace = ResourcesPlugin.getWorkspace();
try {
ISchedulingRule schedulingRule = workspace.getRoot();
workspace.run(op, schedulingRule, IWorkspace.AVOID_UPDATE, monitor);
} catch (CoreException e) {
ExceptionHandler.process(e);
}
}
};
workUnit.setAvoidUnloadResources(true);
ProxyRepositoryFactory.getInstance().executeRepositoryWorkUnit(workUnit);
}
};
hasErrors = false;
new ProgressMonitorDialog(Display.getDefault().getActiveShell()).run(true, true, runnableWithProgress);
if (hasErrors) {
page.setErrorMessage("Build types synchronization finished with errors. Check workspace logs for details.");
} else {
page.setErrorMessage(null);
}
}
public void syncAllBuildTypesWithProgress(IProgressMonitor monitor, FieldEditorPreferencePage page)
throws Exception {
Project project = ProjectManager.getInstance().getCurrentProject();
SubMonitor subMonitor = SubMonitor.convert(monitor, syncBuildTypeMigrationTasks.length);
for (ICorrectBuildTypeMigrationTask task : syncBuildTypeMigrationTasks) {
task.clear();
}
for (ICorrectBuildTypeMigrationTask task : syncBuildTypeMigrationTasks) {
subMonitor.beginTask(task.getDescription(), syncBuildTypeMigrationTasks.length);
IMigrationTask.ExecutionResult result = task.execute(project);
if (IMigrationTask.ExecutionResult.FAILURE.equals(result)) {
hasErrors = true;
}
subMonitor.worked(1);
}
subMonitor.beginTask("Generate migration report", syncBuildTypeMigrationTasks.length);
MigrationReportHelper.getInstance().generateMigrationReport(project.getTechnicalLabel());
monitor.done();
}
}
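
Assuming the Studio plugin classpath, a caller on the UI side would drive the manager roughly as follows; the page argument is whichever FieldEditorPreferencePage hosts the synchronization action, and the error handling mirrors what the manager itself does:

import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.designer.maven.tools.BuildTypeManager;

// Usage sketch only: runs the four ICorrectBuildTypeMigrationTask instances against the
// current project and lets syncBuildTypes() report errors back on the page.
class SyncBuildTypesAction {
    void run(FieldEditorPreferencePage page) {
        try {
            new BuildTypeManager().syncBuildTypes(page);
        } catch (Exception e) {
            ExceptionHandler.process(e);
        }
    }
}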

View File

@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="lib/commons-text-1.10.0.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-pool2-2.4.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-validator-1.5.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-math3-3.3.jar"/>
@@ -8,6 +9,5 @@
<classpathentry exported="true" kind="lib" path="lib/commons-digester-2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-cli-2.0-SNAPSHOT.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-codec-1.15.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-text-1.1.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -10,7 +10,7 @@ Bundle-ClassPath: .,
lib/commons-math3-3.3.jar,
lib/commons-validator-1.5.1.jar,
lib/commons-pool2-2.4.2.jar,
lib/commons-text-1.1.jar
lib/commons-text-1.10.0.jar
Export-Package: org.apache.commons.cli2,
org.apache.commons.cli2.builder,
org.apache.commons.cli2.commandline,

View File

@@ -7,4 +7,4 @@ bin.includes = META-INF/,\
lib/commons-math3-3.3.jar,\
lib/commons-validator-1.5.1.jar,\
lib/commons-pool2-2.4.2.jar,\
lib/commons-text-1.1.jar
lib/commons-text-1.10.0.jar

View File

@@ -62,6 +62,11 @@
<artifactId>commons-collections</artifactId>
<version>3.2.2</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.10.0</version>
</artifactItem>
<artifactItem>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>

View File

@@ -35,7 +35,7 @@
<!-- same as xercesImpl.jar-->
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.0</version>
<version>2.12.2</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.ws.xmlschema</groupId>

View File

@@ -6,9 +6,42 @@ COPYRIGHTS AND LICENSES
ORIGINAL LICENSE (a.k.a. "hypersonic_lic.txt")
For content, code, and products originally developed by Thomas Mueller and the Hypersonic SQL Group:
For work developed by the HSQL Development Group:
Copyright (c) 1995-2000 by the Hypersonic SQL Group.
Copyright (c) 2001-2022, The HSQL Development Group
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
Neither the name of the HSQL Development Group nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
For work originally developed by the Hypersonic SQL Group:
Copyright (c) 1995-2000, The Hypersonic SQL Group.
All rights reserved.
Redistribution and use in source and binary forms, with or without
@@ -37,12 +70,12 @@ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This software consists of voluntary contributions made by many individuals on behalf of the
Hypersonic SQL Group.
This software consists of voluntary contributions made by many individuals
on behalf of the Hypersonic SQL Group.
For work added by the HSQL Development Group (a.k.a. hsqldb_lic.txt):
Copyright (c) 2001-2005, The HSQL Development Group
Copyright (c) 2001-2022, The HSQL Development Group
All rights reserved.
Redistribution and use in source and binary forms, with or without

main/plugins/org.talend.libraries.jdbc.hsql/pom.xml Normal file → Executable file
View File

@@ -9,4 +9,33 @@
</parent>
<artifactId>org.talend.libraries.jdbc.hsql</artifactId>
<packaging>eclipse-plugin</packaging>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy</id>
<phase>generate-sources</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.7.1</version>
<classifier>jdk8</classifier>
<outputDirectory>${project.basedir}/lib</outputDirectory>
<destFileName>hsqldb.jar</destFileName>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -520,9 +520,13 @@ public class ResumeUtil {
private String lineSeparator = System.getProperty("line.separator");
private int capibility = 2 << 22; //8M
private final int capibility = 2 << 22; //8M
private final int FLUSH_FACTOR = 6 *1024 *1024; //6M
private final int SUBSTRING_SIZE = 2 << 20; //2M
private int FLUSH_FACTOR = 6 *1024 *1024; //6M
public SimpleCsvWriter(FileChannel channel) {
@@ -553,6 +557,16 @@ public class ResumeUtil {
content = replace(content, "" + TextQualifier, "" + TextQualifier + TextQualifier);
}
if (content.length() > SUBSTRING_SIZE) { //2M
int index = 0;
for (; content.length() - index > SUBSTRING_SIZE; index += SUBSTRING_SIZE) {
flush(true);
final String substring = content.substring(index, index + SUBSTRING_SIZE);
buf.put(substring.getBytes());
}
content = content.substring(index);
}
byte[] contentByte = content.getBytes();
if(contentByte.length > capibility - buf.position()) {
flush(true);

View File

@@ -893,6 +893,7 @@ public final class DBConnectionContextUtils {
managerConnection.setValue(0, dbType, urlConnection, server, username, password, sidOrDatabase, port, filePath,
datasource, schemaOracle, additionParam, driverClassName, driverJarPath, dbVersionString);
managerConnection.setDbRootPath(dbRootPath);
managerConnection.setSupportNLS(dbConn.isSupportNLS());
return urlConnection;
}
@@ -1058,6 +1059,12 @@ public final class DBConnectionContextUtils {
cloneConn.setSQLMode(true);
}
if(dbConn.isSetSupportNLS()) {
cloneConn.setSupportNLS(dbConn.isSupportNLS());
} else {
cloneConn.setSupportNLS(false);
}
// cloneConn.setProperties(dbConn.getProperties());
// cloneConn.setCdcConns(dbConn.getCdcConns());
// cloneConn.setQueries(dbConn.getQueries());

View File

@@ -166,7 +166,8 @@ public class ExtendedNodeConnectionContextUtils {
KnoxUrl,
KnoxUsername,
KnoxPassword,
KnoxDirectory
KnoxDirectory,
KnoxTimeout
}
static List<IContextParameter> getContextVariables(final String prefixName, Connection conn, Set<IConnParamName> paramSet) {

View File

@@ -132,6 +132,22 @@
required="true"
uripath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-lang-2.4.jar">
</libraryNeeded>
<libraryNeeded
context="plugin:org.talend.libraries.jdbc.oracle"
language="java"
message="Needed for Oracle jdbc plugin National Language Support (NLS)."
mvn_uri="mvn:com.oracle.database.nls/orai18n/19.3.0.0/jar"
name="orai18n-19.3.0.0.jar"
required="true">
</libraryNeeded>
<libraryNeeded
context="plugin:org.talend.metadata.managment"
language="java"
message="Needed for plugin org.talend.metadata.managment"
name="hsqldb.jar" mvn_uri="mvn:org.hsqldb/hsqldb/2.7.1"
required="true"
uripath="platform:/plugin/org.talend.libraries.jdbc.hsql/lib/hsqldb.jar">
</libraryNeeded>
</extension>
<extension
point="org.talend.core.migrationTask">

View File

@@ -291,6 +291,7 @@ public class ExtractMetaDataFromDataBase {
* DOC cantoine. Method to test DataBaseConnection.
*
* @param dbVersionString
* @param supportNLS
*
* @param String driverClass
* @param String urlString pwd
@@ -299,14 +300,14 @@ public class ExtractMetaDataFromDataBase {
* @return ConnectionStatus : the result of connection(boolean Result, String messageException)
*/
public static ConnectionStatus testConnection(String dbType, String url, String username, String pwd, String schema,
final String driverClassName, final String driverJarPath, String dbVersionString, String additionalParam) {
final String driverClassName, final String driverJarPath, String dbVersionString, String additionalParam, boolean supportNLS) {
return testConnection(dbType, url, username, pwd, schema, driverClassName, driverJarPath, dbVersionString,
additionalParam, null, null);
additionalParam, supportNLS, null, null);
}
public static ConnectionStatus testConnection(String dbType, String url, String username, String pwd, String schema,
final String driverClassName, final String driverJarPath, String dbVersionString, String additionalParam,
StringBuffer retProposedSchema, String sidOrDatabase) {
boolean supportNLS, StringBuffer retProposedSchema, String sidOrDatabase) {
Connection connection = null;
ConnectionStatus connectionStatus = new ConnectionStatus();
connectionStatus.setResult(false);
@@ -315,7 +316,7 @@ public class ExtractMetaDataFromDataBase {
List list = new ArrayList();
list = ExtractMetaDataUtils.getInstance().connect(dbType, url, username, pwd, driverClassName, driverJarPath,
dbVersionString, additionalParam);
dbVersionString, additionalParam, supportNLS);
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof Connection) {
@@ -498,7 +499,7 @@ public class ExtractMetaDataFromDataBase {
List list = metaData.getConnection(iMetadataConnection.getDbType(), url, iMetadataConnection.getUsername(),
iMetadataConnection.getPassword(), iMetadataConnection.getDatabase(), iMetadataConnection.getSchema(),
iMetadataConnection.getDriverClass(), iMetadataConnection.getDriverJarPath(),
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams());
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams(), iMetadataConnection.isSupportNLS());
Connection conn = null;
DriverShim wapperDriver = null;
@@ -582,7 +583,7 @@ public class ExtractMetaDataFromDataBase {
List list = extractMeta.getConnection(iMetadataConnection.getDbType(), iMetadataConnection.getUrl(),
iMetadataConnection.getUsername(), iMetadataConnection.getPassword(), iMetadataConnection.getDatabase(),
iMetadataConnection.getSchema(), iMetadataConnection.getDriverClass(), iMetadataConnection.getDriverJarPath(),
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams());
iMetadataConnection.getDbVersionString(), iMetadataConnection.getAdditionalParams(), iMetadataConnection.isSupportNLS());
DriverShim wapperDriver = null;
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {

View File

@@ -121,6 +121,8 @@ public class ExtractMetaDataUtils {
private String[] ORACLE_SSL_JARS = new String[] { "oraclepki-12.2.0.1.jar", "osdt_cert-12.2.0.1.jar", //$NON-NLS-1$//$NON-NLS-2$
"osdt_core-12.2.0.1.jar" }; //$NON-NLS-1$
private String ORACLE_NLS_JARS = "orai18n-19.3.0.0.jar";
public static final String SNOWFLAKE = "Snowflake"; //$NON-NLS-1$
@@ -829,6 +831,11 @@ public class ExtractMetaDataUtils {
*/
public List getConnection(String dbType, String url, String username, String pwd, String dataBase, String schemaBase,
final String driverClassName, final String driverJarPath, String dbVersion, String additionalParams) {
return getConnection(dbType, url, username, pwd, dataBase, schemaBase, driverClassName, driverJarPath, dbVersion, additionalParams, false);
}
public List getConnection(String dbType, String url, String username, String pwd, String dataBase, String schemaBase,
final String driverClassName, final String driverJarPath, String dbVersion, String additionalParams, boolean supportNLS) {
boolean isColsed = false;
List conList = new ArrayList();
try {
@@ -846,7 +853,7 @@ public class ExtractMetaDataUtils {
closeConnection(true); // colse before connection.
checkDBConnectionTimeout();
list = connect(dbType, url, username, pwd, driverClassName, driverJarPath, dbVersion, additionalParams);
list = connect(dbType, url, username, pwd, driverClassName, driverJarPath, dbVersion, additionalParams, supportNLS);
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof Connection) {
@@ -938,7 +945,7 @@ public class ExtractMetaDataUtils {
* @throws Exception
*/
public List connect(String dbType, String url, String username, String pwd, final String driverClassNameArg,
final String driverJarPathArg, String dbVersion, String additionalParams) throws Exception {
final String driverJarPathArg, String dbVersion, String additionalParams, boolean supportNLS) throws Exception {
Connection connection = null;
DriverShim wapperDriver = null;
List conList = new ArrayList();
@@ -953,11 +960,18 @@ public class ExtractMetaDataUtils {
if ((driverJarPathArg == null || driverJarPathArg.equals(""))) { //$NON-NLS-1$
List<String> driverNames = EDatabaseVersion4Drivers.getDrivers(dbType, dbVersion);
if (driverNames != null) {
if(EDatabaseTypeName.ORACLEFORSID.getProduct().equals(EDatabaseTypeName.getTypeFromDbType(dbType).getProduct())) {
if(supportNLS){
driverNames.add(ORACLE_NLS_JARS);
}
}
if (EDatabaseTypeName.ORACLE_CUSTOM.getDisplayName().equals(dbType)
&& StringUtils.isNotEmpty(additionalParams)) {
if (additionalParams.contains(SSLPreferenceConstants.TRUSTSTORE_TYPE)) {
driverNames.addAll(Arrays.asList(ORACLE_SSL_JARS));
}
} else if (SNOWFLAKE.equals(dbType)) { // $NON-NLS-1$
// TDQ-17294 msjian Support of Snowflake for DQ Datamart
driverNames.add(SNOWFLAKE_DRIVER_JAR);
@@ -1285,7 +1299,7 @@ public class ExtractMetaDataUtils {
List list = getConnection(metadataConnection.getDbType(), metadataConnection.getUrl(), metadataConnection.getUsername(),
metadataConnection.getPassword(), metadataConnection.getDatabase(), metadataConnection.getSchema(),
metadataConnection.getDriverClass(), metadataConnection.getDriverJarPath(),
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams());
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
return list;
}
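
For callers, the only change is the extra boolean at the end of connect()/getConnection(); the pre-existing getConnection() signature is kept as a delegate that passes supportNLS = false. A hedged call sketch (the Oracle connection values below are placeholders, not values from the Studio):

import java.util.List;

// Call sketch only: the last argument is the new supportNLS flag; when true and the
// database is Oracle, orai18n-19.3.0.0.jar is added to the driver jars to load.
// Import of ExtractMetaDataUtils omitted; it is the class shown in the diff above.
class NlsConnectExample {
    static List<?> open() throws Exception {
        return ExtractMetaDataUtils.getInstance().connect(
                "Oracle with SID",                    // dbType (placeholder)
                "jdbc:oracle:thin:@//host:1521/ORCL", // url (placeholder)
                "user", "secret",                     // credentials (placeholders)
                "oracle.jdbc.OracleDriver",           // driver class
                "",                                   // driver jar path: empty -> resolved from registered drivers
                "ORACLE_18",                          // dbVersion (placeholder)
                null,                                 // additionalParams
                true);                                // supportNLS
    }
}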

View File

@@ -17,6 +17,7 @@ import java.nio.charset.Charset;
import java.security.Provider;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -166,6 +167,11 @@ public class JDBCDriverLoader {
info.put("charSet", systemCharset.displayName()); //$NON-NLS-1$
}
}
//TUP-37016:Upgrade hsqldb to 2.7.1
if (dbType.equals(EDatabaseTypeName.ACCESS.getXmlName()) || ConnectionUtils.isHsql(url)) {
System.setProperty("hsqldb.method_class_names", "net.ucanaccess.converters.*");
}
if (additionalParams != null && !"".equals(additionalParams) && dbType.toUpperCase().contains("ORACLE")) {//$NON-NLS-1$//$NON-NLS-2$
if (additionalParams.contains(SSLPreferenceConstants.TRUSTSTORE_TYPE)) {
@@ -198,10 +204,20 @@ public class JDBCDriverLoader {
}
connection = wapperDriver.connect(url, info);
}
try {
ResultSet schemas = connection.getMetaData().getSchemas();
if(schemas.next()) {
schemas.getString(1);
}
} catch (Exception e) {
}
// }
// DriverManager.deregisterDriver(wapperDriver);
// bug 9162
list.add(connection);
list.add(wapperDriver);
return list;
} catch (Throwable e) {
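
The new block above caters for HSQLDB 2.7.x, which only lets SQL routines call Java methods whose classes are whitelisted through the hsqldb.method_class_names system property; the UCanAccess driver used for MS Access files relies on its converter classes being callable that way. The property just needs to be set before the connection is opened, as in this detached sketch (the dbType/url checks stand in for the EDatabaseTypeName and ConnectionUtils calls used above):

// Detached sketch of the TUP-37016 accommodation, outside the Studio driver loader.
class HsqldbWhitelist {
    static void allowUcanaccessConverters(String dbType, String url) {
        boolean isAccess = "Access".equalsIgnoreCase(dbType);           // placeholder for the EDatabaseTypeName check
        boolean isHsql = url != null && url.startsWith("jdbc:hsqldb:"); // placeholder for ConnectionUtils.isHsql(url)
        if (isAccess || isHsql) {
            System.setProperty("hsqldb.method_class_names", "net.ucanaccess.converters.*");
        }
    }
}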

View File

@@ -470,7 +470,7 @@ public class ExtractManager {
metadataConnection.getUsername(), metadataConnection.getPassword(), metadataConnection.getDatabase(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(),
metadataConnection.getDriverJarPath(), metadataConnection.getDbVersionString(),
metadataConnection.getAdditionalParams());
metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof Driver) {
@@ -574,7 +574,7 @@ public class ExtractManager {
metadataConnection.getUsername(), metadataConnection.getPassword(), metadataConnection.getDatabase(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(),
metadataConnection.getDriverJarPath(), metadataConnection.getDbVersionString(),
metadataConnection.getAdditionalParams());
metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
if (list != null && list.size() > 0) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i) instanceof DriverShim) {
@@ -1064,7 +1064,7 @@ public class ExtractManager {
List connList = extractMeta.getConnection(metadataConnection.getDbType(), metadataConnection.getUrl(),
metadataConnection.getUsername(), metadataConnection.getPassword(), metadataConnection.getDatabase(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(), metadataConnection.getDriverJarPath(),
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams());
metadataConnection.getDbVersionString(), metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS());
try {
if (!tableInfoParameters.isUsedName()) {
if (tableInfoParameters.getSqlFiter() != null && !"".equals(tableInfoParameters.getSqlFiter())) { //$NON-NLS-1$

View File

@@ -40,7 +40,7 @@ public enum EHiveWithTezJars {
"api-asn1-api-1.0.0-M20.jar", "api-util-1.0.0-M20.jar", "asm-3.1.jar", "avro-1.7.4.jar",
"commons-beanutils-1.7.0.jar", "commons-beanutils-core-1.8.0.jar", "commons-compress-1.4.1.jar",
"commons-configuration-1.6.jar", "commons-digester-1.8.jar", "commons-net-3.1.jar", "curator-client-2.6.0.jar",
"curator-framework-2.6.0.jar", "curator-recipes-2.6.0.jar", "gson-2.2.4.jar", "guice-3.0.jar",
"curator-framework-2.6.0.jar", "curator-recipes-2.6.0.jar", "gson-2.9.0.jar", "guice-3.0.jar",
"guice-servlet-3.0.jar", "hadoop-auth-2.6.0.2.2.0.0-2041.jar", "hadoop-common-2.6.0.2.2.0.0-2041.jar",
"hadoop-hdfs-2.6.0.2.2.0.0-2041.jar", "hadoop-yarn-api-2.6.0.2.2.0.0-2041.jar",
"hadoop-yarn-client-2.6.0.2.2.0.0-2041.jar", "hadoop-yarn-common-2.6.0.2.2.0.0-2041.jar", "htrace-core-3.0.4.jar",
@@ -56,7 +56,7 @@ public enum EHiveWithTezJars {
"api-asn1-api-1.0.0-M20.jar", "api-util-1.0.0-M20.jar", "asm-3.2.jar", "avro-1.7.5.jar",
"commons-beanutils-1.7.0.jar", "commons-beanutils-core-1.8.0.jar", "commons-compress-1.4.1.jar",
"commons-configuration-1.6.jar", "commons-digester-1.8.jar", "commons-net-3.1.jar", "curator-client-2.7.1.jar",
"curator-framework-2.7.1.jar", "curator-recipes-2.7.1.jar", "gson-2.2.4.jar", "guice-3.0.jar",
"curator-framework-2.7.1.jar", "curator-recipes-2.7.1.jar", "gson-2.9.0.jar", "guice-3.0.jar",
"guice-servlet-3.0.jar", "hadoop-auth-2.7.1.2.3.2.0-2950.jar", "hadoop-common-2.7.1.2.3.2.0-2950.jar",
"hadoop-hdfs-2.7.1.2.3.2.0-2950.jar", "hadoop-yarn-api-2.7.1.2.3.2.0-2950.jar",
"hadoop-yarn-client-2.7.1.2.3.2.0-2950.jar", "hadoop-yarn-common-2.7.1.2.3.2.0-2950.jar",

View File

@@ -90,6 +90,8 @@ public class ManagerConnection {
Integer id = null;
String additionalParams;
private boolean supportNLS;
private String schemaOracle;
@@ -288,7 +290,7 @@ public class ManagerConnection {
}
// test the connection
testConnection = ExtractMetaDataFromDataBase.testConnection(dbTypeString, urlConnectionString, username, password,
schemaName, driverClassName, driverJarPath, dbVersionString, additionalParams, retProposedSchema,
schemaName, driverClassName, driverJarPath, dbVersionString, additionalParams, supportNLS, retProposedSchema,
sidOrDatabase);
isValide = testConnection.getResult();
messageException = testConnection.getMessageException();
@@ -388,7 +390,7 @@ public class ManagerConnection {
metadataConnection.getUrl(), metadataConnection.getUsername(), metadataConnection.getPassword(),
metadataConnection.getSchema(), metadataConnection.getDriverClass(),
metadataConnection.getDriverJarPath(), metadataConnection.getDbVersionString(),
metadataConnection.getAdditionalParams(), retProposedSchema, metadataConnection.getDatabase());
metadataConnection.getAdditionalParams(), metadataConnection.isSupportNLS(), retProposedSchema, metadataConnection.getDatabase());
}
// qli
// record this metadataConnection as old connection.
@@ -475,4 +477,13 @@ public class ManagerConnection {
this.isValide = isValide;
}
/**
* Sets the supportNLS.
* @param supportNLS the supportNLS to set
*/
public void setSupportNLS(boolean supportNLS) {
this.supportNLS = supportNLS;
}
}

View File

@@ -188,7 +188,7 @@ public class MetadataConnectionUtils {
}
list = ExtractMetaDataUtils.getInstance().connect(metadataBean.getDbType(), metadataBean.getUrl(),
metadataBean.getUsername(), metadataBean.getPassword(), metadataBean.getDriverClass(),
metadataBean.getDriverJarPath(), metadataBean.getDbVersionString(), metadataBean.getAdditionalParams());
metadataBean.getDriverJarPath(), metadataBean.getDbVersionString(), metadataBean.getAdditionalParams(), metadataBean.isSupportNLS());
} catch (Exception e) {
rc.setMessage("fail to connect database!"); //$NON-NLS-1$
CommonExceptionHandler.process(e);
@@ -274,6 +274,7 @@ public class MetadataConnectionUtils {
String dataBase = databaseConnection.getSID();
String dbVersionString = databaseConnection.getDbVersionString();
String additionalParams = databaseConnection.getAdditionalParams();
boolean supportNLS = databaseConnection.isSupportNLS();
// MOD qiongli 2011-9-6,TDQ 3317.handle context mode
if (databaseConnection.isContextMode()) {
@@ -313,6 +314,7 @@ public class MetadataConnectionUtils {
metadataConnection.setUsername(userName);
metadataConnection.setPassword(password);
metadataConnection.setUrl(dbUrl);
metadataConnection.setSupportNLS(supportNLS);
// TDQ-12299: transfer the OtherParameters to metadataConnection, because create impala connection use that
// values
@@ -1312,7 +1314,7 @@ public class MetadataConnectionUtils {
return ExtractMetaDataUtils.getInstance().getConnection(metadataBean.getDbType(), metadataBean.getUrl(),
metadataBean.getUsername(), metadataBean.getPassword(), metadataBean.getDatabase(), metadataBean.getSchema(),
metadataBean.getDriverClass(), metadataBean.getDriverJarPath(), metadataBean.getDbVersionString(),
metadataBean.getAdditionalParams());
metadataBean.getAdditionalParams(), metadataBean.isSupportNLS());
}
/**

View File

@@ -80,6 +80,7 @@ public class DatabaseConnectionItemProvider extends ConnectionItemProvider imple
addCdcTypeModePropertyDescriptor(object);
addSQLModePropertyDescriptor(object);
addUiSchemaPropertyDescriptor(object);
addSupportNLSPropertyDescriptor(object);
}
return itemPropertyDescriptors;
}
@@ -468,6 +469,22 @@ public class DatabaseConnectionItemProvider extends ConnectionItemProvider imple
false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null));
}
/**
* This adds a property descriptor for the Support NLS feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSupportNLSPropertyDescriptor(Object object) {
itemPropertyDescriptors
.add(createItemPropertyDescriptor(((ComposeableAdapterFactory) adapterFactory).getRootAdapterFactory(),
getResourceLocator(), getString("_UI_DatabaseConnection_supportNLS_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_DatabaseConnection_supportNLS_feature",
"_UI_DatabaseConnection_type"),
ConnectionPackage.Literals.DATABASE_CONNECTION__SUPPORT_NLS, true, false, false,
ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE, null, null));
}
/**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
@@ -558,6 +575,7 @@ public class DatabaseConnectionItemProvider extends ConnectionItemProvider imple
case ConnectionPackage.DATABASE_CONNECTION__CDC_TYPE_MODE:
case ConnectionPackage.DATABASE_CONNECTION__SQL_MODE:
case ConnectionPackage.DATABASE_CONNECTION__UI_SCHEMA:
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
return;
case ConnectionPackage.DATABASE_CONNECTION__CDC_CONNS:

View File

@@ -348,6 +348,8 @@
<eStructuralFeatures xsi:type="ecore:EAttribute" name="UiSchema" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"/>
<eStructuralFeatures xsi:type="ecore:EReference" name="parameters" upperBound="-1"
eType="#//AdditionalProperties" containment="true"/>
<eStructuralFeatures xsi:type="ecore:EAttribute" name="supportNLS" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EBoolean"
defaultValueLiteral="false" unsettable="true"/>
</eClassifiers>
<eClassifiers xsi:type="ecore:EClass" name="SAPConnection" eSuperTypes="#//Connection">
<eStructuralFeatures xsi:type="ecore:EAttribute" name="Host" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"/>

View File

@@ -187,6 +187,7 @@
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//DatabaseConnection/SQLMode"/>
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//DatabaseConnection/UiSchema"/>
<genFeatures property="None" children="true" createChild="true" ecoreFeature="ecore:EReference metadata.ecore#//DatabaseConnection/parameters"/>
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//DatabaseConnection/supportNLS"/>
</genClasses>
<genClasses ecoreClass="metadata.ecore#//SAPConnection">
<genFeatures createChild="false" ecoreFeature="ecore:EAttribute metadata.ecore#//SAPConnection/Host"/>

View File

@@ -5238,6 +5238,15 @@ public interface ConnectionPackage extends EPackage {
*/
int DATABASE_CONNECTION__PARAMETERS = CONNECTION_FEATURE_COUNT + 25;
/**
* The feature id for the '<em><b>Support NLS</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int DATABASE_CONNECTION__SUPPORT_NLS = CONNECTION_FEATURE_COUNT + 26;
/**
* The number of structural features of the '<em>Database Connection</em>' class.
* <!-- begin-user-doc --> <!--
@@ -5245,7 +5254,7 @@ public interface ConnectionPackage extends EPackage {
* @generated
* @ordered
*/
int DATABASE_CONNECTION_FEATURE_COUNT = CONNECTION_FEATURE_COUNT + 26;
int DATABASE_CONNECTION_FEATURE_COUNT = CONNECTION_FEATURE_COUNT + 27;
/**
* The meta object id for the '{@link org.talend.core.model.metadata.builder.connection.impl.SAPConnectionImpl <em>SAP Connection</em>}' class.
@@ -21887,6 +21896,17 @@ public interface ConnectionPackage extends EPackage {
*/
EReference getDatabaseConnection_Parameters();
/**
* Returns the meta object for the attribute '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Support NLS</em>'.
* @see org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS()
* @see #getDatabaseConnection()
* @generated
*/
EAttribute getDatabaseConnection_SupportNLS();
/**
* Returns the meta object for class '{@link org.talend.core.model.metadata.builder.connection.SAPConnection <em>SAP Connection</em>}'.
* <!-- begin-user-doc --> <!-- end-user-doc -->
@@ -26439,6 +26459,14 @@ public interface ConnectionPackage extends EPackage {
*/
EReference DATABASE_CONNECTION__PARAMETERS = eINSTANCE.getDatabaseConnection_Parameters();
/**
* The meta object literal for the '<em><b>Support NLS</b></em>' attribute feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EAttribute DATABASE_CONNECTION__SUPPORT_NLS = eINSTANCE.getDatabaseConnection_SupportNLS();
/**
* The meta object literal for the '{@link org.talend.core.model.metadata.builder.connection.impl.SAPConnectionImpl <em>SAP Connection</em>}' class.
* <!-- begin-user-doc --> <!-- end-user-doc -->

View File

@@ -755,4 +755,54 @@ public interface DatabaseConnection extends Connection {
*/
EMap<String, String> getParameters();
/**
* Returns the value of the '<em><b>Support NLS</b></em>' attribute.
* The default value is <code>"false"</code>.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the value of the '<em>Support NLS</em>' attribute.
* @see #isSetSupportNLS()
* @see #unsetSupportNLS()
* @see #setSupportNLS(boolean)
* @see org.talend.core.model.metadata.builder.connection.ConnectionPackage#getDatabaseConnection_SupportNLS()
* @model default="false" unsettable="true"
* @generated
*/
boolean isSupportNLS();
/**
* Sets the value of the '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Support NLS</em>' attribute.
* @see #isSetSupportNLS()
* @see #unsetSupportNLS()
* @see #isSupportNLS()
* @generated
*/
void setSupportNLS(boolean value);
/**
* Unsets the value of the '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetSupportNLS()
* @see #isSupportNLS()
* @see #setSupportNLS(boolean)
* @generated
*/
void unsetSupportNLS();
/**
* Returns whether the value of the '{@link org.talend.core.model.metadata.builder.connection.DatabaseConnection#isSupportNLS <em>Support NLS</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Support NLS</em>' attribute is set.
* @see #unsetSupportNLS()
* @see #isSupportNLS()
* @see #setSupportNLS(boolean)
* @generated
*/
boolean isSetSupportNLS();
} // DatabaseConnection

View File

@@ -1562,6 +1562,15 @@ public class ConnectionPackageImpl extends EPackageImpl implements ConnectionPac
return (EReference) databaseConnectionEClass.getEStructuralFeatures().get(25);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getDatabaseConnection_SupportNLS() {
return (EAttribute) databaseConnectionEClass.getEStructuralFeatures().get(26);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
* @generated
@@ -4516,6 +4525,7 @@ public class ConnectionPackageImpl extends EPackageImpl implements ConnectionPac
createEAttribute(databaseConnectionEClass, DATABASE_CONNECTION__SQL_MODE);
createEAttribute(databaseConnectionEClass, DATABASE_CONNECTION__UI_SCHEMA);
createEReference(databaseConnectionEClass, DATABASE_CONNECTION__PARAMETERS);
createEAttribute(databaseConnectionEClass, DATABASE_CONNECTION__SUPPORT_NLS);
sapConnectionEClass = createEClass(SAP_CONNECTION);
createEAttribute(sapConnectionEClass, SAP_CONNECTION__HOST);
@@ -5294,6 +5304,9 @@ public class ConnectionPackageImpl extends EPackageImpl implements ConnectionPac
initEReference(getDatabaseConnection_Parameters(), this.getAdditionalProperties(), null, "parameters", null, 0, -1,
DatabaseConnection.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, IS_RESOLVE_PROXIES,
!IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getDatabaseConnection_SupportNLS(), ecorePackage.getEBoolean(), "supportNLS", "false", 0, 1,
DatabaseConnection.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_UNSETTABLE, !IS_ID, IS_UNIQUE,
!IS_DERIVED, IS_ORDERED);
initEClass(sapConnectionEClass, SAPConnection.class, "SAPConnection", !IS_ABSTRACT, !IS_INTERFACE,
IS_GENERATED_INSTANCE_CLASS);

View File

@@ -51,6 +51,7 @@ import org.talend.core.model.metadata.builder.connection.DatabaseConnection;
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#isSQLMode <em>SQL Mode</em>}</li>
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#getUiSchema <em>Ui Schema</em>}</li>
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#getParameters <em>Parameters</em>}</li>
* <li>{@link org.talend.core.model.metadata.builder.connection.impl.DatabaseConnectionImpl#isSupportNLS <em>Support NLS</em>}</li>
* </ul>
*
* @generated
@@ -563,6 +564,35 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
*/
protected EMap<String, String> parameters;
/**
* The default value of the '{@link #isSupportNLS() <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSupportNLS()
* @generated
* @ordered
*/
protected static final boolean SUPPORT_NLS_EDEFAULT = false;
/**
* The cached value of the '{@link #isSupportNLS() <em>Support NLS</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSupportNLS()
* @generated
* @ordered
*/
protected boolean supportNLS = SUPPORT_NLS_EDEFAULT;
/**
* This is true if the Support NLS attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean supportNLSESet = true;
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
* @generated
@@ -1173,6 +1203,54 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return parameters;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSupportNLS() {
return supportNLS;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setSupportNLS(boolean newSupportNLS) {
boolean oldSupportNLS = supportNLS;
supportNLS = newSupportNLS;
boolean oldSupportNLSESet = supportNLSESet;
supportNLSESet = true;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS,
oldSupportNLS, supportNLS, !oldSupportNLSESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void unsetSupportNLS() {
boolean oldSupportNLS = supportNLS;
boolean oldSupportNLSESet = supportNLSESet;
supportNLS = SUPPORT_NLS_EDEFAULT;
supportNLSESet = false;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.UNSET, ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS,
oldSupportNLS, SUPPORT_NLS_EDEFAULT, oldSupportNLSESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSetSupportNLS() {
return supportNLSESet;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
* @generated
@@ -1267,6 +1345,8 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return getParameters();
else
return getParameters().map();
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
return isSupportNLS();
}
return super.eGet(featureID, resolve, coreType);
}
@@ -1335,6 +1415,8 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return getParameters();
else
return getParameters().map();
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
return isSupportNLS();
}
return super.eGet(featureID, resolve, coreType);
}
@@ -1424,6 +1506,9 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
case ConnectionPackage.DATABASE_CONNECTION__PARAMETERS:
((EStructuralFeature.Setting) getParameters()).set(newValue);
return;
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
setSupportNLS((Boolean) newValue);
return;
}
super.eSet(featureID, newValue);
}
@@ -1513,6 +1598,9 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
case ConnectionPackage.DATABASE_CONNECTION__PARAMETERS:
getParameters().clear();
return;
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
unsetSupportNLS();
return;
}
super.eUnset(featureID);
}
@@ -1578,6 +1666,8 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
return UI_SCHEMA_EDEFAULT == null ? uiSchema != null : !UI_SCHEMA_EDEFAULT.equals(uiSchema);
case ConnectionPackage.DATABASE_CONNECTION__PARAMETERS:
return parameters != null && !parameters.isEmpty();
case ConnectionPackage.DATABASE_CONNECTION__SUPPORT_NLS:
return isSetSupportNLS();
}
return super.eIsSet(featureID);
}
@@ -1643,6 +1733,11 @@ public class DatabaseConnectionImpl extends ConnectionImpl implements DatabaseCo
result.append("<unset>");
result.append(", UiSchema: ");
result.append(uiSchema);
result.append(", supportNLS: ");
if (supportNLSESet)
result.append(supportNLS);
else
result.append("<unset>");
result.append(')');
return result.toString();
}

View File

@@ -721,6 +721,10 @@ public class ImportItemsWizardPage extends WizardPage {
ImportDependencyRelationsHelper.getInstance().checkImportRelationDependency(checkedNodeList, toSelectSet,
nodesBuilder.getAllImportItemNode());
// to make doCheckStateChanged execute from ContainerCheckedTreeViewer.setCheckedElements(Object[])
filteredCheckboxTree.getViewer().setCheckedElements(new Object[0]);
filteredCheckboxTree.getViewer().setCheckedElements(toSelectSet.toArray());
}

View File

@@ -23,6 +23,7 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -51,6 +52,8 @@ import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecore.xmi.XMLResource;
import org.eclipse.emf.ecore.xmi.impl.XMLParserPoolImpl;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.exception.PersistenceException;
import org.talend.commons.runtime.model.repository.ERepositoryStatus;
@@ -1121,6 +1124,13 @@ public class ImportBasicHandler extends AbstractImportExecutableHandler {
}
stream = manager.getStream(itemPath, importItem);
Resource resource = createResource(importItem, itemPath, byteArray);
//TUP-36820:Add options for improving performance for deserialization (loading) of large XML resource
Map optionMap = new HashMap();
optionMap.put(XMLResource.OPTION_DEFER_ATTACHMENT, Boolean.TRUE);
optionMap.put(XMLResource.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
optionMap.put(XMLResource.OPTION_USE_PARSER_POOL, new XMLParserPoolImpl());
optionMap.put(XMLResource.OPTION_USE_XML_NAME_TO_FEATURE_MAP, new HashMap());
optionMap.put(XMLResource.OPTION_USE_DEPRECATED_METHODS, Boolean.FALSE);
if (byteArray) {
// TDI-24612
@@ -1133,16 +1143,16 @@ public class ImportBasicHandler extends AbstractImportExecutableHandler {
baos.write(buf, 0, i);
}
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
resource.load(bais, null);
resource.load(bais, optionMap);
} else {
resource.load(stream, null);
resource.load(stream, optionMap);
}
for (ReferenceFileItem rfItem : (List<ReferenceFileItem>) item.getReferenceResources()) {
itemPath = getReferenceItemPath(importItem.getPath(), rfItem);
stream = manager.getStream(itemPath, importItem);
Resource rfResource = createResource(importItem, itemPath, true);
rfResource.load(stream, null);
rfResource.load(stream, optionMap);
}
Iterator<EObject> itRef = item.eCrossReferences().iterator();
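
The option map added for TUP-36820 uses the standard EMF switches for loading large XMI files: deferred attachment and deferred IDREF resolution cut notification and cross-reference cost during the load, a parser pool and shared name-to-feature map let repeated loads reuse SAX state, and deprecated method lookups are skipped. The same map can be passed to any XMLResource, as in this detached sketch (the file extension, path, and resource-factory setup are placeholders):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.xmi.XMLResource;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
import org.eclipse.emf.ecore.xmi.impl.XMLParserPoolImpl;

// Detached sketch: the same load options as above, applied to an arbitrary *.item file.
class FastXmiLoad {
    static Resource load(String path) throws IOException {
        Map<Object, Object> options = new HashMap<>();
        options.put(XMLResource.OPTION_DEFER_ATTACHMENT, Boolean.TRUE);
        options.put(XMLResource.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
        options.put(XMLResource.OPTION_USE_PARSER_POOL, new XMLParserPoolImpl());
        options.put(XMLResource.OPTION_USE_XML_NAME_TO_FEATURE_MAP, new HashMap<>());
        options.put(XMLResource.OPTION_USE_DEPRECATED_METHODS, Boolean.FALSE);

        ResourceSet rs = new ResourceSetImpl();
        rs.getResourceFactoryRegistry().getExtensionToFactoryMap().put("item", new XMIResourceFactoryImpl());
        Resource resource = rs.createResource(URI.createFileURI(path));
        resource.load(options);
        return resource;
    }
}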

View File

@@ -132,11 +132,12 @@ public class ImportDependencyRelationsHelper {
id = split[1];
}
}
boolean isGlobalRoutine = RelationshipItemBuilder.ROUTINE_RELATION.equals(relation.getType());
if (RelationshipItemBuilder.LATEST_VERSION.equals(relation.getVersion())) {
relatedNode = getLatestVersionItemImportNode(id, projectLabel, allImportItemNodesList);
relatedNode = getLatestVersionItemImportNode(id, projectLabel, allImportItemNodesList, isGlobalRoutine);
} else {
relatedNode = getItemImportNodeByIdVersion(id, projectLabel, relation.getVersion(),
allImportItemNodesList);
relatedNode = getItemImportNodeByIdVersion(id, relation.getVersion(), projectLabel,
allImportItemNodesList, isGlobalRoutine);
}
if (relatedNode != null && !toSelectSet.contains(relatedNode)) {
// avoid loop
@@ -148,13 +149,14 @@ public class ImportDependencyRelationsHelper {
}
public ItemImportNode getLatestVersionItemImportNode(String id, String projectTecLabel,
List<ItemImportNode> allImportItemNodesList) {
List<ItemImportNode> allImportItemNodesList, boolean isGlobalRoutine) {
List<ItemImportNode> allItemImportNodesById = getItemImportNode(allImportItemNodesList, node -> {
Property property = node.getItemRecord().getProperty();
boolean projectFlag = true;
if (StringUtils.isNotBlank(projectTecLabel)) {
projectFlag = node.getProjectNode().getProject().getTechnicalLabel().equals(projectTecLabel);
}
return node.getItemRecord().getProperty().getId().equals(id) && projectFlag;
return (isGlobalRoutine ? property.getLabel().equals(id) : property.getId().equals(id)) && projectFlag;
});
Optional<ItemImportNode> optional = allItemImportNodesById.stream().max((node1, node2) -> VersionUtils
.compareTo(node1.getItemRecord().getProperty().getVersion(), node2.getItemRecord().getProperty().getVersion()));
@@ -162,14 +164,15 @@ public class ImportDependencyRelationsHelper {
}
public ItemImportNode getItemImportNodeByIdVersion(String id, String version, String projectTecLabel,
List<ItemImportNode> allImportItemNodesList) {
List<ItemImportNode> allImportItemNodesList, boolean isGlobalRoutine) {
List<ItemImportNode> importNodeList = getItemImportNode(allImportItemNodesList, node -> {
boolean projectFlag = true;
if (StringUtils.isNotBlank(projectTecLabel)) {
projectFlag = node.getProjectNode().getProject().getTechnicalLabel().equals(projectTecLabel);
}
Property property = node.getItemRecord().getProperty();
return property.getId().equals(id) && property.getVersion().equals(version) && projectFlag;
return (isGlobalRoutine ? property.getLabel().equals(id) : property.getId().equals(id))
&& property.getVersion().equals(version) && projectFlag;
});
return importNodeList == null || importNodeList.isEmpty() ? null : importNodeList.get(0);
}

View File

@@ -1008,6 +1008,8 @@ DatabaseForm.hc.link.repository=Repository
DatabaseForm.hc.link.title=Hadoop Cluster
DatabaseForm.helpInfo.installDriverLink.url=https://document-link.us.cloud.talend.com/ts_ig_install-external-modules?version=73&lang=en&env=prd
DatabaseForm.helpInfo.installDriverLink.label=How to install a driver
DatabaseForm.supportnls=Support NLS
DatabaseForm.supportnls.warntip=This setting takes effect only after restarting the Studio.
DatabaseTableFilterForm.allSynonyms=All synonyms
DatabaseTableFilterForm.edit=Edit...
DatabaseTableFilterForm.editFilterName=Edit Filter Name

View File

@@ -66,6 +66,7 @@ import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
@@ -567,6 +568,8 @@ public class DatabaseForm extends AbstractForm {
private LabelledFileField dataprocPathToCredentialsForHiveTxt;
private Button isOracleSupportNLS;
/**
* Constructor to use by a Wizard to create a new database connection.
*
@@ -684,6 +687,8 @@ public class DatabaseForm extends AbstractForm {
if (getConnection().getDbVersionString() != null) {
dbVersionCombo.setText(getConnection().getDbVersionString());
}
isOracleSupportNLS.setSelection(getConnection().isSupportNLS());
fileField.setText(getConnection().getFileFieldName());
directoryField.setText(getConnection().getDBRootPath());
@@ -859,6 +864,7 @@ public class DatabaseForm extends AbstractForm {
dbVersionCombo.setReadOnly(isReadOnly());
datasourceText.setReadOnly(isReadOnly());
additionParamText.setReadOnly(isReadOnly());
isOracleSupportNLS.setEnabled(!isReadOnly());
fileField.setReadOnly(isReadOnly());
mappingFileText.setReadOnly(isReadOnly());
mappingSelectButton.setEnabled(isReadOnly());
@@ -1035,6 +1041,7 @@ public class DatabaseForm extends AbstractForm {
additionParamText = new LabelledText(typeDbCompositeParent, Messages.getString("DatabaseForm.AddParams"), 2); //$NON-NLS-1$
additionalJDBCSettingsText = new LabelledText(typeDbCompositeParent,
Messages.getString("DatabaseForm.hive.additionalJDBCSettings"), 2); //$NON-NLS-1$
createOracleUIForNLS(typeDbCompositeParent);
String[] extensions = { "*.*" }; //$NON-NLS-1$
fileField = new LabelledFileField(typeDbCompositeParent, Messages.getString("DatabaseForm.mdbFile"), extensions); //$NON-NLS-1$
@@ -1062,6 +1069,38 @@ public class DatabaseForm extends AbstractForm {
createHivePropertiesFields(typeDbCompositeParent);
}
private void createOracleUIForNLS(Composite parent) {
supportNLSContainer = new Composite(parent, SWT.NONE);
GridData containerLayoutData = new GridData();
containerLayoutData.exclude = true;
supportNLSContainer.setLayoutData(containerLayoutData);
supportNLSContainer.setVisible(false);
GridLayout containerLayout = new GridLayout();
containerLayout.numColumns = 2;
containerLayout.marginWidth = 0;
containerLayout.marginHeight = 0;
containerLayout.horizontalSpacing = 0;
containerLayout.makeColumnsEqualWidth = false;
supportNLSContainer.setLayout(containerLayout);
isOracleSupportNLS = new Button(supportNLSContainer, SWT.CHECK);
isOracleSupportNLS.setText(Messages.getString("DatabaseForm.supportnls"));//$NON-NLS-1$
GridData oracleSupportNLSLayoutData = new GridData();
isOracleSupportNLS.setLayoutData(oracleSupportNLSLayoutData);
isOracleSupportNLS.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
getConnection().setSupportNLS(isOracleSupportNLS.getSelection());
}
});
imageLabel = new Label(supportNLSContainer, SWT.NONE);
imageLabel.setImage(ImageProvider.getImage(EImage.WARNING_ICON));
imageLabel.setLayoutData(new GridData());
imageLabel.setToolTipText(Messages.getString("DatabaseForm.supportnls.warntip"));
}
private void createHiveDataprocField(Composite parent) {
dataprocProjectIdForHiveTxt = new LabelledText(parent, Messages.getString("DatabaseForm.dataproc.projectId"), 2);//$NON-NLS-1$
dataprocClusterIdForHiveTxt = new LabelledText(parent, Messages.getString("DatabaseForm.dataproc.clusterId"), 2); //$NON-NLS-1$
@@ -4464,6 +4503,7 @@ public class DatabaseForm extends AbstractForm {
enableDbVersion() ? versionStr : null, metadataconnection.getOtherParameters());
managerConnection.setDbRootPath(directoryField.getText());
managerConnection.setSupportNLS(isOracleSupportNLS.getSelection());
}
IPreferenceStore store = CoreUIPlugin.getDefault().getPreferenceStore();
@@ -5071,6 +5111,10 @@ public class DatabaseForm extends AbstractForm {
getConnection().setDbVersionString(version.getVersionValue());
boolean supportNLSOracleVersion = oracleVersionEnable()
&& isSupportNLSOracleVersion(dbVersionCombo.getText());
showOracleSupportNLS(supportNLSOracleVersion, supportNLSOracleVersion);
}
urlConnectionStringText.setText(getStringConnection());
checkFieldsValue();
@@ -6508,6 +6552,10 @@ public class DatabaseForm extends AbstractForm {
private static String DEFAULT_HIVE_METASTORE_PORT = "9083";
private Label imageLabel;
private Composite supportNLSContainer;
/**
* SetEditable fields.
*
@@ -6665,6 +6713,7 @@ public class DatabaseForm extends AbstractForm {
showIfHiveMetastore();
showIfSupportEncryption();
showIfAuthentication();
showOracleSupportNLS(oracleVersionEnable() && isSupportNLSOracleVersion(dbVersionCombo.getText()), visible);
hideHiveExecutionFields(!doSupportTez());
urlConnectionStringText.setEditable(!visible);
@@ -6969,6 +7018,34 @@ public class DatabaseForm extends AbstractForm {
compositeGroupDbSettings.layout();
}
private void showOracleSupportNLS(boolean show, boolean editable) {
GridData layoutData = (GridData) supportNLSContainer.getLayoutData();
layoutData.exclude = !show;
supportNLSContainer.setLayoutData(layoutData);
supportNLSContainer.setVisible(show);
imageLabel.setVisible(show);
// when the option is hidden, clear both the checkbox and the persisted flag so a stale value is not kept on the connection
if (!show) {
isOracleSupportNLS.setSelection(false);
getConnection().setSupportNLS(false);
} else {
isOracleSupportNLS.setEnabled(editable);
}
supportNLSContainer.getParent().layout();
}
private boolean isSupportNLSOracleVersion(String dbVersionString) {
if (!EDatabaseVersion4Drivers.ORACLE_8.getVersionDisplay().equals(dbVersionString)
&& !EDatabaseVersion4Drivers.ORACLE_9.getVersionDisplay().equals(dbVersionString)
&& !EDatabaseVersion4Drivers.ORACLE_10.getVersionDisplay().equals(dbVersionString)
&& !EDatabaseVersion4Drivers.ORACLE_11.getVersionDisplay().equals(dbVersionString)
&& !EDatabaseVersion4Drivers.ORACLE_12.getVersionDisplay().equals(dbVersionString)
) {
return true;
}
return false;
}
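The check above is an exclusion list: the NLS option is offered for any version string that is not Oracle 8 through 12. An equivalent, more compact sketch (not part of the original change; Set, Stream and Collectors imports assumed):
private static final Set<String> NON_NLS_ORACLE_VERSIONS = Stream
        .of(EDatabaseVersion4Drivers.ORACLE_8, EDatabaseVersion4Drivers.ORACLE_9, EDatabaseVersion4Drivers.ORACLE_10,
                EDatabaseVersion4Drivers.ORACLE_11, EDatabaseVersion4Drivers.ORACLE_12)
        .map(EDatabaseVersion4Drivers::getVersionDisplay)
        .collect(Collectors.toSet());
private boolean isSupportNLSOracleVersion(String dbVersionString) {
    // same behavior as the chain of negated equals above: true unless the version is Oracle 8-12
    return !NON_NLS_ORACLE_VERSIONS.contains(dbVersionString);
}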
private void collectContextParams() {
collectHiveContextParams();
collectHBaseContextParams();
@@ -7365,6 +7442,7 @@ public class DatabaseForm extends AbstractForm {
jDBCschemaText.setEditable(!isContextMode());
isOracleSupportNLS.setEnabled(!isContextMode());
generalMappingFileText.setEditable(!isContextMode());
mappingFileText.setEditable(!isContextMode());
if (isContextMode()) {

View File

@@ -60,7 +60,7 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.1</version>
<version>1.10.0</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
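For context, this bump lines up with CVE-2022-42889 ("Text4Shell"), which commons-text 1.10.0 addresses by no longer registering the script, dns and url lookups in the default interpolator. A minimal illustration of the affected API, not taken from this code base:
// illustrative only; requires org.apache.commons:commons-text on the classpath
import org.apache.commons.text.StringSubstitutor;
public class InterpolationDemo {
    public static void main(String[] args) {
        StringSubstitutor interpolator = StringSubstitutor.createInterpolator();
        // with the 1.5-1.9 defaults this could execute the script lookup; from 1.10.0 it is left unresolved
        System.out.println(interpolator.replace("${script:javascript:3 + 4}"));
    }
}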

View File

@@ -55,7 +55,7 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.1</version>
<version>1.10.0</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>

View File

@@ -36,10 +36,6 @@ import java.util.Properties;
import java.util.Set;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.junit.After;
import org.junit.Assert;
import org.junit.Ignore;
@@ -60,6 +56,7 @@ import org.talend.core.model.metadata.builder.database.manager.ExtractManagerFac
import org.talend.cwm.relational.RelationalFactory;
import org.talend.cwm.relational.TdColumn;
import org.talend.cwm.relational.TdTable;
import orgomg.cwm.objectmodel.core.Feature;
/**
@@ -650,7 +647,7 @@ public class AbstractTest4ExtractManager {
List list = extractMeta.getConnection(metadataConn.getDbType(), metadataConn.getUrl(), metadataConn.getUsername(),
metadataConn.getPassword(), metadataConn.getDatabase(), metadataConn.getSchema(), metadataConn.getDriverClass(),
metadataConn.getDriverJarPath(), metadataConn.getDbVersionString(), metadataConn.getAdditionalParams());
metadataConn.getDriverJarPath(), metadataConn.getDbVersionString(), metadataConn.getAdditionalParams(), metadataConn.isSupportNLS());
assertTrue(list.size() == 0);
list.add(conn);
assertTrue(list.size() != 0);

View File

@@ -16,6 +16,7 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.eclipse.core.resources.IProject;
@@ -41,6 +42,7 @@ import org.talend.core.model.properties.ItemRelations;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.model.properties.PropertiesFactory;
import org.talend.core.model.properties.Property;
import org.talend.core.model.properties.RoutineItem;
import org.talend.core.model.properties.User;
import org.talend.core.model.properties.impl.PropertiesFactoryImpl;
import org.talend.core.model.relationship.Relation;
@@ -93,8 +95,10 @@ public class ImportDependencyRelationsHelperTest {
allImportItemNodesList.add(importNode);
propertyList.add(property);
}
// test0 --> test1 --> test2
createRelations(propertyList);
Property property2 = propertyList.get(2);
// label test2 version 0.7
Property property3 = PropertiesFactory.eINSTANCE.createProperty();
property3.setId(property2.getId());
property3.setLabel(property2.getLabel());
@@ -108,6 +112,34 @@ public class ImportDependencyRelationsHelperTest {
projectNode.addChild(importNode);
allImportItemNodesList.add(importNode);
propertyList.add(property3);
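// The two routine properties below share one id and label but have versions 0.1 and 0.7;
// they back the label-based (isGlobalRoutine = true) lookups exercised in the tests further down.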
Property routineProperty = PropertiesFactory.eINSTANCE.createProperty();
routineProperty.setId(ProxyRepositoryFactory.getInstance().getNextId());
routineProperty.setLabel("testRoutine");
routineProperty.setVersion("0.1");
RoutineItem routineItem = PropertiesFactory.eINSTANCE.createRoutineItem();
routineProperty.setItem(routineItem);
ImportItem routineItemRecord = new ImportItem(new Path(fakePath + "/" + technicalLabel + "/code/routines/"
+ routineProperty.getLabel() + "_" + routineProperty.getVersion() + ".item"));
routineItemRecord.setProperty(routineProperty);
ItemImportNode routineImportNode = new ItemImportNode(routineItemRecord);
projectNode.addChild(routineImportNode);
allImportItemNodesList.add(routineImportNode);
propertyList.add(routineProperty);
Property routineProperty1 = PropertiesFactory.eINSTANCE.createProperty();
routineProperty1.setId(routineProperty.getId());
routineProperty1.setLabel("testRoutine");
routineProperty1.setVersion("0.7");
RoutineItem routineItem1 = PropertiesFactory.eINSTANCE.createRoutineItem();
routineProperty1.setItem(routineItem1);
ImportItem routineItemRecord1 = new ImportItem(new Path(fakePath + "/" + technicalLabel + "/code/routines/"
+ routineProperty1.getLabel() + "_" + routineProperty1.getVersion() + ".item"));
routineItemRecord1.setProperty(routineProperty1);
ItemImportNode routineImportNode1 = new ItemImportNode(routineItemRecord1);
projectNode.addChild(routineImportNode1);
allImportItemNodesList.add(routineImportNode1);
propertyList.add(routineProperty1);
ImportCacheHelper.getInstance().getPathWithProjects().put(fakeProjectPath, project.getEmfProject());
}
@@ -131,7 +163,35 @@ public class ImportDependencyRelationsHelperTest {
helperInstance.checkImportRelationDependency(checkedNodeList, toSelectSet, allImportItemNodesList);
Assert.assertTrue(toSelectSet.size() == 3);
// to test loop dependency
Map<Relation, Set<Relation>> importItemsRelations = helperInstance.getImportItemsRelations(fakeProjectPath);
Property jobProperty = propertyList.get(3);
Relation baseRelation = new Relation();
baseRelation.setId(jobProperty.getId());
baseRelation.setType(RelationshipItemBuilder.JOB_RELATION);
baseRelation.setVersion(jobProperty.getVersion());
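// the routine relation below references the routine by label and LATEST_VERSION rather than by id and a fixed version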
Relation relatedRelation = new Relation();
relatedRelation.setId(propertyList.get(4).getLabel());
relatedRelation.setType(RelationshipItemBuilder.ROUTINE_RELATION);
relatedRelation.setVersion(RelationshipItemBuilder.LATEST_VERSION);
Set<Relation> relationSet = new HashSet<Relation>();
relationSet.add(relatedRelation);
importItemsRelations.put(baseRelation, relationSet);
toSelectSet.clear();
toSelectSet.add(allImportItemNodesList.get(0));
helperInstance.checkImportRelationDependency(checkedNodeList, toSelectSet, allImportItemNodesList);
Assert.assertTrue(toSelectSet.size() == 4);
}
@Test
public void checkImportRelationWithLoopDependency() {
helperInstance.clear();
helperInstance.loadRelations(fakeProjectPath, project.getEmfProject().getItemsRelations());
Set<ItemImportNode> toSelectSet = new HashSet<ItemImportNode>();
List<ItemImportNode> checkedNodeList = new ArrayList<ItemImportNode>();
checkedNodeList.add(allImportItemNodesList.get(0));
toSelectSet.add(allImportItemNodesList.get(0));
// to test loop dependency test0 --> test1 --> test2 --> test0
Map<Relation, Set<Relation>> importItemsRelations = helperInstance.getImportItemsRelations(fakeProjectPath);
Property property3 = propertyList.get(3);
Relation baseRelation = new Relation();
@@ -145,20 +205,87 @@ public class ImportDependencyRelationsHelperTest {
Set<Relation> relationSet = new HashSet<Relation>();
relationSet.add(relatedRelation);
importItemsRelations.put(baseRelation, relationSet);
toSelectSet.clear();
toSelectSet.add(allImportItemNodesList.get(0));
helperInstance.checkImportRelationDependency(checkedNodeList, toSelectSet, allImportItemNodesList);
Assert.assertTrue(toSelectSet.size() == 3);
}
@Test
public void checkImportRelationDependencyWithMultiVersion() {
helperInstance.clear();
helperInstance.loadRelations(fakeProjectPath, project.getEmfProject().getItemsRelations());
Set<ItemImportNode> toSelectSet = new HashSet<ItemImportNode>();
List<ItemImportNode> checkedNodeList = new ArrayList<ItemImportNode>();
// test1 --> test2 latest (0.7)
// test1 --> test2 0.1
Relation relatedRelation = new Relation();
relatedRelation.setId(propertyList.get(3).getId());
relatedRelation.setType(RelationshipItemBuilder.JOB_RELATION);
relatedRelation.setVersion("0.1");
Map<Relation, Set<Relation>> importItemsRelations = helperInstance.getImportItemsRelations(fakeProjectPath);
String test1_id = propertyList.get(1).getId();
Optional<Relation> optional = importItemsRelations.keySet().stream().filter(relation -> relation.getId().equals(test1_id))
.findFirst();
Assert.assertTrue(optional.isPresent());
importItemsRelations.get(optional.get()).add(relatedRelation);
checkedNodeList.add(allImportItemNodesList.get(1));
toSelectSet.add(allImportItemNodesList.get(1));
helperInstance.checkImportRelationDependency(checkedNodeList, toSelectSet, allImportItemNodesList);
Assert.assertTrue(toSelectSet.size() == 3);
toSelectSet.clear();
checkedNodeList.add(allImportItemNodesList.get(0));
toSelectSet.add(allImportItemNodesList.get(0));
helperInstance.checkImportRelationDependency(checkedNodeList, toSelectSet, allImportItemNodesList);
Assert.assertTrue(toSelectSet.size() == 4);
}
@Test
public void testGetItemImportNodeByIdVersion() {
ItemImportNode theVersionNode = helperInstance.getItemImportNodeByIdVersion(propertyList.get(2).getId(), "0.1", null,
allImportItemNodesList, false);
Property importRecordProperty = theVersionNode.getItemRecord().getProperty();
Property property3 = propertyList.get(3);
Assert.assertEquals(property3.getId(), importRecordProperty.getId());
Assert.assertEquals("0.1", importRecordProperty.getVersion());
theVersionNode = helperInstance.getItemImportNodeByIdVersion(propertyList.get(2).getId(), "0.7", null,
allImportItemNodesList, false);
importRecordProperty = theVersionNode.getItemRecord().getProperty();
Assert.assertEquals(property3.getId(), importRecordProperty.getId());
Assert.assertEquals("0.7", importRecordProperty.getVersion());
Property routineProperty = propertyList.get(4);
ItemImportNode routineImportNode = helperInstance.getItemImportNodeByIdVersion(routineProperty.getLabel(), "0.1", null,
allImportItemNodesList, true);
Property routineImportProperty = routineImportNode.getItemRecord().getProperty();
Assert.assertEquals(routineProperty.getId(), routineImportProperty.getId());
Assert.assertEquals(routineProperty.getLabel(), routineImportProperty.getLabel());
Assert.assertEquals("0.1", routineImportProperty.getVersion());
routineImportNode = helperInstance.getItemImportNodeByIdVersion(routineProperty.getLabel(), "0.7", null,
allImportItemNodesList, true);
routineImportProperty = routineImportNode.getItemRecord().getProperty();
Assert.assertEquals(routineProperty.getId(), routineImportProperty.getId());
Assert.assertEquals(routineProperty.getLabel(), routineImportProperty.getLabel());
Assert.assertEquals("0.7", routineImportProperty.getVersion());
}
@Test
public void testGetLatestVersionItemImportNode() {
ItemImportNode latestVersionNode = helperInstance.getLatestVersionItemImportNode(propertyList.get(2).getId(),
null, allImportItemNodesList);
null, allImportItemNodesList, false);
Property latestVersionProperty = latestVersionNode.getItemRecord().getProperty();
Property property3 = propertyList.get(3);
Assert.assertEquals(latestVersionProperty.getId(), property3.getId());
Assert.assertEquals(latestVersionProperty.getVersion(), property3.getVersion());
ItemImportNode latestRoutineNode = helperInstance.getLatestVersionItemImportNode(propertyList.get(4).getLabel(), null,
allImportItemNodesList, true);
Property latestRoutineProperty = latestRoutineNode.getItemRecord().getProperty();
Property property5 = propertyList.get(5);
Assert.assertEquals(latestRoutineProperty.getId(), property5.getId());
Assert.assertEquals(latestRoutineProperty.getLabel(), property5.getLabel());
Assert.assertEquals(latestRoutineProperty.getVersion(), property5.getVersion());
}
private void createRelations(List<Property> propertyList) {

View File

@@ -5,3 +5,4 @@ Bundle-SymbolicName: org.talend.repository.metadata.test
Bundle-Version: 7.3.1.qualifier
Fragment-Host: org.talend.repository.metadata
Require-Bundle: org.talend.testutils
Import-Package: org.apache.commons.io

View File

@@ -21,8 +21,10 @@ import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.emf.common.util.URI;
@@ -42,6 +44,7 @@ import org.talend.cwm.helper.ConnectionHelper;
import org.talend.cwm.helper.PackageHelper;
import org.talend.model.emf.CwmResource;
import org.talend.utils.io.FilesUtils;
import orgomg.cwm.objectmodel.core.Package;
/**
@@ -170,7 +173,7 @@ public class ConnectionUUIDHelperTest {
File testDataFile = getTestDataFile(TEST_DB_MYSQL_ITEM);
assertNotNull(testDataFile);
assertEquals(FilesUtils.getChecksumAlder32(testDataFile), FilesUtils.getChecksumAlder32(copiedFile));
assertTrue(FileUtils.contentEqualsIgnoreEOL(testDataFile, copiedFile, StandardCharsets.UTF_8.toString()));
}
@Test
@@ -196,7 +199,7 @@ public class ConnectionUUIDHelperTest {
File testRemovedTableFile = getTestDataFile(TEST_DB_MYSQL_REMOVE_TABLE_ITEM);
assertNotNull(testRemovedTableFile);
assertEquals(FilesUtils.getChecksumAlder32(testRemovedTableFile), FilesUtils.getChecksumAlder32(removedFile));
assertTrue(FileUtils.contentEqualsIgnoreEOL(testRemovedTableFile, removedFile, StandardCharsets.UTF_8.toString()));
}
@Test
@@ -219,7 +222,7 @@ public class ConnectionUUIDHelperTest {
originalItemResourse.save(changedFos, null);
File testDataFile = getTestDataFile(TEST_DB_MYSQL_DIFF_LABEL_ITEM);
assertEquals(FilesUtils.getChecksumAlder32(testDataFile), FilesUtils.getChecksumAlder32(changedFile));
assertTrue(FileUtils.contentEqualsIgnoreEOL(testDataFile, changedFile, StandardCharsets.UTF_8.toString()));
}
}