Compare commits

...

17 Commits

Author SHA1 Message Date
jzhao
ab29cb303f fix(TDI-41506):High CVE Vulnerability found in Talend Components (#3119)
* upgrade commons-fileupload from 1.0 to 1.3.1
* upgrade xalan/serializer from 2.7.1 to 2.7.2
* remove serializer-2.7.1
2019-03-04 15:40:45 +08:00
jzhao
2c5e280c3d fix(TDI-41320): Fix oracle 11 migration task use existing connection (#2902) (#2933) (#3244) 2019-02-15 17:18:00 +08:00
hwang-talend
5cae20af15 bugfix(TUP-21895):An error has occurred. UNEXPECTED_EXCEPTION when (#3217)
opening job migrated 6.4.1 to 7.1.1 (job using tJDBCInput)
2019-02-15 17:02:37 +08:00
hcyi
7bbc06f153 fix(TUP-21888):NPE when importing job. (#3236) 2019-02-15 16:46:30 +08:00
Laurent BOURGEOIS
7a709f3056 TUP-21356 : "talend-bigdata-launcher-1.2.0-20170410.jar" not found when spark job launched from standard job by jobserver (#3218) 2019-02-04 00:13:36 +01:00
BOURGEOIS Laurent
55cd1934c9 Revert "fix(TUP-21356):talend-bigdata-launcher-1.2.0-20170410.jar not found (#3077) (#3209)"
This reverts commit b05d01e4fe.
2019-02-01 00:17:40 +01:00
Laurent BOURGEOIS
b05d01e4fe fix(TUP-21356):talend-bigdata-launcher-1.2.0-20170410.jar not found (#3077) (#3209) 2019-01-31 21:56:03 +01:00
Stéphane Bouchet
cb6334969d Merging missing fix from old patch branch (#3195)
* TESB-24093 : context values can be overriten by MS.

* fix(TESB-22833): Compile route before generate manifest (#2951)

* fix(TESB-22833): Compile route before generate manifest

* fix(TESB-22833): replace maven by jdt compiler for import packages

* fix(TESB-22833): Fixed packages not allowed: java.util" problem (#2960)
2019-01-25 10:32:36 +01:00
Zhiwei Xue
8c78159f7c fix(TUP-21532):re-fix child assembly problem. (#3181) 2019-01-22 14:19:36 +08:00
Zhiwei Xue
edaebf6bd7 fix(TUP-21532):fix StandardJobStandaloneBuildProviderTest (#3175) 2019-01-22 14:16:59 +08:00
Zhiwei Xue
931bc1d107 fix(TUP-21532): fix run job and assembly problem. (#3168) 2019-01-22 14:16:20 +08:00
Zhiwei Xue
f19a2f73ea fix(TUP-21532):fix junit. (#3141) 2019-01-22 14:07:42 +08:00
Zhiwei Xue
fc68a47bbb fix(TUP-21532):Standard DI job which refers to big data batch job having (#3120) (#3130)
* fix(TUP-21532):Standard DI job which refers to big data batch job having
S3 component and standard DI job having S3 connection component fails
with an error.

* fix(TUP-21532):add JUnit
2019-01-22 14:06:46 +08:00
jiezhang-tlnd
947666d009 fix(TUP-21005)In studio Java Debugging of Routes fails (#2871) 2019-01-07 14:51:39 +08:00
hcyi
e5c864c8ba fix(TUP-21160):refix JDBC connection will not commit even if autocommit (#3033) (#3041)
is set to True.
2018-12-10 10:24:30 +08:00
kjwang-talend
962c04b466 fix(TUP-21160):JDBC connection will not commit even if autocommit is set (#2976) (#2981)
to True.
2018-12-10 10:24:03 +08:00
Stéphane Bouchet
083f8b3ec7 TESB-24093 : context values can be overriten by MS. (#2947) (#2971) 2018-11-20 17:00:24 +01:00
23 changed files with 239 additions and 113 deletions

View File

@@ -506,7 +506,7 @@
if (inContext == null) {
inContext = <%=className%>.class.getClassLoader().getResourceAsStream("config/contexts/" + contextStr + ".properties");
}
- if (inContext != null) {
+ if (inContext != null && context != null && context.isEmpty()) {
//defaultProps is in order to keep the original context value
defaultProps.load(inContext);
inContext.close();

View File

@@ -312,7 +312,7 @@ public <%=JavaTypesManager.getTypeToGenerate(ctxParam.getType(),true)%> get<%=Ch
}
%>
}
- private ContextProperties context = new ContextProperties();
+ protected ContextProperties context = new ContextProperties(); // will be instanciated by MS.
public ContextProperties getContext() {
return this.context;
}
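
The two template changes above work together: the context field generated into each job is widened from private to protected so an external launcher (the "MS" mentioned in the commit, presumably a microservice wrapper) can inject context values, and the bundled config/contexts/*.properties defaults are then only loaded when nothing was injected. A minimal, self-contained sketch of the injection side (all class names hypothetical, not Talend's generated code):

import java.util.Properties;

class GeneratedJobSketch {
    protected Properties context = new Properties(); // mirrors the now-protected generated field
    public Properties getContext() { return context; }
}

class MicroserviceLauncherSketch extends GeneratedJobSketch {
    MicroserviceLauncherSketch(Properties injected) {
        this.context = injected; // would not compile against a private field
    }
}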

View File

@@ -144,8 +144,7 @@
REQUIRED_IF="GENERATION_MODE == 'DOM4J'" />
<IMPORT NAME="Java_xercesImpl" MODULE="xercesImpl.jar" MVN="mvn:org.talend.libraries/xercesImpl/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.apache.xml/lib/xercesImpl.jar"
REQUIRED_IF="(GENERATION_MODE == 'XERCES') OR (GENERATION_MODE == 'SAX')" BundleID="" />
- <IMPORT NAME="Java_xalan" MODULE="xalan.jar" MVN="mvn:org.talend.libraries/xalan/6.0.0" UrlPath="platform:/base/plugins/org.apache.xalan_2.7.1.v201005080400.jar"
- REQUIRED_IF="GENERATION_MODE == 'XERCES'" BundleID="" />
+ <IMPORT NAME="Java_xalan" MODULE="xalan-2.7.2.jar" MVN="mvn:xalan/xalan/2.7.2" REQUIRED_IF="GENERATION_MODE == 'XERCES'" BundleID="" />
<IMPORT NAME="Java_talendSAX" MODULE="TalendSAX.jar" MVN="mvn:org.talend.libraries/TalendSAX/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.xml/lib/TalendSAX.jar"
REQUIRED_IF="GENERATION_MODE == 'SAX'" />
<IMPORT NAME="xpathutil" MODULE="xpathutil-1.0.0.jar" MVN="mvn:org.talend.libraries/xpathutil-1.0.0/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.xml/lib/xpathutil-1.0.0.jar"

View File

@@ -326,7 +326,7 @@
<IMPORT NAME="commons-log" MODULE="commons-logging-1.1.jar" MVN="mvn:org.talend.libraries/commons-logging-1.1/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-logging-1.1.jar" REQUIRED="true" />
<IMPORT NAME="commons-beanutils" MODULE="commons-beanutils-1.7.jar" MVN="mvn:org.talend.libraries/commons-beanutils-1.7/6.0.0" UrlPath="platform:/base/plugins/org.apache.commons.beanutils_1.7.0.v200902170505.jar" REQUIRED="true" />
<IMPORT NAME="commons-colections" MODULE="commons-collections-3.2.2.jar" MVN="mvn:commons-collections/commons-collections/3.2.2" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-collections-3.2.2.jar" REQUIRED="true" BundleID="" />
- <IMPORT NAME="commons-fileupload" MODULE="commons-fileupload-1.0.jar" MVN="mvn:org.talend.libraries/commons-fileupload-1.0/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-fileupload-1.0.jar" REQUIRED="true" />
+ <IMPORT NAME="commons-fileupload" MODULE="commons-fileupload-1.3.1.jar" MVN="mvn:org.talend.libraries/commons-fileupload-1.3.1/6.0.0" REQUIRED="true" />
<IMPORT NAME="commons-pool" MODULE="commons-pool-1.2.jar" MVN="mvn:org.talend.libraries/commons-pool-1.2/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-pool-1.2.jar" REQUIRED="true" />
<IMPORT NAME="commons-digester" MODULE="commons-digester-1.7.jar" MVN="mvn:org.talend.libraries/commons-digester-1.7/6.0.0" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-digester-1.7.jar" REQUIRED="true" />
<IMPORT NAME="commons-httpclient" MODULE="commons-httpclient-3.0.1.jar" MVN="mvn:commons-httpclient/commons-httpclient/3.0.1" UrlPath="platform:/plugin/org.talend.libraries.apache.http/lib/commons-httpclient-3.0.1.jar" REQUIRED="true" />

View File

@@ -347,15 +347,14 @@
<IMPORT
NAME="xalan"
- MODULE="xalan-2.7.1.jar"
- MVN="mvn:org.talend.libraries/xalan-2.7.1/6.0.0"
- UrlPath="platform:/base/plugins/org.apache.xalan_2.7.1.v201005080400.jar"
+ MODULE="xalan-2.7.2.jar"
+ MVN="mvn:xalan/xalan/2.7.2"
REQUIRED="true"
/>
<IMPORT
NAME="serializer"
- MODULE="serializer-2.7.1.jar"
- MVN="mvn:xalan/serializer/2.7.1"
+ MODULE="serializer-2.7.2.jar"
+ MVN="mvn:xalan/serializer/2.7.2"
REQUIRED="true"
/>

View File

@@ -61,6 +61,9 @@ public class GenericTableUtils {
List<String> valueList = new ArrayList<>();
if(dbService != null){
for(String v:values){
+ if(v == null || v.length() <= 0){
+ continue;
+ }
if(param.getName().equals(EConnectionParameterName.GENERIC_DRIVER_JAR.getDisplayName())){
v = dbService.getMVNPath(v);
}

View File

@@ -737,6 +737,12 @@ public class StatsAndLogsManager {
if (isGeneric) {// reset the show if
resetShowIf(connectionNode);
if (checkUrlContainsAutoCommit(connectionNode)) {
IElementParameter autoCommitParam = connectionNode.getElementParameter("autocommit");//$NON-NLS-1$
if (autoCommitParam != null) {
autoCommitParam.setValue(Boolean.TRUE);
}
}
}
if (connectionComponentName.contains("Oracle")) {//$NON-NLS-1$
@@ -764,13 +770,44 @@ public class StatsAndLogsManager {
}
}
}
- DataConnection dataConnec = createDataConnectionForSubJobOK(dataNode, commitNode);
- ((List<IConnection>) dataNode.getOutgoingConnections()).add(dataConnec);
- ((List<IConnection>) commitNode.getIncomingConnections()).add(dataConnec);
+ boolean noCommitNode = false;
+ if (checkUrlContainsAutoCommit(connectionNode)) {
+ IElementParameter autoCommitParam = connectionNode.getElementParameter("autocommit");//$NON-NLS-1$
+ if (autoCommitParam != null && autoCommitParam.getValue() != null) {
+ noCommitNode = Boolean.parseBoolean(autoCommitParam.getValue().toString());
+ if (noCommitNode && nodeList.contains(commitNode)) {
+ nodeList.remove(commitNode);
+ }
+ }
+ }
+ if (!noCommitNode) {
+ DataConnection dataConnec = createDataConnectionForSubJobOK(dataNode, commitNode);
+ ((List<IConnection>) dataNode.getOutgoingConnections()).add(dataConnec);
+ ((List<IConnection>) commitNode.getIncomingConnections()).add(dataConnec);
+ }
return connectionNode;
}
+ private static boolean checkUrlContainsAutoCommit(DataNode connectionNode) {
+ if (connectionNode != null) {
+ boolean isGeneric = connectionNode.getComponent().getComponentType() == EComponentType.GENERIC;
+ if (isGeneric) {
+ IElementParameter urlParam = connectionNode
+ .getElementParameter(EConnectionParameterName.GENERIC_URL.getDisplayName());
+ if (urlParam != null) {
+ Object obj = urlParam.getValue();
+ if (obj != null && obj instanceof String) {
+ String url = (String) obj;
+ if (url != null && url.toLowerCase().contains("autocommit=true")) {//$NON-NLS-1$
+ return true;
+ }
+ }
+ }
+ }
+ }
+ return false;
+ }
private static void resetShowIf(DataNode connectionNode) {
ComponentProperties tcomp_properties = connectionNode.getComponentProperties();
if (tcomp_properties != null) {
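
The autocommit handling added above follows standard JDBC semantics: when autocommit is on (detected here from an autocommit=true fragment in the generic JDBC URL by checkUrlContainsAutoCommit), every statement is committed as it executes, so wiring a dedicated commit node onto the stats/logs connection would be redundant. A hedged, standalone illustration (driver, URL and credentials are hypothetical):

import java.sql.Connection;
import java.sql.DriverManager;

public class AutoCommitSketch {
    public static void main(String[] args) throws Exception {
        // The exact URL parameter spelling varies by driver; "autocommit=true" is the
        // token the migration code above looks for, case-insensitively.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/demo?autocommit=true", "demo_user", "demo_pwd")) {
            conn.createStatement().executeUpdate("INSERT INTO t_stats VALUES (1)");
            // No conn.commit() needed: each statement commits immediately.
        }
    }
}
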

View File

@@ -10,27 +10,29 @@
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
- package org.talend.designer.runprocess.bigdata;
+ package org.talend.designer.core.utils;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.emf.common.util.EList;
import org.osgi.framework.Bundle;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.core.hadoop.HadoopConstants;
import org.talend.core.hadoop.version.EHadoopDistributions;
import org.talend.core.model.general.ModuleNeeded;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.model.properties.Property;
import org.talend.core.model.process.IElementParameter;
import org.talend.core.model.process.IProcess;
import org.talend.core.model.process.IProcess2;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
/**
* created by nrousseau on Mar 24, 2018 Detailled comment
@@ -38,10 +40,10 @@ import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
*/
public class BigDataJobUtil {
- private ProcessItem processItem;
+ private IProcess process;
- public BigDataJobUtil(ProcessItem processItem) {
- this.processItem = processItem;
+ public BigDataJobUtil(IProcess process) {
+ this.process = process;
}
/**
@@ -68,11 +70,10 @@ public class BigDataJobUtil {
public boolean isSparkWithHDInsight() {
boolean isSparkWithHDInsight = false;
if (isBDJobWithFramework(ERepositoryObjectType.PROCESS_MR, HadoopConstants.FRAMEWORK_SPARK)
- || isBDJobWithFramework(ERepositoryObjectType.PROCESS_STORM,
- HadoopConstants.FRAMEWORK_SPARKSTREAMING)) {
- EList<ElementParameterType> parameters = processItem.getProcess().getParameters().getElementParameter();
+ || isBDJobWithFramework(ERepositoryObjectType.PROCESS_STORM, HadoopConstants.FRAMEWORK_SPARKSTREAMING)) {
+ List<? extends IElementParameter> parameters = process.getElementParametersWithChildrens();
boolean modeParameterVisited = false;
- for (ElementParameterType pt : parameters) {
+ for (IElementParameter pt : parameters) {
if (pt.getName().equals("SPARK_LOCAL_MODE")) { //$NON-NLS-1$
modeParameterVisited = true;
if ("true".equals(pt.getValue())) { //$NON-NLS-1$
@@ -96,11 +97,11 @@ public class BigDataJobUtil {
public boolean isMRWithHDInsight() {
Boolean isMRWithHDInsight = false;
- if (processItem != null) {
+ if (process != null) {
isMRWithHDInsight = false;
if (isBDJobWithFramework(ERepositoryObjectType.PROCESS_MR, HadoopConstants.FRAMEWORK_MAPREDUCE)) {
- EList<ElementParameterType> parameters = processItem.getProcess().getParameters().getElementParameter();
- for (ElementParameterType pt : parameters) {
+ List<? extends IElementParameter> parameters = process.getElementParametersWithChildrens();
+ for (IElementParameter pt : parameters) {
if (pt.getName().equals("DISTRIBUTION") //$NON-NLS-1$
&& EHadoopDistributions.MICROSOFT_HD_INSIGHT.getName().equals(pt.getValue())) {
isMRWithHDInsight = true;
@@ -119,11 +120,10 @@ public class BigDataJobUtil {
Boolean isSparkInYarnClusterMode = false;
// Test if we are in Spark or Spark streaming
if (isBDJobWithFramework(ERepositoryObjectType.PROCESS_MR, HadoopConstants.FRAMEWORK_SPARK)
- || isBDJobWithFramework(ERepositoryObjectType.PROCESS_STORM,
- HadoopConstants.FRAMEWORK_SPARKSTREAMING)) {
+ || isBDJobWithFramework(ERepositoryObjectType.PROCESS_STORM, HadoopConstants.FRAMEWORK_SPARKSTREAMING)) {
- EList<ElementParameterType> parameters = processItem.getProcess().getParameters().getElementParameter();
- for (ElementParameterType pt : parameters) {
+ List<? extends IElementParameter> parameters = process.getElementParametersWithChildrens();
+ for (IElementParameter pt : parameters) {
if (HadoopConstants.SPARK_MODE.equals(pt.getName())
&& HadoopConstants.SPARK_MODE_YARN_CLUSTER.equals(pt.getValue())) {
isSparkInYarnClusterMode = true;
@@ -135,31 +135,23 @@ public class BigDataJobUtil {
}
private boolean isBDJobWithFramework(ERepositoryObjectType objectType, String frameworkName) {
if (processItem != null) {
// Storm/SparkStreaming(PROCESS_STORM), MR/Spark(PROCESS_MR)
if (ERepositoryObjectType.getItemType(processItem).equals(objectType)) { // have same type
Property property = processItem.getProperty();
if (property != null && property.getAdditionalProperties() != null
&& frameworkName.equals(property.getAdditionalProperties().get(HadoopConstants.FRAMEWORK))) {
return true;
}
}
// Storm/SparkStreaming(PROCESS_STORM), MR/Spark(PROCESS_MR)
if (process != null && process instanceof IProcess2 && ((IProcess2) process).getAdditionalProperties() != null
&& frameworkName.equals(((IProcess2) process).getAdditionalProperties().get(HadoopConstants.FRAMEWORK))) {
return true;
}
return false;
}
/** Find the distribution where the generated jar rquired to have the context files inside **/
public boolean needsToHaveContextInsideJar() {
if (processItem.getProcess() != null && processItem.getProcess().getParameters() != null) {
EList<ElementParameterType> parameters = processItem.getProcess().getParameters().getElementParameter();
for (ElementParameterType pt : parameters) {
List<? extends IElementParameter> parameters = process.getElementParametersWithChildrens();
if (process != null && parameters != null) {
for (IElementParameter pt : parameters) {
if (pt.getName().equals("DISTRIBUTION")) { //$NON-NLS-1$
String value = pt.getValue();
if ("MICROSOFT_HD_INSIGHT".equals(value) //$NON-NLS-1$
|| "GOOGLE_CLOUD_DATAPROC".equals(value) //$NON-NLS-1$
|| "CLOUDERA_ALTUS".equals(value)) { //$NON-NLS-1$
return true;
}
return true;
}
}
if (isSparkWithYarnClusterMode()) {
@@ -169,6 +161,27 @@ public class BigDataJobUtil {
return false;
}
+ public void setExcludedModules(Collection<ModuleNeeded> modulesNeeded) {
+ if (isMRWithHDInsight() || isSparkWithHDInsight()) {
+ // we need to exclude every non-MR Required jars.
+ for (ModuleNeeded currentModule : modulesNeeded) {
+ if (currentModule.isMrRequired()) {
+ currentModule.setExcluded(true);
+ }
+ }
+ }
+ }
+ public void removeExcludedModules(Collection<ModuleNeeded> modulesNeeded) {
+ Iterator<ModuleNeeded> itModules = modulesNeeded.iterator();
+ while (itModules.hasNext()) {
+ ModuleNeeded module = itModules.next();
+ if (module.isExcluded()) {
+ itModules.remove();
+ }
+ }
+ }
/**
* DOC nrousseau Comment method "getShadedModulesExclude".
*
@@ -182,6 +195,8 @@ public class BigDataJobUtil {
for (ModuleNeeded currentModule : modulesNeeded) {
if (!currentModule.isMrRequired()) {
excludedModules.add(currentModule);
+ } else {
+ currentModule.setExcluded(true);
}
}
}

View File

@@ -27,12 +27,9 @@ import org.apache.commons.lang.StringUtils;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.talend.core.CorePlugin;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.ILibraryManagerService;
import org.talend.core.hadoop.IHadoopClusterService;
import org.talend.core.hadoop.repository.HadoopRepositoryUtil;
import org.talend.core.model.general.ModuleNeeded;
import org.talend.core.model.general.ModuleNeeded.ELibraryInstallStatus;
import org.talend.core.model.process.EParameterFieldType;
import org.talend.core.model.process.IContext;
import org.talend.core.model.process.IContextParameter;
@@ -106,6 +103,10 @@ public class JavaProcessUtil {
}
}
+ if (BitwiseOptionUtils.containOption(options, TalendProcessOptionConstants.MODULES_EXCLUDE_SHADED)) {
+ new BigDataJobUtil(process).removeExcludedModules(modulesNeeded);
+ }
return new HashSet<ModuleNeeded>(modulesNeeded);
}
@@ -208,6 +209,7 @@ public class JavaProcessUtil {
if (hadoopItemId != null) {
useCustomConfsJarIfNeeded(modulesNeeded, hadoopItemId);
}
+ new BigDataJobUtil(process).setExcludedModules(modulesNeeded);
}
public static String getHadoopClusterItemId(INode node) {

View File

@@ -55,15 +55,5 @@
</includes>
</fileSet>
</fileSets>
- <dependencySets>
- <!-- for all jobs -->
- <dependencySet>
- <includes>
- <!--@JobIncludes@-->
- </includes>
- <outputFileNameMapping>${artifact.build.finalName}.${artifact.extension}</outputFileNameMapping>
- <useProjectArtifact>true</useProjectArtifact>
- <unpack>true</unpack>
- </dependencySet>
- </dependencySets>
+ <dependencySets></dependencySets>
</assembly>

View File

@@ -209,32 +209,5 @@
</includes>
</fileSet>
</fileSets>
- <dependencySets>
- <dependencySet>
- <outputDirectory>lib</outputDirectory>
- <includes>
- <!--@TalendLibIncludes@-->
- </includes>
- <!-- without version for talend libraries -->
- <outputFileNameMapping>${artifact.artifactId}.${artifact.extension}
- </outputFileNameMapping>
- <useProjectArtifact>false</useProjectArtifact>
- </dependencySet>
- <dependencySet>
- <outputDirectory>lib</outputDirectory>
- <includes>
- <!--@3rdPartyLibIncludes@-->
- </includes>
- <useProjectArtifact>false</useProjectArtifact>
- </dependencySet>
- <!-- for all jobs -->
- <dependencySet>
- <outputDirectory>${talend.job.name}</outputDirectory>
- <includes>
- <!--@JobIncludes@-->
- </includes>
- <outputFileNameMapping>${artifact.build.finalName}.${artifact.extension}</outputFileNameMapping>
- <useProjectArtifact>true</useProjectArtifact>
- </dependencySet>
- </dependencySets>
+ <dependencySets></dependencySets>
</assembly>

View File

@@ -742,6 +742,10 @@ public class DefaultRunProcessService implements IRunProcessService {
return ProcessorUtilities.isExportConfig();
}
+ public boolean isdebug() {
+ return ProcessorUtilities.isdebug();
+ }
@Override
public void buildCodesJavaProject(IProgressMonitor monitor) {
try {

View File

@@ -478,4 +478,10 @@ public class RunProcessService implements IRunProcessService {
delegateService.handleJobDependencyLoop(mainJobInfo, listJobs, progressMonitor);
}
+ @Override
+ public boolean isdebug() {
+ return delegateService.isdebug();
+ }
}

View File

@@ -41,6 +41,7 @@ import org.talend.core.runtime.process.ITalendProcessJavaProject;
import org.talend.core.runtime.process.LastGenerationInfo;
import org.talend.core.runtime.process.TalendProcessOptionConstants;
import org.talend.core.runtime.repository.build.IMavenPomCreator;
+ import org.talend.designer.core.utils.BigDataJobUtil;
import org.talend.designer.maven.tools.creator.CreateMavenJobPom;
import org.talend.designer.maven.utils.PomUtil;
import org.talend.designer.runprocess.IBigDataProcessor;
@@ -224,7 +225,7 @@ public abstract class BigDataJavaProcessor extends MavenJavaProcessor implements
// StringBuffer.
boolean needAllLibJars = true;
if (needsShade()) {
- BigDataJobUtil bdUtil = new BigDataJobUtil((ProcessItem) this.getProperty().getItem());
+ BigDataJobUtil bdUtil = new BigDataJobUtil(process);
if (bdUtil.isMRWithHDInsight()) {
needAllLibJars = false;
}
@@ -410,7 +411,7 @@ public abstract class BigDataJavaProcessor extends MavenJavaProcessor implements
@Override
public boolean needsShade() {
if (needsShade == null && property != null) {
- needsShade = new BigDataJobUtil((ProcessItem) property.getItem()).needsShade();
+ needsShade = new BigDataJobUtil(process).needsShade();
}
return needsShade;
}
@@ -430,7 +431,7 @@ public abstract class BigDataJavaProcessor extends MavenJavaProcessor implements
modulesNeeded);
}
- return new BigDataJobUtil((ProcessItem) property.getItem()).getShadedModulesExclude(modulesNeeded);
+ return new BigDataJobUtil(getProcess()).getShadedModulesExclude(modulesNeeded);
}
}

View File

@@ -136,6 +136,7 @@ import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.core.ui.editor.CodeEditorFactory;
import org.talend.designer.core.ui.editor.nodes.Node;
import org.talend.designer.core.ui.editor.process.Process;
+ import org.talend.designer.core.utils.BigDataJobUtil;
import org.talend.designer.maven.utils.ClasspathsJarGenerator;
import org.talend.designer.maven.utils.MavenVersionHelper;
import org.talend.designer.maven.utils.PomUtil;
@@ -145,7 +146,6 @@ import org.talend.designer.runprocess.ProcessorException;
import org.talend.designer.runprocess.ProcessorUtilities;
import org.talend.designer.runprocess.RunProcessContext;
import org.talend.designer.runprocess.RunProcessPlugin;
- import org.talend.designer.runprocess.bigdata.BigDataJobUtil;
import org.talend.designer.runprocess.i18n.Messages;
import org.talend.designer.runprocess.prefs.RunProcessPrefsConstants;
import org.talend.designer.runprocess.utils.JobVMArgumentsUtil;
@@ -356,7 +356,7 @@ public class JavaProcessor extends AbstractJavaProcessor implements IJavaBreakpo
boolean needsToHaveContextInsideJar = true;
if (property != null && property.getItem() instanceof ProcessItem) {
- needsToHaveContextInsideJar = !new BigDataJobUtil((ProcessItem) property.getItem()).needsToHaveContextInsideJar();
+ needsToHaveContextInsideJar = !new BigDataJobUtil(process).needsToHaveContextInsideJar();
}
if (ProcessorUtilities.isExportConfig() && property != null && needsToHaveContextInsideJar) {
@@ -1229,7 +1229,7 @@ public class JavaProcessor extends AbstractJavaProcessor implements IJavaBreakpo
libsStr = StringUtils.replace(libsStr, " ", "%20"); //$NON-NLS-1$ //$NON-NLS-2$
// create classpath.jar
- if (!isExportConfig() && !isSkipClasspathJar()) {
+ if (!isExportConfig() && !isSkipClasspathJar() && isCorrespondingOS()) {
try {
libsStr = ClasspathsJarGenerator.createJar(getProperty(), libsStr, classPathSeparator);
} catch (Exception e) {
@@ -1240,6 +1240,16 @@ public class JavaProcessor extends AbstractJavaProcessor implements IJavaBreakpo
return libsStr;
}
+ private boolean isCorrespondingOS() {
+ if (Platform.getOS().equals(Platform.OS_WIN32) && isWinTargetPlatform()) {
+ return true;
+ }
+ if (!Platform.getOS().equals(Platform.OS_WIN32) && !isWinTargetPlatform()) {
+ return true;
+ }
+ return false;
+ }
protected String getBasePathClasspath() throws ProcessorException {
final String classPathSeparator = extractClassPathSeparator();
final String rootWorkingDir = getRootWorkingDir(false);
@@ -1356,7 +1366,12 @@ public class JavaProcessor extends AbstractJavaProcessor implements IJavaBreakpo
final String classPathSeparator = extractClassPathSeparator();
final String libPrefixPath = getRootWorkingDir(true);
- Set<ModuleNeeded> neededModules = getNeededModules(TalendProcessOptionConstants.MODULES_WITH_CHILDREN);
+ int option = TalendProcessOptionConstants.MODULES_WITH_CHILDREN;
+ if (isExportConfig() || isSkipClasspathJar()) {
+ option = option | TalendProcessOptionConstants.MODULES_EXCLUDE_SHADED;
+ }
+ Set<ModuleNeeded> neededModules = getNeededModules(option);
JavaProcessorUtilities.checkJavaProjectLib(neededModules);
// Ignore hadoop confs jars in lib path.
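
For context on the isCorrespondingOS() guard added to this processor: the classpath jar built by ClasspathsJarGenerator is the common trick of putting a long Class-Path line into a small jar's manifest so the java command line stays short; it is now only generated when the studio OS matches the job's target platform, presumably because the embedded library paths are laid out for one OS. A generic, self-contained sketch of the technique (file names hypothetical, not Talend's actual generator):

import java.io.FileOutputStream;
import java.util.jar.Attributes;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;

public class ClasspathJarSketch {
    public static void main(String[] args) throws Exception {
        Manifest mf = new Manifest();
        mf.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
        // The long list of libraries lives here instead of on the command line.
        mf.getMainAttributes().put(Attributes.Name.CLASS_PATH, "lib/a.jar lib/b.jar lib/c.jar");
        try (JarOutputStream jar = new JarOutputStream(new FileOutputStream("classpath.jar"), mf)) {
            // No entries are required; only the manifest matters.
        }
    }
}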

View File

@@ -72,6 +72,7 @@ import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.NodeType;
import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
import org.talend.designer.core.ui.editor.process.Process;
+ import org.talend.designer.core.utils.BigDataJobUtil;
import org.talend.designer.core.utils.JavaProcessUtil;
import org.talend.designer.maven.utils.PomUtil;
import org.talend.designer.runprocess.IRunProcessService;
@@ -104,7 +105,8 @@ public class JavaProcessorUtilities {
* @return
*/
public static Set<ModuleNeeded> extractLibsOnlyForMapperAndReducer(IProcess process) {
- int options = TalendProcessOptionConstants.MODULES_WITH_CHILDREN | TalendProcessOptionConstants.MODULES_FOR_MR;
+ int options = TalendProcessOptionConstants.MODULES_WITH_CHILDREN | TalendProcessOptionConstants.MODULES_FOR_MR
+ | TalendProcessOptionConstants.MODULES_EXCLUDE_SHADED;
Set<ModuleNeeded> allModules = JavaProcessUtil.getNeededModules(process, options);
return allModules;
}
@@ -241,6 +243,11 @@ public class JavaProcessorUtilities {
}
}
}
+ if (BitwiseOptionUtils.containOption(options, TalendProcessOptionConstants.MODULES_EXCLUDE_SHADED)) {
+ new BigDataJobUtil(process).removeExcludedModules(neededLibraries);
+ }
// move high priority modules to front.
Set<ModuleNeeded> highPriorityModuleNeeded = LastGenerationInfo.getInstance().getHighPriorityModuleNeeded();
if (!highPriorityModuleNeeded.isEmpty()) {
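
Several hunks above thread a new TalendProcessOptionConstants.MODULES_EXCLUDE_SHADED flag through the module-resolution calls. These option constants are plain bit flags combined with |; a minimal sketch of the mechanism (the numeric values are illustrative, not the real constants):

public class OptionFlagsSketch {
    static final int MODULES_WITH_CHILDREN = 1 << 0;
    static final int MODULES_FOR_MR = 1 << 1;
    static final int MODULES_EXCLUDE_SHADED = 1 << 2;

    // Rough equivalent of BitwiseOptionUtils.containOption(options, option).
    static boolean containOption(int options, int option) {
        return (options & option) == option;
    }

    public static void main(String[] args) {
        int options = MODULES_WITH_CHILDREN | MODULES_FOR_MR | MODULES_EXCLUDE_SHADED;
        System.out.println(containOption(options, MODULES_EXCLUDE_SHADED)); // prints true
    }
}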

View File

@@ -57,7 +57,9 @@ public class AutoUpdateRelationsMigrationTask extends AbstractJobMigrationTask {
Resource processResource = ((ProcessItem) item).getProcess().eResource();
Resource propertyResource = item.eResource();
if (processResource != null && processResource.isLoaded() && (processResource instanceof XMIResource)) {
- propertyResource.unload();
+ if (propertyResource != null) {
+ propertyResource.unload();
+ }
processResource.unload();
rM.resourceSet.getResources().remove(propertyResource);
rM.resourceSet.getResources().remove(processResource);

View File

@@ -23,9 +23,6 @@ public class UseOracle11VersionInsteadOfRemoved extends AbstractJobMigrationTask
private static final String ORACLE_VERSION_PROPERTY_NAME = "DB_VERSION";
private static final String REMOVED_ORACLE_VERSION = "ORACLE_11-6";
- private static final String USE_CONNECTION_PROPERTY_NAME = "USE_EXISTING_CONNECTION";
- private static final String MOVE_TO_DIR_PROPERTY_NAME = "MOVE_TO_THE_CURRENT_DIRECTORY";
@Override
public Date getOrder() {
@@ -83,12 +80,12 @@ public class UseOracle11VersionInsteadOfRemoved extends AbstractJobMigrationTask
private class OracleVersionConversion implements IComponentConversion {
@Override
public void transform(NodeType node) {
- if ((!"true".equals(ComponentUtilities.getNodePropertyValue(node, USE_CONNECTION_PROPERTY_NAME)))) {
- String oracleVersion = ComponentUtilities.getNodePropertyValue(node, ORACLE_VERSION_PROPERTY_NAME);
- if (REMOVED_ORACLE_VERSION.equals(oracleVersion)) {
- ComponentUtilities.setNodeValue(node, ORACLE_VERSION_PROPERTY_NAME, "ORACLE_11");
- }
+ String oracleVersion = ComponentUtilities.getNodePropertyValue(node, ORACLE_VERSION_PROPERTY_NAME);
+ if (REMOVED_ORACLE_VERSION.equals(oracleVersion)) {
+ ComponentUtilities.setNodeValue(node, ORACLE_VERSION_PROPERTY_NAME, "ORACLE_11");
+ }
}
}

View File

@@ -81,11 +81,10 @@ public class BuildOSGiBundleHandler extends BuildJobHandler {
// to compute import-package for the manifest.mf. TalendJavaProjectManager.getTalendJobJavaProject is always
// disabled MavenNature when create project(false), it will stop jdt to compile, and imporve this part will help
// to avoid using maven in this step.
- final Map<String, Object> argumentsMap = new HashMap<String, Object>();
- argumentsMap.put(TalendProcessArgumentConstant.ARG_PROGRAM_ARGUMENTS, "-Dci.builder.skip=true");
MavenPomCommandLauncher mavenLauncher = new MavenPomCommandLauncher(talendProcessJavaProject.getProjectPom(),
TalendMavenConstants.GOAL_COMPILE);
- mavenLauncher.setArgumentsMap(argumentsMap);
+ mavenLauncher.setSkipCIBuilder(true);
+ mavenLauncher.setSkipTests(true);
mavenLauncher.execute(monitor);
List<ExportFileResource> resources = osgiMavenManager

View File

@@ -12,14 +12,17 @@
// ============================================================================
package org.talend.repository.ui.wizards.exportjob.scriptsmanager.esb;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
@@ -30,8 +33,11 @@ import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.collections.map.MultiKeyMap;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Status;
@@ -55,6 +61,7 @@ import org.talend.core.model.repository.IRepositoryViewObject;
import org.talend.core.model.utils.JavaResourcesHelper;
import org.talend.core.repository.constants.FileConstants;
import org.talend.core.repository.model.ProxyRepositoryFactory;
+ import org.talend.core.runtime.process.ITalendProcessJavaProject;
import org.talend.core.runtime.process.LastGenerationInfo;
import org.talend.core.runtime.repository.build.BuildExportManager;
import org.talend.core.ui.branding.IBrandingService;
@@ -670,6 +677,34 @@ public class JobJavaScriptOSGIForESBManager extends JobJavaScriptsManager {
private Manifest getManifest(ExportFileResource libResource, ProcessItem processItem) throws IOException {
Analyzer analyzer = createAnalyzer(libResource, processItem);
+ if (GlobalServiceRegister.getDefault().isServiceRegistered(IRunProcessService.class)) {
+ IRunProcessService service = (IRunProcessService) GlobalServiceRegister.getDefault()
+ .getService(IRunProcessService.class);
+ ITalendProcessJavaProject talendProcessJavaProject = service.getTalendJobJavaProject(processItem.getProperty());
+ if (talendProcessJavaProject != null) {
+ String optional = ";resolution:=optional";
+ String src = JavaResourcesHelper.getJobClassFilePath(processItem, true);
+ IFile srcFile = talendProcessJavaProject.getSrcFolder().getFile(src);
+ Set<String> imports = importCompiler(srcFile.getLocation().toString());
+ String[] defaultPackages = analyzer.getProperty(Analyzer.IMPORT_PACKAGE).split(",");
+ for (String dp : defaultPackages) {
+ if (!imports.contains(dp) && !imports.contains(dp + optional)) {
+ imports.add(dp);
+ }
+ }
+ imports.remove("*;resolution:=optional");
+ imports.remove("routines.system");
+ imports.remove("routines.system" + optional);
+ StringBuilder importPackage = new StringBuilder();
+ for (String packageName : imports) {
+ importPackage.append(packageName).append(',');
+ }
+ importPackage.append("*;resolution:=optional");
+ analyzer.setProperty(Analyzer.IMPORT_PACKAGE, importPackage.toString());
+ }
+ }
// Calculate the manifest
Manifest manifest = null;
try {
@@ -900,4 +935,36 @@ public class JobJavaScriptOSGIForESBManager extends JobJavaScriptsManager {
return processor.getProcess();
}
+ private Set<String> importCompiler(String src) {
+ Set<String> imports = new HashSet<String>();
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ ByteArrayOutputStream err = new ByteArrayOutputStream();
+ try {
+ org.eclipse.jdt.core.compiler.batch.BatchCompiler.compile(src + " -1.7 -nowarn", new PrintWriter(out),
+ new PrintWriter(err), null);
+ String errString = new String(err.toByteArray());
+ String[] errBlocks = errString.split("----------");
+ String reg = "(^[a-z_0-9\\.]+)\\.";
+ Pattern pattern = Pattern.compile(reg);
+ for (String errBlock : errBlocks) {
+ String[] lines = errBlock.trim().replaceAll("\r", "").split("\n");
+ if (lines.length == 4) {
+ if (lines[3].endsWith("cannot be resolved to a type") || lines[3].endsWith("cannot be resolved")) {
+ int markerPos = lines[2].indexOf('^');
+ Matcher m = pattern.matcher(lines[1].substring(markerPos));
+ if (m.find()) {
+ if (m.groupCount() == 1 && m.group(1).indexOf('.') > 0) {
+ imports.add(m.group(1) + ";resolution:=optional");
+ }
+ }
+ }
+ }
+ }
+ out.close();
+ err.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return imports;
+ }
}
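
The importCompiler helper added above reuses the JDT batch compiler's error output to find packages that must be declared in the OSGi Import-Package header: "cannot be resolved" errors are parsed and the package prefix is captured by the regex (^[a-z_0-9\.]+)\. applied from the error's marker column. A small standalone check of that capture (the sample type name is hypothetical):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ImportRegexSketch {
    public static void main(String[] args) {
        Pattern pattern = Pattern.compile("(^[a-z_0-9\\.]+)\\.");
        // Simulates the source text starting at the '^' marker of an
        // "... cannot be resolved to a type" error block.
        Matcher m = pattern.matcher("org.apache.commons.lang.StringUtils");
        if (m.find()) {
            // The greedy match backtracks off the trailing dot, leaving the package name.
            System.out.println(m.group(1) + ";resolution:=optional");
            // -> org.apache.commons.lang;resolution:=optional
        }
    }
}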

View File

@@ -234,6 +234,8 @@ public class BuildJobHandlerTest {
Properties jobInfoProp = new Properties();
jobInfoProp.load(jobInfoStream);
jobInfoStream.close();
assertEquals(property.getId(), jobInfoProp.getProperty("jobId"));
assertEquals(jobName, jobInfoProp.getProperty("job"));
assertEquals(jobVersion, jobInfoProp.getProperty("jobVersion"));
@@ -245,6 +247,14 @@ public class BuildJobHandlerTest {
ZipEntry libEntry = zip.getEntry("lib");
assertNotNull("No lib folder", libEntry);
+ if (jobItem == jobWithChildrenItem) {
+ String dependencyFromParent = "commons-lang-2.5.jar";
+ ZipEntry dependencyEntry = zip.getEntry("lib/" + dependencyFromParent);
+ assertNotNull("No parent job dependency in lib folder", dependencyEntry);
+ String dependencyFromChild = "commons-lang-2.6.jar";
+ dependencyEntry = zip.getEntry("lib/" + dependencyFromChild);
+ assertNotNull("No child job dependency in lib folder", dependencyEntry);
+ }
if (jobItem == jobWithJobletItem) {
String dependencyFromJoblet = "commons-beanutils-1.9.2.jar";
ZipEntry dependencyEntry = zip.getEntry("lib/" + dependencyFromJoblet);