Compare commits

...

5 Commits

Author SHA1 Message Date
Svitlana Ponomarova
cddc4ed618 feat(TBD-12990): Dataproc TP 2022-02-11 14:21:24 +02:00
Svitlana Ponomarova
3a4a955439 feat(TBD-12990): Dataproc TP 2022-02-11 14:05:44 +02:00
Svitlana Ponomarova
cb55f78b3d feat(TBD-12990): Dataproc TP 2022-02-11 13:49:04 +02:00
Svitlana Ponomarova
c4773ffdd1 Merge branch 'maintenance/8.0' into feat/TBD-12990 2022-02-11 08:01:44 +02:00
Svitlana Ponomarova
2d2684e176 feat(TBD-12990): Dataproc TP 2022-02-01 13:47:31 +02:00
3 changed files with 52 additions and 1 deletion

View File

@@ -140,6 +140,7 @@ import org.talend.hadoop.distribution.ComponentType;
import org.talend.hadoop.distribution.DistributionFactory;
import org.talend.hadoop.distribution.DistributionModuleGroup;
import org.talend.hadoop.distribution.ESparkVersion;
import org.talend.hadoop.distribution.constants.dataproc.DataprocRuntimeVersion;
import org.talend.hadoop.distribution.component.HadoopComponent;
import org.talend.hadoop.distribution.component.SparkComponent;
import org.talend.hadoop.distribution.condition.BasicExpression;
@@ -1888,6 +1889,30 @@ public class EmfComponent extends AbstractBasicComponent {
newParam.setGroup(xmlParam.getGROUP());
newParam.setGroupDisplayName(parentParam.getGroupDisplayName());
newParam.setDefaultValues(sparkModesDefaultValues);
newParam.setRepositoryValue(SparkBatchConstant.SPARK_MODE_PARAMETER);
// Dataproc RuntimeVersion for universal
String[] dataprocRuntimeVersionLabels = DataprocRuntimeVersion.getAvailableRuntimeAndSparkVersion().stream().map(x -> x.getRuntimeVersion())
.collect(Collectors.toList()).toArray(new String[0]);
String[] dataprocRuntimeVersionDisplayConditions = DataprocRuntimeVersion.getAvailableRuntimeAndSparkVersion().stream()
.map(x -> "(SPARK_VERSION=='"+x.getSparkVersion()+"')")
.collect(Collectors.toList()).toArray(new String[0]);
newParam = new ElementParameter(node);
newParam.setCategory(EComponentCategory.BASIC);
newParam.setName(SparkBatchConstant.DATAPROC_RUNTIME_VERSION);
newParam.setDisplayName(SparkBatchConstant.getName(SparkBatchConstant.DATAPROC_RUNTIME_VERSION));
newParam.setShowIf(DataprocRuntimeVersion.showIfCondition);
newParam.setListItemsDisplayName(dataprocRuntimeVersionLabels);
newParam.setListItemsDisplayCodeName(dataprocRuntimeVersionLabels);
newParam.setListItemsValue(dataprocRuntimeVersionLabels);
newParam.setListItemsShowIf(dataprocRuntimeVersionDisplayConditions);
newParam.setListItemsNotShowIf(new String[DataprocRuntimeVersion.getAvailableRuntimeAndSparkVersion().size()]);
newParam.setNumRow(xmlParam.getNUMROW() + 1);
newParam.setFieldType(EParameterFieldType.CLOSED_LIST);
newParam.setShow(true);
newParam.setGroup(xmlParam.getGROUP());
listParam.add(newParam);
}
}

View File

@@ -25,6 +25,7 @@ import org.talend.core.model.process.IElementParameter;
import org.talend.core.model.process.IProcess;
import org.talend.core.model.process.IProcess2;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.hadoop.distribution.constants.SparkBatchConstant;
/**
* created by nrousseau on Mar 24, 2018 Detailled comment
@@ -88,6 +89,21 @@ public class BigDataJobUtil {
}
return isSparkWithSynapse;
}
/**
 * Tells whether this job is a Spark (batch) or Spark Streaming Big Data job whose
 * selected distribution is Google Cloud Dataproc.
 *
 * @return {@code true} if the job uses the Spark or Spark Streaming framework and at
 *         least one element parameter named {@code DISTRIBUTION} holds the Google
 *         Cloud Dataproc distribution name, {@code false} otherwise
 */
public boolean isSparkWithGoogleDataProc() {
    if (isBDJobWithFramework(ERepositoryObjectType.PROCESS_MR, HadoopConstants.FRAMEWORK_SPARK)
            || isBDJobWithFramework(ERepositoryObjectType.PROCESS_STORM, HadoopConstants.FRAMEWORK_SPARKSTREAMING)) {
        List<? extends IElementParameter> parameters = process.getElementParametersWithChildrens();
        for (IElementParameter pt : parameters) {
            if (pt.getName().equals("DISTRIBUTION") //$NON-NLS-1$
                    && EHadoopDistributions.GOOGLE_CLOUD_DATAPROC.getName().equals(pt.getValue())) {
                // One matching parameter is enough — stop scanning the remaining ones.
                return true;
            }
        }
    }
    return false;
}
public boolean isMRWithHDInsight() {
Boolean isMRWithHDInsight = false;

View File

@@ -29,6 +29,7 @@ import org.eclipse.core.runtime.Platform;
import org.talend.commons.exception.CommonExceptionHandler;
import org.talend.commons.utils.generation.JavaUtils;
import org.talend.commons.utils.resource.FileExtensions;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.general.ModuleNeeded;
import org.talend.core.model.process.Element;
import org.talend.core.model.process.IElementParameter;
@@ -59,6 +60,7 @@ import org.talend.designer.runprocess.ProcessorUtilities;
import org.talend.designer.runprocess.java.JavaProcessorUtilities;
import org.talend.designer.runprocess.java.TalendJavaProjectManager;
import org.talend.designer.runprocess.maven.MavenJavaProcessor;
import org.talend.repository.model.IRepositoryService;
import org.talend.repository.ui.utils.UpdateLog4jJarUtils;
import org.talend.repository.ui.wizards.exportjob.scriptsmanager.JobScriptsManager;
import org.talend.repository.ui.wizards.exportjob.scriptsmanager.JobScriptsManager.ExportChoice;
@@ -80,7 +82,7 @@ public abstract class BigDataJavaProcessor extends MavenJavaProcessor implements
super(process, property, filenameFromLabel);
this.process = process;
BigDataJobUtil bdUtil = new BigDataJobUtil(process);
if (bdUtil.isSparkWithHDInsight() || bdUtil.isSparkWithSynapse()) {
if (isLog4j2() && (bdUtil.isSparkWithHDInsight() || bdUtil.isSparkWithSynapse() || bdUtil.isSparkWithGoogleDataProc())) {
Element e = (Element) process;
IElementParameter paramActivate = e.getElementParameter(EParameterName.LOG4J_ACTIVATE.getName());
if (paramActivate != null) {
@@ -93,6 +95,14 @@ public abstract class BigDataJavaProcessor extends MavenJavaProcessor implements
}
}
/**
 * Tells whether the project-level logging framework is Log4j 2, as reported by the
 * repository service.
 *
 * @return {@code true} when the {@link IRepositoryService} is registered and reports
 *         project-level Log4j 2, {@code false} otherwise (including when the service
 *         is not available)
 */
private static boolean isLog4j2() {
    GlobalServiceRegister registry = GlobalServiceRegister.getDefault();
    // Guard clause: without the repository service we cannot query the setting.
    if (!registry.isServiceRegistered(IRepositoryService.class)) {
        return false;
    }
    IRepositoryService repositoryService = registry.getService(IRepositoryService.class);
    return repositoryService.isProjectLevelLog4j2();
}
protected abstract JobScriptsManager createJobScriptsManager(ProcessItem processItem,
Map<ExportChoice, Object> exportChoiceMap);