Compare commits

..

7 Commits

Author SHA1 Message Date
wchen-talend
1342f2ceec TPS-1120:[5.4.2]copy to branch' search option appears to take time to
generate the results (TDI-31520)
2015-10-12 15:50:11 +08:00
Sebastien Gandon
f7623122bf TDM-4411 : generate a timestamp at every export for TDM generation
Conflicts:
	main/plugins/org.talend.core/src/main/java/org/talend/designer/runprocess/ProcessorUtilities.java
2015-01-27 16:26:02 +08:00
hcyi
3c22d8c4e0 TPS-710:[5.4.2] Unable to extract schema from an AS400
Connection(TDI-30384,TDI-30423)
2014-09-11 13:52:52 +08:00
hcyi
cff9e0ac4c TPS-710:[5.4.2] Unable to extract schema from an AS400
Connection(TDI-30384,TDI-30423)
2014-09-10 18:10:32 +08:00
cmeng-talend
da2be71e56 TPS-674 [5.4.2] Problem (unwanted preceding joblet name in a sql) with
migration from 512 to 542 (TDI-29901)
https://jira.talendforge.org/browse/TPS-674
2014-07-14 15:41:42 +08:00
Sébastien Gandon
0292b0364a Merge release/5.4.2/tis_shared and release/5.4.2/tos for release/5.4.2. 2014-06-22 14:17:21 +02:00
ggu
b1ac1d57c7 TUP-1960: allow to import the item with different path for same name, when the attribute AllowMultiName is true for repository type.
git-svn-id: http://talendforge.org/svn/tos/branches/branch-5_4@117387 f6f1c999-d317-4740-80b0-e6d1abc6f99e
2014-05-13 10:43:12 +00:00
82 changed files with 1423 additions and 2799 deletions

View File

@@ -1 +1 @@
talend.version=5.5.1
talend.version=5.4.2

View File

@@ -21,7 +21,6 @@ import org.eclipse.swt.layout.RowData;
import org.eclipse.swt.layout.RowLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
/**
* Dimensioned Button (defined width & height OR use FillLayout).
@@ -196,8 +195,4 @@ public class UtilsButton {
public String getText() {
return button.getText();
}
public Control getControl() {
return button;
}
}

View File

@@ -164,7 +164,7 @@ RepositoryDropAdapter_lockedByOthers=This item is locked by other users, it can
RepositoryDropAdapter_lockedByYou=This item is locked by you, it can not be moved now.
RepositoryDropAdapter_moveTitle=Move
RepositoryDropAdapter_movingItems=Moving items...
RepositoryDropAdapter.checkingLockStatus=Checking lock status of
RepositoryDropAdapter.checkingLockStatus=Checking lock status of
RepositoryDropAdapter.moving=Moving
NewFolderWizard.description=Create a new folder in repository
@@ -182,7 +182,6 @@ CreateFolderAction.action.toolTipText=Create folder
RenameFolderAction.action.title=Rename folder
RenameFolderAction.action.toolTipText=Rename folder
RenameFolderAction.description=Rename the folder
RenameFolderAction.warning.editorOpen.message=Cannot rename "{1}" folder because an item ({0}) contained in this folder is currently open.\nClose it and retry.
RenameFolderAction.warning.editorOpen.title=Action not available
RenameFolderAction.warning.cannotFind.message=Cannot rename folder, it may have been moved or deleted. Click refresh button to update the repository.

View File

@@ -41,15 +41,13 @@ public class FolderWizardPage extends WizardPage {
private static final String DESC = Messages.getString("NewFolderWizard.description"); //$NON-NLS-1$
private static final String RENAME_DESC = Messages.getString("RenameFolderAction.description"); //$NON-NLS-1$
private Text nameText;
private IStatus nameStatus;
private final String defaultLabel;
private boolean isPlainFolder = false;
private boolean isPlainFolder = false;
/**
* Constructs a new NewProjectWizardPage.
@@ -63,7 +61,7 @@ public class FolderWizardPage extends WizardPage {
if (defaultLabel == null) {
setDescription(DESC);
} else {
setDescription(RENAME_DESC);
setDescription("");
}
nameStatus = createOkStatus();
@@ -72,7 +70,6 @@ public class FolderWizardPage extends WizardPage {
/**
* @see org.eclipse.jface.dialogs.IDialogPage#createControl(org.eclipse.swt.widgets.Composite)
*/
@Override
public void createControl(Composite parent) {
Composite container = new Composite(parent, SWT.NONE);
@@ -101,7 +98,6 @@ public class FolderWizardPage extends WizardPage {
private void addListeners() {
nameText.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
checkFieldsValue();
}
@@ -113,28 +109,28 @@ public class FolderWizardPage extends WizardPage {
*/
protected void checkFieldsValue() {
// Field Name
if (isPlainFolder) {
nameStatus = ResourcesPlugin.getWorkspace().validateName(nameText.getText(), IResource.FOLDER);
if (nameStatus.isOK() && (defaultLabel == null || !defaultLabel.equals(nameText.getText()))
&& !((FolderWizard) getWizard()).isValid(nameText.getText())) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK, Messages.getString(
"NewFolderWizard.nameInvalid", nameText.getText()), null); //$NON-NLS-1$
}
} else {
if (nameText.getText().length() == 0) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK,
Messages.getString("NewFolderWizard.nameEmpty"), null); //$NON-NLS-1$
} else if (!Pattern.matches(RepositoryConstants.FOLDER_PATTERN, nameText.getText())) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK,
Messages.getString("NewFolderWizard.nameIncorrect"), null); //$NON-NLS-1$
} else if ((defaultLabel == null || !defaultLabel.equals(nameText.getText()))
&& !((FolderWizard) getWizard()).isValid(nameText.getText())) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK, Messages.getString(
"NewFolderWizard.nameInvalid", nameText.getText()), null); //$NON-NLS-1$
} else {
nameStatus = createOkStatus();
}
}
if(isPlainFolder){
nameStatus = ResourcesPlugin.getWorkspace().validateName(nameText.getText(), IResource.FOLDER);
if(nameStatus.isOK() && (defaultLabel == null || !defaultLabel.equals(nameText.getText()))
&& !((FolderWizard) getWizard()).isValid(nameText.getText())) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK, Messages.getString(
"NewFolderWizard.nameInvalid", nameText.getText()), null); //$NON-NLS-1$
}
}else{
if (nameText.getText().length() == 0) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK,
Messages.getString("NewFolderWizard.nameEmpty"), null); //$NON-NLS-1$
} else if (!Pattern.matches(RepositoryConstants.FOLDER_PATTERN, nameText.getText())) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK,
Messages.getString("NewFolderWizard.nameIncorrect"), null); //$NON-NLS-1$
} else if ((defaultLabel == null || !defaultLabel.equals(nameText.getText()))
&& !((FolderWizard) getWizard()).isValid(nameText.getText())) {
nameStatus = new Status(IStatus.ERROR, CoreRepositoryPlugin.PLUGIN_ID, IStatus.OK, Messages.getString(
"NewFolderWizard.nameInvalid", nameText.getText()), null); //$NON-NLS-1$
} else {
nameStatus = createOkStatus();
}
}
updatePageStatus();
}
@@ -152,14 +148,11 @@ public class FolderWizardPage extends WizardPage {
setErrorMessage(status.getMessage());
setMessage(""); //$NON-NLS-1$
} else {
if (defaultLabel != null) {
setMessage(RENAME_DESC);
}
setMessage(DESC);
setErrorMessage(null);
}
}
@Override
public String getName() {
return nameText.getText();
}

View File

@@ -0,0 +1,438 @@
// ============================================================================
//
// Copyright (C) 2006-2014 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.model.utils;
import junit.framework.Assert;
import org.junit.Test;
/**
* DOC cmeng class global comment. Detailed comment
*/
@SuppressWarnings("nls")
public class ParameterValueUtilTest {
@Test
public void testSplitQueryData4SQL() {
String testString = null;
String expectRetValue = null;
String retValue = null;
int i = 0;
// test case 0
// testString : context.operation+" "+context.schema+"."+context.table+";"
testString = "context.operation+\" \"+context.schema+\".\"+context.table+\";\"";
expectRetValue = "context.oper+\" \"+context.schema+\".\"+context.table+\";\"";
retValue = ParameterValueUtil.splitQueryData("context.operation", "context.oper", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// schema
expectRetValue = "context.operation+\" \"+context.db+\".\"+context.table+\";\"";
retValue = ParameterValueUtil.splitQueryData("context.schema", "context.db", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// table
expectRetValue = "context.operation+\" \"+context.schema+\".\"+context.table1+\";\"";
retValue = ParameterValueUtil.splitQueryData("context.table", "context.table1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// part of replacing
retValue = ParameterValueUtil.splitQueryData("text.schema", "text.schemaABC", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, testString.equals(retValue));
// only a normal string
testString = "context.operation+\" \"+context_schema+schema+\".\"+context.table+\";\"";
expectRetValue = "context.operation+\" \"+context_schema+schema123+\".\"+context.table+\";\"";
retValue = ParameterValueUtil.splitQueryData("schema", "schema123", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// only a normal string for context
testString = "context.operation+\" \"+context.schema+schema+\".\"+context.table+\";\"";
expectRetValue = "context.operation+\" \"+context.schema+schema123+\".\"+context.table+\";\"";
retValue = ParameterValueUtil.splitQueryData("schema", "schema123", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// same prefix
testString = "context.operation+\" \"+context.test1+\".\"+context.test11+\";\"";
expectRetValue = "context.operation+\" \"+context.test2+\".\"+context.test11+\";\"";
retValue = ParameterValueUtil.splitQueryData("context.test1", "context.test2", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 1
// testString (For bug:TDI-29092) : "drop table "+context.oracle_schema+".\"TDI_26803\""
testString = "\"drop table \"+context.oracle_schema+\".\\\"TDI_26803\\\"\"";
expectRetValue = "\"drop table \"+context.oracl_schema+\".\\\"TDI_26803\\\"\"";
retValue = ParameterValueUtil.splitQueryData("context.oracle_schema", "context.oracl_schema", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// column, don't replace the file for SQL
expectRetValue = "\"drop table \"+context.oracl_schema+\".\\\"TDI_12345\\\"\"";
retValue = ParameterValueUtil.splitQueryData("TDI_26803", "TDI_12345", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, testString.equals(retValue)); // not changed
// test case 7
// all are empty
// testString :
// ""
testString = "";
expectRetValue = "";
retValue = ParameterValueUtil.splitQueryData("", "", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 8
// many occurrences of the same variable
// testString :
// "contextA"+context+"contextB"+context+"contextC" + context+" "
testString = "\"contextA\"+context+\"contextB\"+context+\"contextC\" + context+\" \"";
expectRetValue = "\"contextA\"+context.db+\"contextB\"+context.db+\"contextC\" + context.db+\" \"";
retValue = ParameterValueUtil.splitQueryData("context", "context.db", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
expectRetValue = "\"contextA\"+context.db+\"contextB\"+context.db+\"contextCC\" + context.db+\" \"";
retValue = ParameterValueUtil.splitQueryData("contextC", "contextCC", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, testString.equals(retValue)); // not changed
// test case 9
// testString :
// "contextA"+contextA+"contextB"+context+"contextC" + context+" "
testString = "\"contextA\"+contextA+\"contextB\"+context+\"contextC\" + context+\" \"";
expectRetValue = "\"contextA\"+contextA+\"contextB\"+context.db+\"contextC\" + context.db+\" \"";
retValue = ParameterValueUtil.splitQueryData("context", "context.db", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
expectRetValue = "\"contextA\"+contextAA+\"contextB\"+context+\"contextC\" + context+\" \"";
retValue = ParameterValueUtil.splitQueryData("contextA", "contextAA", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 10
// "SELECT
// "+context.ORA_VIRTULIA_Schema+".PER_ETATCIVIL.IDE_DOSSIER,
// "+context.ORA_VIRTULIA_Schema+".PER_ETATCIVIL.QUALITE,
// "+context.ORA_VIRTULIA_Schema+".PER_ETATCIVIL.NOM
// FROM "+context.ORA_VIRTULIA_Schema+".PER_ETATCIVIL"
// this function should not replace constant
testString = "\"SELECT \r\n" + "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.IDE_DOSSIER,\r\n"
+ "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.QUALITE,\r\n"
+ "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.NOM\r\n"
+ "FROM \"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL\"";
expectRetValue = "\"SELECT \r\n" + "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.IDE_DOSSIER,\r\n"
+ "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.QUALITE,\r\n"
+ "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.NOM\r\n"
+ "FROM \"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL\"";
retValue = ParameterValueUtil.splitQueryData("PER_ETATCIVIL.IDE_DOSSIER", "simplejoblet_1_PER_ETATCIVIL.IDE_DOSSIER",
testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
testString = "\"SELECT \r\n" + "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.IDE_DOSSIER,\r\n"
+ "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.QUALITE,\r\n"
+ "\"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL.NOM\r\n"
+ "FROM \"+context.ORA_VIRTULIA_Schema+\".PER_ETATCIVIL\"";
expectRetValue = "\"SELECT \r\n" + "\"+context.ORA_CHANGE_Schema+\".PER_ETATCIVIL.IDE_DOSSIER,\r\n"
+ "\"+context.ORA_CHANGE_Schema+\".PER_ETATCIVIL.QUALITE,\r\n"
+ "\"+context.ORA_CHANGE_Schema+\".PER_ETATCIVIL.NOM\r\n"
+ "FROM \"+context.ORA_CHANGE_Schema+\".PER_ETATCIVIL\"";
retValue = ParameterValueUtil.splitQueryData("context.ORA_VIRTULIA_Schema", "context.ORA_CHANGE_Schema", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
testString = "no match";
expectRetValue = "no match";
retValue = ParameterValueUtil.splitQueryData("context.schema", "context.db", testString);
Assert.assertTrue(retValue != null && !"".equals(retValue));
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 11
// testString : "select * from " + context.table + " where value = \"value from context.table\""
// expectString : "select * from " + context.table1 + " where value = \"value from context.table\""
testString = "\"select * from \" + context.table + \" where value = \\\"value from context.table\\\"\"";
expectRetValue = "\"select * from \" + context.table1 + \" where value = \\\"value from context.table\\\"\"";
retValue = ParameterValueUtil.splitQueryData("context.table", "context.table1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 12
// testString : "select * from " + context.table + " where value = \"context.table\""
// expectString : "select * from " + context.table1 + " where value = \"context.table\""
testString = "\"select * from \" + context.table + \" where value = \\\"context.table\\\"\"";
expectRetValue = "\"select * from \" + context.table1 + \" where value = \\\"context.table\\\"\"";
retValue = ParameterValueUtil.splitQueryData("context.table", "context.table1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 13
// testString : "select * from " + context.table + " where value = \"context.table\"" + context.table
// expectString : "select * from " + context.table1 + " where value = \"context.table\"" + context.table
testString = "\"select * from \" + context.table + \" where value = \\\"context.table\\\"\" + context.table";
expectRetValue = "\"select * from \" + context.table1 + \" where value = \\\"context.table\\\"\" + context.table1";
retValue = ParameterValueUtil.splitQueryData("context.table", "context.table1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 14 : incomplete double quote
// testString : "select a,context.b from " + context.b + "where value = context.b
// expectString : "select a,context.b from " + context.b1 + "where value = context.b1
testString = "\"select a,context.b from \" + context.b + \"where value = context.b";
expectRetValue = "\"select a,context.b from \" + context.b1 + \"where value = context.b1";
retValue = ParameterValueUtil.splitQueryData("context.b", "context.b1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 15 : incomplete double quote
// testString : "select a,context.b from " + context.b + "where value = \"context.b
// expectString : "select a,context.b from " + context.b1 + "where value = \"context.b1
testString = "\"select a,context.b from \" + context.b + \"where value = \\\"context.b";
expectRetValue = "\"select a,context.b from \" + context.b1 + \"where value = \\\"context.b1";
retValue = ParameterValueUtil.splitQueryData("context.b", "context.b1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 16
// testString : "select * from " + context.table + " where value = \"\\" + context.table + "\\context.table\""
// expectString : "select * from " + context.table1 + " where value = \"\\" + context.table1 +
// "\\context.table\""
testString = "\"select * from \" + context.table + \" where value = \\\"\\\\\" + context.table + \"\\\\context.table\\\"\"";
expectRetValue = "\"select * from \" + context.table1 + \" where value = \\\"\\\\\" + context.table1 + \"\\\\context.table\\\"\"";
retValue = ParameterValueUtil.splitQueryData("context.table", "context.table1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 17
// testString : "select * from ""context.table where value = \"\\" + context.table + "\\context.table\""
// expectString : "select * from ""context.table where value = \"\\" + context.table1 + "\\context.table\""
testString = "\"select * from \"\"context.table where value = \\\"\\\\\" + context.table + \"\\\\context.table\\\"\"";
expectRetValue = "\"select * from \"\"context.table where value = \\\"\\\\\" + context.table1 + \"\\\\context.table\\\"\"";
retValue = ParameterValueUtil.splitQueryData("context.table", "context.table1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 18
// testString : "select * from " + context.table + "where id = " + getId(getHeader(context.header, "CONTEXT_ID")
// + "CONTEXT_ID")
// expectString : "select * from " + context.table + "where id = " + getId(getHeader(context.header,
// "CONTEXT_ID") + "CONTEXT_ID")
testString = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_ID\") + \"CONTEXT_ID\")";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_IDS\") + \"CONTEXT_IDS\")";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_IDS", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 19
// testString : "select * from " + context.table + "where id = " + getId(getHeader(context.header, "CONTEXT_ID")
// + "CONTEXT_ID", context.p2, "CONTEXT_ID")
// expectString : "select * from " + context.table + "where id = " + getId(getHeader(context.header,
// "CONTEXT_IDS") + "CONTEXT_IDS", context.p2, "CONTEXT_IDS")
testString = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_ID\") + \"CONTEXT_ID\", context.p2, \"CONTEXT_ID\")";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_IDS\") + \"CONTEXT_IDS\", context.p2, \"CONTEXT_IDS\")";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_IDS", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 20
// testString : "select * from " + context.table + "where id = " + getId(global.getHeader(context.header,
// "CONTEXT_ID")
// + "CONTEXT_ID", context.p2, "CONTEXT_ID")
// expectString : "select * from " + context.table + "where id = " + getId(global.getHeader(context.header,
// "CONTEXT_IDS") + "CONTEXT_IDS", context.p2, "CONTEXT_IDS")
testString = "\"select * from \" + context.table + \"where id = \" + getId(global.getHeader(context.header, \"CONTEXT_ID\") + \"CONTEXT_ID\", context.p2, \"CONTEXT_ID\")";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(global.getHeader(context.header, \"CONTEXT_IDS\") + \"CONTEXT_IDS\", context.p2, \"CONTEXT_IDS\")";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_IDS", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 21
// testString : "select * from " + context.table + "where id = " + getId(global.getHeader(context.header,
// "\"CONTEXT_ID\\\"")
// + "\"CONTEXT_ID", context.p2, "CONTEXT_ID")
// expectString : "select * from " + context.table + "where id = " + getId(global.getHeader(context.header,
// "CONTEXT_IDS") + "CONTEXT_IDS", context.p2, "CONTEXT_IDS")
testString = "\"select * from \" + context.table + \"where id = \" + getId(global.getHeader(context.header, \"\\\"CONTEXT_ID\\\\\\\"\") + \"\\\"CONTEXT_ID\", context.p2, \"CONTEXT_ID\")";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(global.getHeader(context.header, \"\\\"CONTEXT_IDS\\\\\\\"\") + \"\\\"CONTEXT_IDS\", context.p2, \"CONTEXT_IDS\")";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_IDS", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 22
// testString : "select * from " + context.table + "where id = " + getId(getHeader(context.header, "CONTEXT_ID")
// + "CONTEXT_ID", context.p2, "CONTEXT_ID"
// expectString : "select * from " + context.table + "where id = " + getId(getHeader(context.header,
// "CONTEXT_IDS") + "CONTEXT_IDS", context.p2, "CONTEXT_IDS"
testString = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_ID\") + \"CONTEXT_ID\", context.p2, \"CONTEXT_ID\"";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_IDS\") + \"CONTEXT_IDS\", context.p2, \"CONTEXT_IDS\"";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_IDS", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 23
// testString : "select * from " + context.table + "where id = " + getId(getHeader(context.header, "CONTEXT_ID")
// + "CONTEXT_ID", context.p2, "CONTEXT_ID
// expectString : "select * from " + context.table + "where id = " + getId(getHeader(context.header,
// "CONTEXT_IDS") + "CONTEXT_IDS", context.p2, "CONTEXT_IDS
testString = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_ID\") + \"CONTEXT_ID\", context.p2, \"CONTEXT_ID";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(getHeader(context.header, \"CONTEXT_IDS\") + \"CONTEXT_IDS\", context.p2, \"CONTEXT_IDS";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_IDS", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 24
// testString : "select * from " + context.table + "where id = " + getId(context.id) + globalMap.get("CONST")
// expectString : "select * from " + context.table + "where id = " + getId(context.id) + globalMap.get("CONST1")
testString = "\"select * from \" + context.table + \"where id = \" + getId(context.id) + globalMap.get(\"CONST\")";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(context.id) + globalMap.get(\"CONST1\")";
retValue = ParameterValueUtil.splitQueryData("CONST", "CONST1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 25 : should not replace method name
// testString : "select * from " + context.table + "where id = " + getId(context.id) +
// globalMap.get("globalMap")
// expectString : "select * from " + context.table + "where id = " + getId(context.id) +
// globalMap.get("globalMap1")
testString = "\"select * from \" + context.table + \"where id = \" + getId(context.id) + globalMap.get(\"globalMap\")";
expectRetValue = "\"select * from \" + context.table + \"where id = \" + getId(context.id) + globalMap.get(\"globalMap1\")";
retValue = ParameterValueUtil.splitQueryData("globalMap", "globalMap1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 26
// testString : "select * from " + context.table.a.b + contextA.table.a + table.a.b + table.a + "where id = " +
// getId(table.a) + table.a.get("table.a")
//
// expectString : "select * from " + context.table.a.b + contextA.table.a + table.a.b + table.a1 + "where id = "
// + getId(table.a1) + table.a.get("table.a1")
testString = "\"select * from \" + context.table.a.b + contextA.table.a + table.a.b + table.a + \"where id = \" + getId(table.a) + table.a.get(\"table.a\")";
expectRetValue = "\"select * from \" + context.table.a.b + contextA.table.a + table.a.b + table.a1 + \"where id = \" + getId(table.a1) + table.a.get(\"table.a1\")";
retValue = ParameterValueUtil.splitQueryData("table.a", "table.a1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
// test case 2
// testString : "select * from " + a.CONTEXT_ID + CONTEXT_ID.b + CONTEXT_ID + "where id = " +
// CONTEXT_ID(CONTEXT_ID(CONTEXT_ID, "\"CONTEXT_ID\"\\" + CONTEXT_ID, CONTEXT_ID, "CONTEXT_ID") + "CONTEXT_ID",
// CONTEXT_ID(ID, "CONTEXT_ID"), "CONTEXT_ID")
// expectString : "select * from " + a.CONTEXT_ID + CONTEXT_ID.b + CONTEXT_ID1 + "where id = " +
// CONTEXT_ID(CONTEXT_ID(CONTEXT_ID1, "\"CONTEXT_ID1\"\\" + CONTEXT_ID1, CONTEXT_ID1, "CONTEXT_ID1") +
// "CONTEXT_ID1", CONTEXT_ID(ID, "CONTEXT_ID1"), "CONTEXT_ID1")
testString = "\"select * from \" + a.CONTEXT_ID + CONTEXT_ID.b + CONTEXT_ID + \"where id = \" + CONTEXT_ID(CONTEXT_ID(CONTEXT_ID, \"\\\"CONTEXT_ID\\\"\\\\\" + CONTEXT_ID, CONTEXT_ID, \"CONTEXT_ID\") + \"CONTEXT_ID\", CONTEXT_ID(ID, \"CONTEXT_ID\"), \"CONTEXT_ID\")";
expectRetValue = "\"select * from \" + a.CONTEXT_ID + CONTEXT_ID.b + CONTEXT_ID1 + \"where id = \" + CONTEXT_ID(CONTEXT_ID(CONTEXT_ID1, \"\\\"CONTEXT_ID1\\\"\\\\\" + CONTEXT_ID1, CONTEXT_ID1, \"CONTEXT_ID1\") + \"CONTEXT_ID1\", CONTEXT_ID(ID, \"CONTEXT_ID1\"), \"CONTEXT_ID1\")";
retValue = ParameterValueUtil.splitQueryData("CONTEXT_ID", "CONTEXT_ID1", testString);
Assert.assertTrue("testSplitQueryDataCase_" + i++, expectRetValue.equals(retValue));
}
@Test
public void testSplitQueryData4SQL_Case2() {
// case 2:
// "insert into "+context.schema+"."+context.table+"(schema, table) values(\"context.schema\", \"context.table\")"
testSplitQueryData4SQL_Case2_5("testSplitQueryData4Case2", null, null, null);
}
@Test
public void testSplitQueryData4SQL_Case3() {
// case 3:
// ""+"insert into "+context.schema+"."+context.table+"(schema, table) values(\"context.schema\", \"context.table\")"
testSplitQueryData4SQL_Case2_5("testSplitQueryData4Case3", "\"\"", null, null);
}
@Test
public void testSplitQueryData4SQL_Case4() {
// case 4:
// "insert into "+context.schema+"."+context.table+"(schema, table) values(\"context.schema\", \"context.table\")"+""
testSplitQueryData4SQL_Case2_5("testSplitQueryData4Case4", null, null, "\"\"");
}
@Test
public void testSplitQueryData4SQL_Case5() {
// case 5:
// "insert into "+context.schema+"."+context.table+""+
// "(schema, table) values(\"context.schema\", \"context.table\")"
testSplitQueryData4SQL_Case2_5("testSplitQueryData4Case5", null, "\"\"", null);
}
private void testSplitQueryData4SQL_Case2_5(String message, String prefix, String mid, String suffix) {
if (prefix == null) {
prefix = "";
}
if (mid == null) {
mid = "";
}
if (suffix == null) {
suffix = "";
}
String testString = null;
String expectRetValue = null;
int i = 0;
// test case 2-5
// String which is same to the String to be replaced was contained in the testString
// testString :
/*
* case 2:
* "insert into "+context.schema+"."+context.table+"(schema, table) values(\"context.schema\", \"context.table\")"
*
* case 3: ""+"insert into "+context.schema+"."+context.table+
* "(schema, table) values(\"context.schema\", \"context.table\")"
*
*
* case 4:
* "insert into "+context.schema+"."+context.table+"(schema, table) values(\"context.schema\", \"context.table\")"
* +""
*
* case 5: "insert into "+context.schema+"."+context.table+""+
* "(schema, table) values(\"context.schema\", \"context.table\")"
*/
testString = "\"insert into \"+context.schema+\".\"+context.table+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
expectRetValue = "\"insert into \"+context.db+\".\"+context.table+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
assertTest(message, i++, prefix + testString + suffix, prefix + expectRetValue + suffix, "context.schema", "context.db");
// table
expectRetValue = "\"insert into \"+context.schema+\".\"+context.table111+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
assertTest(message, i++, prefix + testString + suffix, prefix + expectRetValue + suffix, "context.table",
"context.table111");
// prefix name 1
testString = "\"insert into \"+context.schema+\".\"+context.schematable+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
expectRetValue = "\"insert into \"+context.db+\".\"+context.schematable+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
assertTest(message, i++, prefix + testString + suffix, prefix + expectRetValue + suffix, "context.schema", "context.db");
// prefix name 2
testString = "\"insert into \"+context.schema+\".\"+context.schema_table+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
expectRetValue = "\"insert into \"+context.db+\".\"+context.schema_table+" + mid
+ "\"(schema, table) values(\\\"context.schema\\\", \\\"context.table\\\")\"";
assertTest(message, i++, prefix + testString + suffix, prefix + expectRetValue + suffix, "context.schema", "context.db");
}
private void assertTest(String message, int index, String testString, String expectRetValue, String oldOne, String newOne) {
String resultValue = ParameterValueUtil.splitQueryData(oldOne, newOne, testString);
Assert.assertTrue(message + index, expectRetValue.equals(resultValue));
}
@Test
public void testRenameValues4GlobleMap() {
    // Each row: { input expression, expected expression after renaming
    // tFileList_1 -> tFileList_2 }. The third row checks that the longer
    // name tFileList_11 is NOT touched by the shorter prefix.
    final String[][] renameCases = {
            { "((String)globalMap.get(\"tFileList_1_CURRENT_FILE\"))",
                    "((String)globalMap.get(\"tFileList_2_CURRENT_FILE\"))" },
            { "((String)globalMap.get(\"tFileList_1_CURRENT_FILEDIRECTORY\"))+((String)globalMap.get(\"tFileList_1_CURRENT_FILE\"))",
                    "((String)globalMap.get(\"tFileList_2_CURRENT_FILEDIRECTORY\"))+((String)globalMap.get(\"tFileList_2_CURRENT_FILE\"))" },
            { "((String)globalMap.get(\"tFileList_1_CURRENT_FILEDIRECTORY\"))+((String)globalMap.get(\"tFileList_11_CURRENT_FILE\"))",
                    "((String)globalMap.get(\"tFileList_2_CURRENT_FILEDIRECTORY\"))+((String)globalMap.get(\"tFileList_11_CURRENT_FILE\"))" } };
    for (String[] renameCase : renameCases) {
        String renamed = ParameterValueUtil.renameValues(renameCase[0], "tFileList_1", "tFileList_2", true);
        Assert.assertTrue(renameCase[1].equals(renamed));
    }
}
@Test
public void testRenameValues4SQLAndGlobleMap() {
    // Mixed expression: a SQL fragment using context variables plus a
    // globalMap lookup.
    // "select A.id, A.name form "+context.table+" A where A.name= "+((String)globalMap.get("tFileList_1_CURRENT_FILE"))
    String input = "\"select A.id, A.name form \"+context.table+\" A where A.name= \"+((String)globalMap.get(\"tFileList_1_CURRENT_FILE\"))";
    // Renaming the context variable must leave the globalMap key untouched.
    String expected = "\"select A.id, A.name form \"+context.table123+\" A where A.name= \"+((String)globalMap.get(\"tFileList_1_CURRENT_FILE\"))";
    // flag=false means the value comes from SQL; renaming inside the
    // globalMap key is the tricky part being verified here.
    String actual = ParameterValueUtil.renameValues(input, "context.table", "context.table123", false);
    Assert.assertTrue("testRenameValues4SQLAndGlobleMap", expected.equals(actual));
    // Renaming the component name must rewrite only the globalMap key.
    expected = "\"select A.id, A.name form \"+context.table+\" A where A.name= \"+((String)globalMap.get(\"tFileList_2_CURRENT_FILE\"))";
    actual = ParameterValueUtil.renameValues(input, "tFileList_1", "tFileList_2", false);
    Assert.assertTrue("testRenameValues4SQLAndGlobleMap", expected.equals(actual));
}
}

View File

@@ -8,16 +8,12 @@
<dbType type="NUMBER" defaultLength="20" defaultPrecision="10" ignoreLen="false" ignorePre="false"/>
<dbType type="INT" ignoreLen="true" ignorePre="true"/>
<dbType type="INTEGER" ignoreLen="true" ignorePre="true"/>
<dbType type="DECIMAL" ignoreLen="true" ignorePre="true"/>
<dbType type="LONG" ignoreLen="true" ignorePre="true"/>
<dbType type="FLOAT" ignoreLen="true" ignorePre="true"/>
<dbType type="BINARY_FLOAT" ignoreLen="true" ignorePre="true"/>
<dbType type="DOUBLE PRECISION" ignoreLen="true" ignorePre="true"/>
<dbType type="BINARY_DOUBLE" ignoreLen="true" ignorePre="true"/>
<dbType type="DATE" ignoreLen="true" ignorePre="true"/>
<dbType type="TIMESTAMP" ignoreLen="true" ignorePre="true"/>
<dbType type="TIMESTAMP WITH LOCAL TIME ZONE" ignoreLen="true" ignorePre="true"/>
<dbType type="BOOLEAN" ignoreLen="true" ignorePre="true"/>
</dbTypes>
<language name="java">
@@ -25,7 +21,6 @@
<talendType type="id_List">
</talendType>
<talendType type="id_Boolean">
<dbType type="BOOLEAN" default="true"/>
</talendType>
<talendType type="id_Byte">
</talendType>
@@ -37,11 +32,9 @@
</talendType>
<talendType type="id_Date">
<dbType type="DATE" default="true"/>
<dbType type="TIMESTAMP"/>
<dbType type="TIMESTAMP WITH LOCAL TIME ZONE"/>
</talendType>
<talendType type="id_BigDecimal">
<dbType type="DECIMAL" default="true"/>
<dbType type="NUMBER"/>
</talendType>
<talendType type="id_Double">
<dbType type="DOUBLE PRECISION" default="true"/>
@@ -54,7 +47,6 @@
<talendType type="id_Integer">
<dbType type="INT" default="true"/>
<dbType type="INTEGER"/>
<dbType type="DECIMAL"/>
</talendType>
<talendType type="id_Long">
<dbType type="NUMBER" default="true"/>
@@ -103,22 +95,7 @@
</dbType>
<dbType type="DATE">
<talendType type="id_Date" default="true" />
</dbType>
<dbType type="DECIMAL">
<talendType type="id_BigDecimal" default="true" />
<talendType type="id_Long"/>
<talendType type="id_Integer"/>
<talendType type="id_Double"/>
</dbType>
<dbType type="TIMESTAMP">
<talendType type="id_Date" default="true" />
</dbType>
<dbType type="TIMESTAMP WITH LOCAL TIME ZONE">
<talendType type="id_Date" default="true" />
</dbType>
<dbType type="BOOLEAN">
<talendType type="id_Boolean" default="true" />
</dbType>
</dbType>
</dbToTalendTypes>
</language>
</dbms>

View File

@@ -10,8 +10,6 @@
<dbType type="CLOB" ignorePre="true"/>
<dbType type="DATE" ignoreLen="true" ignorePre="true"/>
<dbType type="DECIMAL" defaultLength="20" defaultPrecision="10"/>
<dbType type="DOUBLE" ignoreLen="true" ignorePre="true"/>
<dbType type="FLOAT" ignoreLen="true" ignorePre="true" />
<dbType type="GRAPHIC" defaultLength="20" ignoreLen="true" ignorePre="true"/>
<dbType type="INTEGER" ignoreLen="true" ignorePre="true"/>
<dbType type="LONG VARCHAR" ignoreLen="true" ignorePre="true"/>
@@ -52,15 +50,11 @@
<dbType type="NUMERIC"/>
</talendType>
<talendType type="id_Double">
<dbType type="DOUBLE" default="true"/>
<dbType type="DECIMAL"/>
<dbType type="FLOAT"/>
<dbType type="DECIMAL" default="true"/>
<dbType type="NUMERIC"/>
</talendType>
<talendType type="id_Float">
<dbType type="FLOAT" default="true"/>
<dbType type="DECIMAL"/>
<dbType type="DOUBLE"/>
<dbType type="DECIMAL" default="true"/>
<dbType type="NUMERIC"/>
</talendType>
<talendType type="id_Integer">
@@ -116,16 +110,6 @@
<talendType type="id_BigDecimal"/>
<talendType type="id_Double"/>
</dbType>
<dbType type="DOUBLE">
<talendType type="id_Double" default="true" />
<talendType type="id_BigDecimal"/>
<talendType type="id_Float"/>
</dbType>
<dbType type="FLOAT">
<talendType type="id_Float" default="true" />
<talendType type="id_BigDecimal"/>
<talendType type="id_Double"/>
</dbType>
<dbType type="GRAPHIC">
<talendType type="id_Object" default="true" />
</dbType>

View File

@@ -33,6 +33,6 @@ public interface ISVNProviderServiceInCoreRuntime extends IService {
public void synProjectLib(String filePath);
public boolean update();
public void update();
}

View File

@@ -14,7 +14,6 @@ package org.talend.core;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -188,8 +187,4 @@ public interface ITDQRepositoryService extends IService {
*/
public void reloadMetadataOfDelimitedFile(MetadataTable newMetadataTable) throws BusinessException;
public Collection<org.talend.core.model.metadata.builder.connection.Connection> getAllDataProviders();
public void updateDriverIfClassNotLoad(DatabaseConnection databaseConnection);
}

View File

@@ -14,7 +14,6 @@ package org.talend.core.context;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.talend.core.model.general.Project;
import org.talend.core.model.properties.User;
@@ -63,18 +62,7 @@ public class RepositoryContext {
* @param user the user to set
*/
public void setUser(User user) {
// svn authentification is not saved actually in the emf model.
// if the new user have no svn authentification, but old instance of user have svn authentification
// we force the new instance to set the svn infos.
String oldAuthentification = null;
if (this.user != null && user != null && StringUtils.equals(this.user.getLogin(), user.getLogin())
&& user.getAuthenticationInfo() == null) {
oldAuthentification = this.user.getAuthenticationInfo();
}
this.user = user;
if (oldAuthentification != null) {
this.user.setAuthenticationInfo(oldAuthentification);
}
}
/**

View File

@@ -69,7 +69,6 @@ public enum EDatabaseVersion4Drivers {
// add for 9594
MSSQL(new DbVersion4Drivers(EDatabaseTypeName.MSSQL, "jtds-1.2.5.jar")), //$NON-NLS-1$
VERTICA_7(new DbVersion4Drivers(EDatabaseTypeName.VERTICA, "VERTICA 7.0.x", "VERTICA_7_0_X", "vertica-jdbc-7.0.1-0.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
VERTICA_6_1_X(new DbVersion4Drivers(EDatabaseTypeName.VERTICA, "VERTICA 6.1.x", "VERTICA_6_1_X", "vertica-jdk5-6.1.2-0.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
VERTICA_6(new DbVersion4Drivers(EDatabaseTypeName.VERTICA, "VERTICA 6.0", "VERTICA_6_0", "vertica-jdk5-6.0.0-0.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
VERTICA_5_1(new DbVersion4Drivers(EDatabaseTypeName.VERTICA, "VERTICA 5.1", "VERTICA_5_1", "vertica_5.1.6_jdk_5.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$

View File

@@ -19,41 +19,34 @@ import java.util.List;
* DOC ycbai class global comment. Detailled comment
*/
public enum EHBaseDistribution4Versions {
HDP_2_1(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V2.1.0(Baikal)", "HDP_2_1"),
HDP_2_0(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V2.0.0(BigWheel)", "HDP_2_0"),
HDP_1_3(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V1.3.0(Condor)", "HDP_1_3"),
HDP_1_0(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V1.0.0", "HDP_1_0"),
HDP_1_2(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V1.2.0(Bimota)", "HDP_1_2"),
HDP_1_0(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V1.0.0(deprecated)", "HDP_1_0"),
HDP_1_3(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V1.3.0(Condor)", "HDP_1_3"),
APACHE_0_20_203(EHBaseDistributions.APACHE, "Apache 0.20.203", "APACHE_0_20_203"),
APACHE_1_0_3_EMR(EHBaseDistributions.AMAZON_EMR, "Apache 1.0.3(EMR)", "APACHE_1_0_3_EMR"),
HDP_2_0(EHBaseDistributions.HORTONWORKS, "Hortonworks Data Platform V2.0.0", "HDP_2_0"),
APACHE_1_0_0(EHBaseDistributions.APACHE, "Apache 1.0.0", "APACHE_1_0_0"),
CLOUDERA_CDH5(EHBaseDistributions.CLOUDERA, "Cloudera CDH5", "Cloudera_CDH5"),
APACHE_1_0_3_EMR(EHBaseDistributions.AMAZON_EMR, "Apache 1.0.3(EMR)", "APACHE_1_0_3_EMR"),
CLOUDERA_CDH4_YARN(EHBaseDistributions.CLOUDERA, "Cloudera CDH4 YARN", "Cloudera_CDH4_YARN"),
APACHE_0_20_203(EHBaseDistributions.APACHE, "Apache 0.20.203", "APACHE_0_20_203"),
CLOUDERA_CDH3(EHBaseDistributions.CLOUDERA, "Cloudera CDH3", "Cloudera_CDH3"),
CLOUDERA_CDH4(EHBaseDistributions.CLOUDERA, "Cloudera CDH4", "Cloudera_CDH4"),
CLOUDERA_CDH3(EHBaseDistributions.CLOUDERA, "Cloudera CDH3(deprecated)", "Cloudera_CDH3"),
MAPR_3_1_0(EHBaseDistributions.MAPR, "MapR 3.1.0", "MAPR310"),
MAPR_3_0_1(EHBaseDistributions.MAPR, "MapR 3.0.1", "MAPR301"),
MAPR_2_1_3(EHBaseDistributions.MAPR, "MapR 2.1.3", "MAPR213"),
MAPR_2_1_2(EHBaseDistributions.MAPR, "MapR 2.1.2", "MAPR212"),
CLOUDERA_CDH4_YARN(EHBaseDistributions.CLOUDERA, "Cloudera CDH4 YARN", "Cloudera_CDH4_YARN"),
MAPR(EHBaseDistributions.MAPR, "MapR 2.0.0", "MAPR2"),
PIVOTAL_HD_2_0(EHBaseDistributions.PIVOTAL_HD, "Pivotal HD 2.0", "PIVOTAL_HD_2_0"),
MAPR_2_1_2(EHBaseDistributions.MAPR, "MapR 2.1.2", "MAPR212"),
MAPR_2_1_3(EHBaseDistributions.MAPR, "MapR 2.1.3", "MAPR213"),
MAPR_3_0_1(EHBaseDistributions.MAPR, "MapR 3.0.1", "MAPR301"),
PIVOTAL_HD_1_0_1(EHBaseDistributions.PIVOTAL_HD, "Pivotal HD 1.0.1", "PIVOTAL_HD_1_0_1"),

View File

@@ -1,51 +0,0 @@
// ============================================================================
//
// Copyright (C) 2006-2014 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.hadoop;
/**
* created by ycbai on 2014-5-28. Detailed comment.
*
*/
/**
 * Maps a Hadoop service to the configuration jar names it requires, split by
 * whether Kerberos security is enabled for the connection.
 */
public enum EHadoopConfigurationJars {

    // HDFS needs several extra client jars once Kerberos is enabled.
    HDFS(
            new String[] { "hadoop-conf.jar" }, new String[] { "hadoop-conf-kerberos.jar", "jetty-util-6.1.26.jar", "jersey-core-1.8.jar", "commons-io-2.4.jar" }), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$
    HCATALOG(new String[] { "hadoop-conf.jar" }, new String[] { "hadoop-conf-kerberos.jar" }), //$NON-NLS-1$ //$NON-NLS-2$
    HIVE(new String[] { "hadoop-conf.jar" }, new String[] { "hadoop-conf-kerberos.jar" }), //$NON-NLS-1$ //$NON-NLS-2$
    ;

    // Jar names used when Kerberos security is disabled.
    private String[] disableSecurityJars;

    // Jar names used when Kerberos security is enabled.
    private String[] enableSecurityJars;

    EHadoopConfigurationJars(String[] disableSecurityJars, String[] enableSecurityJars) {
        this.disableSecurityJars = disableSecurityJars;
        this.enableSecurityJars = enableSecurityJars;
    }

    /** @return the constant's name, identical to {@link #name()}. */
    public String getName() {
        return name();
    }

    /**
     * @return jars for the non-secured setup. NOTE(review): this hands out the
     *         internal array; callers are expected not to mutate it.
     */
    public String[] getDisableSecurityJars() {
        return this.disableSecurityJars;
    }

    /**
     * @return jars for the Kerberos-secured setup. NOTE(review): this hands out
     *         the internal array; callers are expected not to mutate it.
     */
    public String[] getEnableSecurityJars() {
        return this.enableSecurityJars;
    }
}

View File

@@ -190,6 +190,6 @@ public interface IHadoopClusterService extends IService {
* @param process
* @return true if there are some changes from them, otherwise return false.
*/
public boolean hasDiffsFromClusterToProcess(Item item, IProcess process);
public boolean hasDiffsFromClusterToProcess(Connection hcConnection, IProcess process);
}

View File

@@ -1,206 +1,181 @@
// ============================================================================
//
// Copyright (C) 2006-2014 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.hadoop.version;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.ArrayUtils;
/**
* DOC ycbai class global comment. Detailed comment.
*/
/**
 * Enumerates the supported Hadoop distribution/version pairs for driver
 * selection, with per-version capability flags: Kerberos security support,
 * "group" support, and the MapReduce framework versions (MR1 and/or YARN)
 * each release works with.
 */
public enum EHadoopVersion4Drivers {

    HDP_2_1(
            EHadoopDistributions.HORTONWORKS,
            "Hortonworks Data Platform V2.1.0(Baikal)",
            "HDP_2_1",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    HDP_2_0(
            EHadoopDistributions.HORTONWORKS,
            "Hortonworks Data Platform V2.0.0(BigWheel)",
            "HDP_2_0",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    HDP_1_3(EHadoopDistributions.HORTONWORKS, "Hortonworks Data Platform V1.3.0(Condor)", "HDP_1_3", true, false),
    HDP_1_2(EHadoopDistributions.HORTONWORKS, "Hortonworks Data Platform V1.2.0(Bimota)", "HDP_1_2", true, false),
    HDP_1_0(EHadoopDistributions.HORTONWORKS, "Hortonworks Data Platform V1.0.0(deprecated)", "HDP_1_0", true, false),
    APACHE_1_0_0(EHadoopDistributions.APACHE, "Apache 1.0.0", "APACHE_1_0_0", true, false),
    APACHE_0_20_204(EHadoopDistributions.APACHE, "Apache 0.20.204", "APACHE_0_20_204", false, false),
    APACHE_0_20_203(EHadoopDistributions.APACHE, "Apache 0.20.203", "APACHE_0_20_203", false, false),
    APACHE_0_20_2(EHadoopDistributions.APACHE, "Apache 0.20.2", "APACHE_0_20_2", false, true),
    CLOUDERA_CDH5(
            EHadoopDistributions.CLOUDERA,
            "Cloudera CDH5",
            "Cloudera_CDH5",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    CLOUDERA_CDH4_YARN(
            EHadoopDistributions.CLOUDERA,
            "Cloudera CDH4 YARN",
            "Cloudera_CDH4_YARN",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    CLOUDERA_CDH4(EHadoopDistributions.CLOUDERA, "Cloudera CDH4", "Cloudera_CDH4", true, false),
    CLOUDERA_CDH3(EHadoopDistributions.CLOUDERA, "Cloudera CDH3(deprecated)", "Cloudera_CDH3", false, false),
    MAPR310(EHadoopDistributions.MAPR, "MapR 3.1.0", "MAPR310", false, true),
    MAPR301(EHadoopDistributions.MAPR, "MapR 3.0.1", "MAPR301", false, true),
    MAPR213(EHadoopDistributions.MAPR, "MapR 2.1.3", "MAPR213", false, true),
    MAPR212(EHadoopDistributions.MAPR, "MapR 2.1.2", "MAPR212", false, true),
    MAPR2(EHadoopDistributions.MAPR, "MapR 2.0.0", "MAPR2", false, true),
    MAPR1(EHadoopDistributions.MAPR, "MapR 1.2.0", "MAPR1", false, true),
    APACHE_1_0_3_EMR(EHadoopDistributions.AMAZON_EMR, "Apache 1.0.3", "APACHE_1_0_3_EMR", true, false),
    MAPR_EMR(EHadoopDistributions.AMAZON_EMR, "MapR 1.2.8(deprecated)", "MapR_EMR", false, true),
    PIVOTAL_HD_2_0(
            EHadoopDistributions.PIVOTAL_HD,
            "Pivotal HD 2.0",
            "PIVOTAL_HD_2_0",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    PIVOTAL_HD_1_0_1(
            EHadoopDistributions.PIVOTAL_HD,
            "Pivotal HD 1.0.1",
            "PIVOTAL_HD_1_0_1",
            false,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    // CUSTOM is also the fallback returned by the lookup methods below when
    // no constant matches; it accepts both MR1 and YARN.
    CUSTOM(EHadoopDistributions.CUSTOM, "", "", false, false, new EMRVersion[] { EMRVersion.MR1, EMRVersion.YARN });

    // Vendor family this version belongs to.
    private EHadoopDistributions distribution;

    // Human-readable label (used by indexOfByVersionDisplay).
    private String versionDisplayName;

    // Stable internal key (used by indexOfByVersion).
    private String versionValue;

    // Whether this release supports Kerberos security.
    private boolean supportSecurity;

    // Whether this release supports "group" mode.
    private boolean supportGroup;

    // MapReduce framework versions this release runs on.
    private EMRVersion[] mrVersions;

    /** Short constructor: defaults the MR framework support to MR1 only. */
    EHadoopVersion4Drivers(EHadoopDistributions distribution, String versionDisplayName, String versionValue,
            boolean supportSecurity, boolean supportGroup) {
        this(distribution, versionDisplayName, versionValue, supportSecurity, supportGroup, new EMRVersion[] { EMRVersion.MR1 });
    }

    EHadoopVersion4Drivers(EHadoopDistributions distribution, String versionDisplayName, String versionValue,
            boolean supportSecurity, boolean supportGroup, EMRVersion[] mrVersions) {
        this.distribution = distribution;
        this.versionDisplayName = versionDisplayName;
        this.versionValue = versionValue;
        this.supportSecurity = supportSecurity;
        this.supportGroup = supportGroup;
        this.mrVersions = mrVersions;
    }

    public EHadoopDistributions getDistribution() {
        return this.distribution;
    }

    public String getVersionDisplay() {
        return this.versionDisplayName;
    }

    public String getVersionValue() {
        return this.versionValue;
    }

    public EMRVersion[] getMrVersions() {
        return this.mrVersions;
    }

    /** Looks up a constant by its display label; falls back to {@link #CUSTOM}. */
    public static EHadoopVersion4Drivers indexOfByVersionDisplay(String displayName) {
        return indexOf(displayName, true);
    }

    /** Looks up a constant by its internal key; falls back to {@link #CUSTOM}. */
    public static EHadoopVersion4Drivers indexOfByVersion(String value) {
        return indexOf(value, false);
    }

    /**
     * Case-insensitive lookup over either the display name ({@code display}
     * true) or the version key. Returns {@link #CUSTOM} when {@code name} is
     * null or matches nothing.
     */
    private static EHadoopVersion4Drivers indexOf(String name, boolean display) {
        if (name != null) {
            for (EHadoopVersion4Drivers version : EHadoopVersion4Drivers.values()) {
                if (display) {
                    if (name.equalsIgnoreCase(version.getVersionDisplay())) {
                        return version;
                    }
                } else {
                    if (name.equalsIgnoreCase(version.getVersionValue())) {
                        return version;
                    }
                }
            }
        }
        return EHadoopVersion4Drivers.CUSTOM;
    }

    /**
     * Collects all versions belonging to the given distribution, in declaration
     * order; empty list for a null distribution.
     */
    public static List<EHadoopVersion4Drivers> indexOfByDistribution(EHadoopDistributions distribution) {
        List<EHadoopVersion4Drivers> distribution4Versions = new ArrayList<EHadoopVersion4Drivers>();
        if (distribution != null) {
            for (EHadoopVersion4Drivers d4v : EHadoopVersion4Drivers.values()) {
                if (d4v.getDistribution().equals(distribution)) {
                    distribution4Versions.add(d4v);
                }
            }
        }
        return distribution4Versions;
    }

    public boolean isSupportSecurity() {
        return this.supportSecurity;
    }

    public boolean isSupportGroup() {
        return this.supportGroup;
    }

    public boolean isSupportMR1() {
        return ArrayUtils.contains(getMrVersions(), EMRVersion.MR1);
    }

    public boolean isSupportYARN() {
        return ArrayUtils.contains(getMrVersions(), EMRVersion.YARN);
    }
}
// ============================================================================
//
// Copyright (C) 2006-2014 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.hadoop.version;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.ArrayUtils;
/**
* DOC ycbai class global comment. Detailed comment.
*/
/**
 * Enumerates the supported Hadoop distribution/version pairs for driver
 * selection, with per-version capability flags: Kerberos security support,
 * "group" support, and the MapReduce framework versions (MR1 and/or YARN).
 *
 * NOTE(review): this capture contains two revisions of this enum from a diff
 * view; this copy is the earlier pre-change pane (fewer distributions listed).
 */
public enum EHadoopVersion4Drivers {

    HDP_1_0(EHadoopDistributions.HORTONWORKS, "Hortonworks Data Platform V1.0.0", "HDP_1_0", true, false),
    HDP_1_2(EHadoopDistributions.HORTONWORKS, "Hortonworks Data Platform V1.2.0(Bimota)", "HDP_1_2", true, false),
    HDP_1_3(EHadoopDistributions.HORTONWORKS, "Hortonworks Data Platform V1.3.0(Condor)", "HDP_1_3", true, false),
    HDP_2_0(
            EHadoopDistributions.HORTONWORKS,
            "Hortonworks Data Platform V2.0.0(BigWheel)",
            "HDP_2_0",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    APACHE_1_0_0(EHadoopDistributions.APACHE, "Apache 1.0.0", "APACHE_1_0_0", true, false),
    APACHE_0_20_204(EHadoopDistributions.APACHE, "Apache 0.20.204", "APACHE_0_20_204", false, false),
    APACHE_0_20_203(EHadoopDistributions.APACHE, "Apache 0.20.203", "APACHE_0_20_203", false, false),
    APACHE_0_20_2(EHadoopDistributions.APACHE, "Apache 0.20.2", "APACHE_0_20_2", false, true),
    CLOUDERA_CDH3(EHadoopDistributions.CLOUDERA, "Cloudera CDH3", "Cloudera_CDH3", false, false),
    CLOUDERA_CDH4(EHadoopDistributions.CLOUDERA, "Cloudera CDH4", "Cloudera_CDH4", true, false),
    CLOUDERA_CDH4_YARN(
            EHadoopDistributions.CLOUDERA,
            "Cloudera CDH4 YARN",
            "Cloudera_CDH4_YARN",
            true,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    MAPR1(EHadoopDistributions.MAPR, "MapR 1.2.0", "MAPR1", false, true),
    MAPR2(EHadoopDistributions.MAPR, "MapR 2.0.0", "MAPR2", false, true),
    MAPR212(EHadoopDistributions.MAPR, "MapR 2.1.2", "MAPR212", false, true),
    MAPR213(EHadoopDistributions.MAPR, "MapR 2.1.3", "MAPR213", false, true),
    MAPR301(EHadoopDistributions.MAPR, "MapR 3.0.1", "MAPR301", false, true),
    MAPR_EMR(EHadoopDistributions.AMAZON_EMR, "MapR 1.2.8", "MapR_EMR", false, true),
    APACHE_1_0_3_EMR(EHadoopDistributions.AMAZON_EMR, "Apache 1.0.3", "APACHE_1_0_3_EMR", true, false),
    PIVOTAL_HD_1_0_1(
            EHadoopDistributions.PIVOTAL_HD,
            "Pivotal HD 1.0.1",
            "PIVOTAL_HD_1_0_1",
            false,
            false,
            new EMRVersion[] { EMRVersion.YARN }),
    // CUSTOM is also the fallback returned by the lookup methods below when
    // no constant matches; it accepts both MR1 and YARN.
    CUSTOM(EHadoopDistributions.CUSTOM, "", "", false, false, new EMRVersion[] { EMRVersion.MR1, EMRVersion.YARN });

    // Vendor family this version belongs to.
    private EHadoopDistributions distribution;

    // Human-readable label (used by indexOfByVersionDisplay).
    private String versionDisplayName;

    // Stable internal key (used by indexOfByVersion).
    private String versionValue;

    // Whether this release supports Kerberos security.
    private boolean supportSecurity;

    // Whether this release supports "group" mode.
    private boolean supportGroup;

    // MapReduce framework versions this release runs on.
    private EMRVersion[] mrVersions;

    /** Short constructor: defaults the MR framework support to MR1 only. */
    EHadoopVersion4Drivers(EHadoopDistributions distribution, String versionDisplayName, String versionValue,
            boolean supportSecurity, boolean supportGroup) {
        this(distribution, versionDisplayName, versionValue, supportSecurity, supportGroup, new EMRVersion[] { EMRVersion.MR1 });
    }

    EHadoopVersion4Drivers(EHadoopDistributions distribution, String versionDisplayName, String versionValue,
            boolean supportSecurity, boolean supportGroup, EMRVersion[] mrVersions) {
        this.distribution = distribution;
        this.versionDisplayName = versionDisplayName;
        this.versionValue = versionValue;
        this.supportSecurity = supportSecurity;
        this.supportGroup = supportGroup;
        this.mrVersions = mrVersions;
    }

    public EHadoopDistributions getDistribution() {
        return this.distribution;
    }

    public String getVersionDisplay() {
        return this.versionDisplayName;
    }

    public String getVersionValue() {
        return this.versionValue;
    }

    public EMRVersion[] getMrVersions() {
        return this.mrVersions;
    }

    /** Looks up a constant by its display label; falls back to {@link #CUSTOM}. */
    public static EHadoopVersion4Drivers indexOfByVersionDisplay(String displayName) {
        return indexOf(displayName, true);
    }

    /** Looks up a constant by its internal key; falls back to {@link #CUSTOM}. */
    public static EHadoopVersion4Drivers indexOfByVersion(String value) {
        return indexOf(value, false);
    }

    /**
     * Case-insensitive lookup over either the display name ({@code display}
     * true) or the version key. Returns {@link #CUSTOM} when {@code name} is
     * null or matches nothing.
     */
    private static EHadoopVersion4Drivers indexOf(String name, boolean display) {
        if (name != null) {
            for (EHadoopVersion4Drivers version : EHadoopVersion4Drivers.values()) {
                if (display) {
                    if (name.equalsIgnoreCase(version.getVersionDisplay())) {
                        return version;
                    }
                } else {
                    if (name.equalsIgnoreCase(version.getVersionValue())) {
                        return version;
                    }
                }
            }
        }
        return EHadoopVersion4Drivers.CUSTOM;
    }

    /**
     * Collects all versions belonging to the given distribution, in declaration
     * order; empty list for a null distribution.
     */
    public static List<EHadoopVersion4Drivers> indexOfByDistribution(EHadoopDistributions distribution) {
        List<EHadoopVersion4Drivers> distribution4Versions = new ArrayList<EHadoopVersion4Drivers>();
        if (distribution != null) {
            for (EHadoopVersion4Drivers d4v : EHadoopVersion4Drivers.values()) {
                if (d4v.getDistribution().equals(distribution)) {
                    distribution4Versions.add(d4v);
                }
            }
        }
        return distribution4Versions;
    }

    public boolean isSupportSecurity() {
        return this.supportSecurity;
    }

    public boolean isSupportGroup() {
        return this.supportGroup;
    }

    public boolean isSupportMR1() {
        return ArrayUtils.contains(getMrVersions(), EMRVersion.MR1);
    }

    public boolean isSupportYARN() {
        return ArrayUtils.contains(getMrVersions(), EMRVersion.YARN);
    }
}

View File

@@ -28,8 +28,6 @@ public enum ECustomVersionGroup {
PIG_HBASE,
PIG_HCATALOG,
MAP_REDUCE,
ALL;

View File

@@ -21,8 +21,6 @@ public enum ECustomVersionType {
PIG_HBASE("Pig for HBase", ECustomVersionGroup.PIG_HBASE), //$NON-NLS-1$
PIG_HCATALOG("Pig for Hcatalog", ECustomVersionGroup.PIG_HCATALOG), //$NON-NLS-1$
MAP_REDUCE("Map Reduce", ECustomVersionGroup.MAP_REDUCE), //$NON-NLS-1$
ALL("All", ECustomVersionGroup.ALL); //$NON-NLS-1$

View File

@@ -34,7 +34,6 @@ import org.talend.core.GlobalServiceRegister;
import org.talend.core.hadoop.IHadoopService;
import org.talend.core.hadoop.version.EHadoopDistributions;
import org.talend.core.hadoop.version.EHadoopVersion4Drivers;
import org.talend.core.model.components.ComponentCategory;
import org.talend.core.model.general.ModuleNeeded;
import org.talend.core.model.process.IElementParameter;
import org.talend.core.model.process.INode;
@@ -100,7 +99,7 @@ public class HadoopVersionDialog extends TitleAreaDialog {
protected void configureShell(Shell newShell) {
super.configureShell(newShell);
newShell.setText(Messages.getString("HadoopVersionDialog.title")); //$NON-NLS-1$
newShell.setSize(700, 450);
newShell.setSize(580, 450);
setHelpAvailable(false);
}
@@ -412,8 +411,6 @@ public class HadoopVersionDialog extends TitleAreaDialog {
if (ECustomVersionType.PIG == type || ECustomVersionType.PIG_HBASE == type
|| ECustomVersionType.PIG_HCATALOG == type) {
hadoopLibraries = getLibrariesForPig(type);
} else if (ECustomVersionType.MAP_REDUCE == type) {
hadoopLibraries = getLibrariesForMapReduce(type);
} else {
// fix for TDI-25676 HCATALOG and OOZIE should use the same jars as HDFS
if (!commonGroupCalculated
@@ -450,30 +447,6 @@ public class HadoopVersionDialog extends TitleAreaDialog {
return libMap;
}
private Set<String> getLibrariesForMapReduce(ECustomVersionType type) {
Set<String> neededLibraries = new HashSet<String>();
INode node = CoreRuntimePlugin.getInstance().getDesignerCoreService()
.getRefrenceNode("tMRConfiguration", ComponentCategory.CATEGORY_4_MAPREDUCE.getName());//$NON-NLS-1$
IElementParameter elementParameter = node.getElementParameter("DISTRIBUTION");//$NON-NLS-1$
if (elementParameter != null) {
elementParameter.setValue(distribution);
}
elementParameter = node.getElementParameter("MR_VERSION");//$NON-NLS-1$
if (elementParameter != null) {
elementParameter.setValue(version);
}
List<ModuleNeeded> modulesNeeded = node.getModulesNeeded();
for (ModuleNeeded module : modulesNeeded) {
if (module.isRequired(node.getElementParameters())) {
neededLibraries.add(module.getModuleName());
}
}
return neededLibraries;
}
private Set<String> getLibrariesForPig(ECustomVersionType type) {
Set<String> neededLibraries = new HashSet<String>();

View File

@@ -206,7 +206,6 @@ public class MetadataSchema {
final Node defaultValue = nodeMap.getNamedItem("default"); //$NON-NLS-1$
final Node comment = nodeMap.getNamedItem("comment"); //$NON-NLS-1$
final Node pattern = nodeMap.getNamedItem("pattern"); //$NON-NLS-1$
final Node originalLength = nodeMap.getNamedItem("originalLength");//$NON-NLS-1$
// see feature 4456
String nodeValue = MetadataToolHelper.validateColumnName(label.getNodeValue(), 0);
@@ -223,15 +222,6 @@ public class MetadataSchema {
} else {
metadataColumn.setOriginalDbColumnName(nodeValue);
}
if (originalLength != null && originalLength.getNodeValue() != null) {
try {
metadataColumn.setOriginalLength(Integer.parseInt(originalLength.getNodeValue()));
} catch (final NumberFormatException e) {
metadataColumn.setOriginalLength(null);
}
} else {
metadataColumn.setOriginalLength(null);
}
if (length.getNodeValue() != null) {
try {
metadataColumn.setLength(Integer.parseInt(length.getNodeValue()));
@@ -794,14 +784,6 @@ public class MetadataSchema {
}
column.setAttributeNode(length);
Attr originalLength = document.createAttribute("originalLength"); //$NON-NLS-1$
if (metadataColumn.getOriginalLength() == null) {
originalLength.setNodeValue("-1"); //$NON-NLS-1$
} else {
originalLength.setNodeValue(String.valueOf(metadataColumn.getOriginalLength()));
}
column.setAttributeNode(originalLength);
Attr precision = document.createAttribute("precision"); //$NON-NLS-1$
if (metadataColumn.getPrecision() == null) {
precision.setNodeValue("-1"); //$NON-NLS-1$

View File

@@ -15,6 +15,8 @@ package org.talend.core.model.metadata.connection.hive;
import java.util.ArrayList;
import java.util.List;
import org.talend.commons.utils.platform.PluginChecker;
/**
* @author Marvin Wang
* @version 1.0 jdk1.6
@@ -124,9 +126,9 @@ public class HiveConnUtils {
}
}
// ADD msjian TDQ-6407 2012-11-26:for top not support hive embedded mode
// if (PluginChecker.isOnlyTopLoaded() && (level == 0 || level == 2)) {
// list.remove(0);
// }
if (PluginChecker.isOnlyTopLoaded() && (level == 0 || level == 2)) {
list.remove(0);
}
// TDQ-6407~
return list;
}
@@ -240,9 +242,9 @@ public class HiveConnUtils {
protected static List<String> getHiveModeNameList(int distributionIndex, int versionIndex, int hiveServerIndex) {
List<HiveConnVersionInfo> hiveModeObjs = getHiveModes(distributionIndex, versionIndex, hiveServerIndex);
// ADD msjian TDQ-6407 2012-11-26: for top, not support hive embeded mode,hide this menu
// if (PluginChecker.isOnlyTopLoaded() && hiveModeObjs.size() > 1) {
// hiveModeObjs.remove(0);
// }
if (PluginChecker.isOnlyTopLoaded() && hiveModeObjs.size() > 1) {
hiveModeObjs.remove(0);
}
// TDQ-6407~
if (hiveModeObjs != null && hiveModeObjs.size() > 0) {
List<String> hiveModeNameList = new ArrayList<String>();

View File

@@ -43,46 +43,38 @@ public enum HiveConnVersionInfo {
DISTRO_CUSTOM(0, "CUSTOM", "Custom - Unsupported", false), //$NON-NLS-1$//$NON-NLS-2$
HDP_2_1(1, "HDP_2_1", "Hortonworks Data Platform V2.1.0(Baikal)", true, false, true, true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
HDP_2_0(1, "HDP_2_0", "Hortonworks Data Platform V2.0.0(BigWheel)", true, false, true, true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
HDP_1_3(1, "HDP_1_3", "Hortonworks Data Platform V1.3.0(Condor)", true, true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
HDP_1_0(1, "HDP_1_0", "Hortonworks Data Platform V1.0.0", true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
HDP_1_2(1, "HDP_1_2", "Hortonworks Data Platform V1.2.0(Bimota)", true, true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
HDP_1_0(1, "HDP_1_0", "Hortonworks Data Platform V1.0.0(deprecated)", true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
HDP_1_3(1, "HDP_1_3", "Hortonworks Data Platform V1.3.0(Condor)", true, true, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
APACHE_1_0_0(1, "APACHE_1_0_0", "Apache 1.0.0 (Hive 0.9.0)", false, HiveConnVersionInfo.APACHE), //$NON-NLS-1$//$NON-NLS-2$
HDP_2_0(1, "HDP_2_0", "Hortonworks Data Platform V2.0.0", true, false, true, false, HiveConnVersionInfo.HORTONWORKS), //$NON-NLS-1$//$NON-NLS-2$
APACHE_0_20_203(1, "APACHE_0_20_203", "Apache 0.20.203 (Hive 0.7.1)", false, HiveConnVersionInfo.APACHE), //$NON-NLS-1$//$NON-NLS-2$
Cloudera_CDH5(1, "Cloudera_CDH5", "Cloudera CDH5", true, false, true, true, HiveConnVersionInfo.CLOUDERA), //$NON-NLS-1$//$NON-NLS-2$
APACHE_1_0_0(1, "APACHE_1_0_0", "Apache 1.0.0 (Hive 0.9.0)", false, HiveConnVersionInfo.APACHE), //$NON-NLS-1$//$NON-NLS-2$
Cloudera_CDH4_YARN(1, "Cloudera_CDH4_YARN", "Cloudera CDH4 YARN", true, false, true, false, HiveConnVersionInfo.CLOUDERA), //$NON-NLS-1$//$NON-NLS-2$
Cloudera_CDH3(1, "Cloudera_CDH3", "Cloudera CDH3", false, HiveConnVersionInfo.CLOUDERA), //$NON-NLS-1$//$NON-NLS-2$
Cloudera_CDH4(1, "Cloudera_CDH4", "Cloudera CDH4", true, true, HiveConnVersionInfo.CLOUDERA), //$NON-NLS-1$//$NON-NLS-2$
Cloudera_CDH3(1, "Cloudera_CDH3", "Cloudera CDH3(deprecated)", false, HiveConnVersionInfo.CLOUDERA), //$NON-NLS-1$//$NON-NLS-2$
MAPR3_1_0(1, "MAPR310", "MapR 3.1.0", true, false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR3_0_1(1, "MAPR301", "MapR 3.0.1", true, false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR2_1_3(1, "MAPR213", "MapR 2.1.3", true, false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR2_1_2(1, "MAPR212", "MapR 2.1.2", false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR2(1, "MAPR2", "MapR 2.0.0", false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
Cloudera_CDH4_YARN(1, "Cloudera_CDH4_YARN", "Cloudera CDH4 YARN", true, false, true, false, HiveConnVersionInfo.CLOUDERA), //$NON-NLS-1$//$NON-NLS-2$
MAPR1(1, "MAPR1", "MapR 1.2.0", false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR2(1, "MAPR2", "MapR 2.0.0", false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR2_1_2(1, "MAPR212", "MapR 2.1.2", false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR2_1_3(1, "MAPR213", "MapR 2.1.3", true, false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MAPR3_0_1(1, "MAPR301", "MapR 3.0.1", true, false, HiveConnVersionInfo.MAPR), //$NON-NLS-1$//$NON-NLS-2$
MapR_EMR(1, "MapR_EMR", "MapR 1.2.8", false, HiveConnVersionInfo.AMAZON_EMR), //$NON-NLS-1$//$NON-NLS-2$
APACHE_1_0_3_EMR(1, "APACHE_1_0_3_EMR", "Apache 1.0.3 (Hive 0.8.1)", false, HiveConnVersionInfo.AMAZON_EMR), //$NON-NLS-1$//$NON-NLS-2$
MapR_EMR(1, "MapR_EMR", "MapR 1.2.8(deprecated)", false, HiveConnVersionInfo.AMAZON_EMR), //$NON-NLS-1$//$NON-NLS-2$
PIVOTAL_HD_2_0(1, "PIVOTAL_HD_2_0", "Pivotal HD 2.0", true, false, true, true, HiveConnVersionInfo.PIVOTAL_HD), //$NON-NLS-1$//$NON-NLS-2$
PIVOTAL_HD_1_0_1(1, "PIVOTAL_HD_1_0_1", "Pivotal HD 1.0.1", false, false, true, false, HiveConnVersionInfo.PIVOTAL_HD), //$NON-NLS-1$//$NON-NLS-2$
DISTRO_VERSION_CUSTOM(1, "DISTRO_VERSION_CUSTOM", "Customized Version", true, true, HiveConnVersionInfo.DISTRO_CUSTOM), //$NON-NLS-1$//$NON-NLS-2$
@@ -94,19 +86,15 @@ public enum HiveConnVersionInfo {
HiveConnVersionInfo.HDP_1_2,
HiveConnVersionInfo.HDP_1_3,
HiveConnVersionInfo.HDP_2_0,
HiveConnVersionInfo.HDP_2_1,
HiveConnVersionInfo.APACHE_1_0_0,
HiveConnVersionInfo.Cloudera_CDH4,
HiveConnVersionInfo.Cloudera_CDH4_YARN,
HiveConnVersionInfo.Cloudera_CDH5,
HiveConnVersionInfo.MAPR2,
HiveConnVersionInfo.MAPR2_1_2,
HiveConnVersionInfo.MAPR2_1_3,
HiveConnVersionInfo.MAPR3_0_1,
HiveConnVersionInfo.MAPR3_1_0,
HiveConnVersionInfo.APACHE_1_0_3_EMR,
HiveConnVersionInfo.PIVOTAL_HD_1_0_1,
HiveConnVersionInfo.PIVOTAL_HD_2_0,
HiveConnVersionInfo.DISTRO_VERSION_CUSTOM),
MODE_STANDALONE(2, "STANDALONE",//$NON-NLS-1$
@@ -115,23 +103,19 @@ public enum HiveConnVersionInfo {
HiveConnVersionInfo.HDP_1_2,
HiveConnVersionInfo.HDP_1_3,
HiveConnVersionInfo.HDP_2_0,
HiveConnVersionInfo.HDP_2_1,
HiveConnVersionInfo.APACHE_0_20_203,
HiveConnVersionInfo.APACHE_1_0_0,
HiveConnVersionInfo.Cloudera_CDH3,
HiveConnVersionInfo.Cloudera_CDH4,
HiveConnVersionInfo.Cloudera_CDH4_YARN,
HiveConnVersionInfo.Cloudera_CDH5,
HiveConnVersionInfo.MAPR1,
HiveConnVersionInfo.MAPR2,
HiveConnVersionInfo.MAPR2_1_2,
HiveConnVersionInfo.MAPR2_1_3,
HiveConnVersionInfo.MAPR3_0_1,
HiveConnVersionInfo.MAPR3_1_0,
HiveConnVersionInfo.MapR_EMR,
HiveConnVersionInfo.APACHE_1_0_3_EMR,
HiveConnVersionInfo.PIVOTAL_HD_1_0_1,
HiveConnVersionInfo.PIVOTAL_HD_2_0,
HiveConnVersionInfo.DISTRO_VERSION_CUSTOM);
private int level;
@@ -150,9 +134,6 @@ public enum HiveConnVersionInfo {
private boolean supportSecurity;
private static HiveConnVersionInfo[] hiveVersions = new HiveConnVersionInfo[] { HiveConnVersionInfo.Cloudera_CDH5,
HiveConnVersionInfo.HDP_2_1, HiveConnVersionInfo.HDP_2_0, HiveConnVersionInfo.PIVOTAL_HD_2_0 };
private HiveConnVersionInfo(int level, String key, String displayName, boolean supportSecurity,
HiveConnVersionInfo... follows) {
this(level, key, displayName, false, supportSecurity, follows);
@@ -212,8 +193,4 @@ public enum HiveConnVersionInfo {
return this.supportSecurity;
}
public static HiveConnVersionInfo[] getHiveVersionsNotSupportOnWindows() {
return hiveVersions;
}
}

View File

@@ -92,13 +92,7 @@ public abstract class AbstractNode implements INode {
private List<ModuleNeeded> modulesNeeded = new ArrayList<ModuleNeeded>();
// for DI job, and indicate if the component after the iterator
// connection which enable parallel, even this component after
// onComponentOk
private String parallelIterator = null;
// as the talend job contains multiple mapreduce jobs, use this to indicate
// which mapreduce job contains this
// as the talend job contains multiple mapreduce jobs, use this to indicate which mapreduce job contains this
// graphic node
private Integer mrGroupId;
@@ -111,11 +105,6 @@ public abstract class AbstractNode implements INode {
// indicate if this MR component will generate Reduce part
private boolean mrContainsReduce;
private boolean mapOnlyAfterReduce;
// for MR, tag this component is the ref(lookup) start node
private boolean isRefNode = false;
public String getComponentName() {
return componentName;
}
@@ -534,7 +523,7 @@ public abstract class AbstractNode implements INode {
}
for (IElementParameter param : this.getElementParameters()) {
if (param.getName().equals("UNIQUE_NAME") || isSQLQueryParameter(param) || isTDMParameter(param)) { //$NON-NLS-1$
if (param.getName().equals("UNIQUE_NAME") || isSQLQueryParameter(param)) { //$NON-NLS-1$
continue;
}
ParameterValueUtil.renameValues(param, oldName, newName);
@@ -553,20 +542,6 @@ public abstract class AbstractNode implements INode {
return parameter.getFieldType().equals(EParameterFieldType.MEMO_SQL) && parameter.getName().equals("QUERY"); //$NON-NLS-1$
}
/**
* bug TDM-409
* <p>
* DOC hwang Comment method "isTDMParameter".
*
* @param parameter
* @return
*/
private boolean isTDMParameter(final IElementParameter parameter) {
return parameter.getFieldType().equals(EParameterFieldType.HMAP_PATH)
&& parameter.getName().equals(EParameterFieldType.HMAP_PATH.getName());
}
/*
* (non-Javadoc)
*
@@ -731,9 +706,6 @@ public abstract class AbstractNode implements INode {
@Override
public List<? extends IElementParameter> getElementParametersWithChildrens() {
if (this.elementParameters == null) {
return new ArrayList<IElementParameter>();
}
List<IElementParameter> fullListParam = new ArrayList<IElementParameter>(this.elementParameters);
for (IElementParameter curParam : elementParameters) {
@@ -1166,59 +1138,4 @@ public abstract class AbstractNode implements INode {
public void setRefNodes(List<INode> refNodes) {
this.refNodes = refNodes;
}
/**
* Getter for mapOnlyAfterReduce.
*
* @return the mapOnlyAfterReduce
*/
public boolean isMapOnlyAfterReduce() {
return this.mapOnlyAfterReduce;
}
/**
* Sets the mapOnlyAfterReduce.
*
* @param mapOnlyAfterReduce the mapOnlyAfterReduce to set
*/
public void setMapOnlyAfterReduce(boolean mapOnlyAfterReduce) {
this.mapOnlyAfterReduce = mapOnlyAfterReduce;
}
/**
* Getter for isRefNode.
*
* @return the isRefNode
*/
public boolean isRefNode() {
return this.isRefNode;
}
/**
* Sets the isRefNode.
*
* @param isRefNode the isRefNode to set
*/
public void setRefNode(boolean isRefNode) {
this.isRefNode = isRefNode;
}
/**
* Getter for parallelIterator.
*
* @return the parallelIterator
*/
public String getParallelIterator() {
return parallelIterator;
}
/**
* Sets the parallelIterator.
*
* @param parallelIterator the parallelIterator to set
*/
public void setParallelIterator(String parallelIterator) {
this.parallelIterator = parallelIterator;
}
}

View File

@@ -92,7 +92,7 @@ public final class ElementParameterParser {
public static boolean canEncrypt(final IElement node, final String parameterName) {
String value = getValue(node, parameterName);
if (value != null && value.startsWith("\"") && value.endsWith("\"") && TalendQuoteUtils.filterQuote(value).length() == 0) { //$NON-NLS-1$//$NON-NLS-2$
if (value.startsWith("\"") && value.endsWith("\"") && TalendQuoteUtils.filterQuote(value).length() == 0) { //$NON-NLS-1$//$NON-NLS-2$
return true;
} else {
return false;
@@ -191,7 +191,7 @@ public final class ElementParameterParser {
* @return
*/
public static Object getObjectValue(final IElement element, final String text) {
if (text == null || element == null) {
if (text == null) {
return null;
}
IElementParameter param;

View File

@@ -112,8 +112,6 @@ public class RepositoryViewObject implements IRepositoryViewObject {
private static final String TIP = "same name item with other project";
private boolean avoidGuiInfos;
public RepositoryViewObject(Property property, boolean avoidGuiInfos) {
this.id = property.getId();
this.author = property.getAuthor();
@@ -139,7 +137,6 @@ public class RepositoryViewObject implements IRepositoryViewObject {
informationStatus = factory.getStatus(informationLevel);
modified = factory.isModified(property);
}
this.avoidGuiInfos = avoidGuiInfos;
if (!avoidGuiInfos) {
if (type == ERepositoryObjectType.JOBLET) {
JobletProcessItem item = (JobletProcessItem) property.getItem();
@@ -310,41 +307,39 @@ public class RepositoryViewObject implements IRepositoryViewObject {
repositoryStatus = factory.getStatus(property.getItem());
InformationLevel informationLevel = property.getMaxInformationLevel();
informationStatus = factory.getStatus(informationLevel);
if (!this.avoidGuiInfos) {
if (type == ERepositoryObjectType.JOBLET) {
JobletProcessItem item = (JobletProcessItem) property.getItem();
if (item.getIcon() != null && item.getIcon().getInnerContent() != null
&& item.getIcon().getInnerContent().length != 0) {
customImage = getJobletCustomIcon(property);
customImage = ImageUtils.propertyLabelScale(property.getId(), customImage, ICON_SIZE.ICON_16);
}
IComponentsService service = (IComponentsService) GlobalServiceRegister.getDefault().getService(
IComponentsService.class);
IJobletProviderService jobletservice = (IJobletProviderService) GlobalServiceRegister.getDefault()
.getService(IJobletProviderService.class);
if (service != null && jobletservice != null) {
IComponentsFactory factorySingleton = service.getComponentsFactory();
IComponent component = factorySingleton.get(property.getLabel(), DI);
if (component != null) {
try {
Property tProperty = jobletservice.getJobletComponentItem(component);
if (!tProperty.getId().equals(this.id)) {
informationStatus = ERepositoryStatus.WARN;
property.setDescription(TIP);
}
} catch (Exception e) {
// tProperty is null
if (type == ERepositoryObjectType.JOBLET) {
JobletProcessItem item = (JobletProcessItem) property.getItem();
if (item.getIcon() != null && item.getIcon().getInnerContent() != null
&& item.getIcon().getInnerContent().length != 0) {
customImage = getJobletCustomIcon(property);
customImage = ImageUtils.propertyLabelScale(property.getId(), customImage, ICON_SIZE.ICON_16);
}
IComponentsService service = (IComponentsService) GlobalServiceRegister.getDefault().getService(
IComponentsService.class);
IJobletProviderService jobletservice = (IJobletProviderService) GlobalServiceRegister.getDefault().getService(
IJobletProviderService.class);
if (service != null && jobletservice != null) {
IComponentsFactory factorySingleton = service.getComponentsFactory();
IComponent component = factorySingleton.get(property.getLabel(), DI);
if (component != null) {
try {
Property tProperty = jobletservice.getJobletComponentItem(component);
if (!tProperty.getId().equals(this.id)) {
informationStatus = ERepositoryStatus.WARN;
property.setDescription(TIP);
}
} catch (Exception e) {
// tProperty is null
}
}
} else if (type == ERepositoryObjectType.DOCUMENTATION) {
this.customImage = ImageProvider.getImage(RepositoryImageProvider.getIcon(type));
Item item = property.getItem();
if (item instanceof DocumentationItem) {
customImage = coreSerivce.getImageWithDocExt(((DocumentationItem) item).getExtension());
} else if (item instanceof LinkDocumentationItem) {
customImage = coreSerivce.getImageWithSpecial(customImage).createImage();
}
}
} else if (type == ERepositoryObjectType.DOCUMENTATION) {
this.customImage = ImageProvider.getImage(RepositoryImageProvider.getIcon(type));
Item item = property.getItem();
if (item instanceof DocumentationItem) {
customImage = coreSerivce.getImageWithDocExt(((DocumentationItem) item).getExtension());
} else if (item instanceof LinkDocumentationItem) {
customImage = coreSerivce.getImageWithSpecial(customImage).createImage();
}
}
return property;

View File

@@ -1160,7 +1160,6 @@ public abstract class RepositoryUpdateManager {
types.add(EUpdateItemType.JOB_PROPERTY_EXTRA);
types.add(EUpdateItemType.JOB_PROPERTY_STATS_LOGS);
types.add(EUpdateItemType.JOB_PROPERTY_HEADERFOOTER);
types.add(EUpdateItemType.JOB_PROPERTY_MAPREDUCE);
return types;
}

View File

@@ -875,7 +875,8 @@ public class NodeUtil {
}
// System.out.println("leftQuote="+leftQuotes + ", rightQuote="+rightQuotes);
if (leftQuotes < rightQuotes) {
result += original.substring(leftQuotes, rightQuotes + 1).replace("\r", "\\r").replace("\n", "\\n");
result += original.substring(leftQuotes, rightQuotes + 1).replace("\r\n", "\\n").replace("\r", "\\r")
.replace("\n", "\\n");
}
leftQuotes = original.indexOf("\"", rightQuotes + 1);

View File

@@ -12,19 +12,25 @@
// ============================================================================
package org.talend.core.model.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import org.apache.oro.text.regex.MalformedPatternException;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.PatternCompiler;
import org.apache.oro.text.regex.PatternMatcher;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import org.apache.oro.text.regex.Perl5Substitution;
import org.apache.oro.text.regex.Substitution;
import org.apache.oro.text.regex.Util;
import org.eclipse.emf.common.util.EList;
import org.eclipse.swt.graphics.Point;
import org.talend.core.model.context.UpdateContextVariablesHelper;
import org.talend.core.model.process.EParameterFieldType;
import org.talend.core.model.process.IElementParameter;
@@ -101,6 +107,7 @@ public final class ParameterValueUtil {
// replace
String returnValue = "";
if (value.contains(TalendQuoteUtils.getQuoteChar()) && !flag) {
// returnValue = splitQueryData(matcher, pattern, substitution, value, Util.SUBSTITUTE_ALL);
returnValue = splitQueryData(oldName, newName, value);
} else {
returnValue = Util.substitute(matcher, pattern, substitution, value, Util.SUBSTITUTE_ALL);
@@ -112,21 +119,203 @@ public final class ParameterValueUtil {
}
// function before TDI-29092 modify,this function seems only rename variables in context,I put this funciton back
// incase any problem with the new function and we can refer the old one to check the problem.
public static String splitQueryData(PatternMatcher matcher, Pattern pattern, Substitution sub, String value, int numSubs) {
String[] split = value.split("\"");
int i = 0;
String replace = "";
for (String s : split) {
if (i % 2 == 0) {
replace = s;
if (i != 0) {
if (matcher.contains(value, pattern)) {
replace = split[i].toString();
split[i] = Util.substitute(matcher, pattern, sub, replace, numSubs);
}
}
}
i++;
}
String returnValue = "";
for (int t = 1; t < split.length; t++) {
if (t % 2 == 0) {
returnValue += split[t];
} else {
returnValue += "\"" + split[t] + "\"";
}
}
return returnValue;
}
// for bug 12594 split "; for bug 29092(it has JUnits)
public static String splitQueryData(String oldName, String newName, String value) {
// example:"drop table "+context.oracle_schema+".\"TDI_26803\""
// >>>>>>>>__(const)__ ______(varible)_______ ___(const)___
String regex = "\"\"|\".*?([^\\\\]\")";
// >>>>>>>>_*_(const)__ _____*_(varible)_______ __*_(const)___
final int length = value.length();
// quotaStrings which stores the start and end point for all const strings in the value
LinkedHashMap<Integer, Integer> quotaStrings = new LinkedHashMap<Integer, Integer>();
List<Point> functionNameAreas = new ArrayList<Point>();
// get and store all start and end point of const strings
int start = -1;
int end = -2;
char ch;
for (int i = 0; i < length; i++) {
ch = value.charAt(i);
if (ch == '\"') {
// in case of cases :
// case 1 : [ "select * from " + context.table + " where value = \"context.table\"" ]
// case 2 : [ "select * from " + context.table + " where value = \"\\" + context.table +
// "\\context.table\"" ]
if (isEscapeSequence(value, i)) {
continue;
}
// [0 <= start] >> in case the first const String position compute error
if (0 <= start && end < start) {
end = i;
quotaStrings.put(start, end);
} else {
start = i;
}
}
}
{
// in case the value has not complete quota
// exapmle > "select a,context.b from " + context.b + "where value = context.b
// **but** maybe more impossible truth is that
// they write this(context.b) just want to use it as a varible...
// so maybe should not set the string behind the quota as a const by default..
// ---*--- the following code is set the string behind the quota as a const
// if (0 <= start && end < start) {
// end = length - 1;
// quotaStrings.put(start, end);
// }
}
// find the varible string, do replace, then concat them
StringBuffer strBuffer = new StringBuffer();
String subString = null;
int vStart = 0;
int vEnd = 0;
int methodMaxIndex = 0;
int calcMaxIndex = 0;
start = 0;
end = 0;
for (Entry<Integer, Integer> entry : quotaStrings.entrySet()) {
start = entry.getKey();
end = entry.getValue() + 1;
vEnd = start;
if (vStart == start) {
// const string follow with const string, maybe won't happen...
// get the const string
subString = value.substring(start, end);
if (start < methodMaxIndex) {
subString = subString.replaceAll(oldName, newName);
}
} else {
// get the varible string, do replace, then append it
subString = value.substring(vStart, vEnd);
calcMaxIndex = calcMethodArea(subString, value, vStart, functionNameAreas, methodMaxIndex);
if (methodMaxIndex < calcMaxIndex) {
methodMaxIndex = calcMaxIndex;
}
String replacedString = doVaribleReplace(oldName, newName, value, functionNameAreas, vStart, vEnd);
strBuffer.append(replacedString);
// get the const string
subString = value.substring(start, end);
if (start < methodMaxIndex) {
subString = subString.replaceAll(oldName, newName);
}
}
// append the const string
strBuffer.append(subString);
// update the varible string start point
vStart = end;
}
// in case the last string of the value is a varible string
// then get it, and do replace, finally append it.
if (vStart < length) {
vEnd = length;
String replacedString = doVaribleReplace(oldName, newName, value, functionNameAreas, vStart, vEnd);
strBuffer.append(replacedString);
}
return strBuffer.toString();
}
/**
* DOC cmeng Comment method "doVaribleReplace".
*
* @param oldName
* @param newName
* @param value
* @param functionNameAreas
* @param vStart
* @param vEnd
*/
private static String doVaribleReplace(String oldName, String newName, String value, List<Point> functionNameAreas,
int vStart, int vEnd) {
StringBuffer replacedString = new StringBuffer();
int replaceableStart = vStart;
int replaceableEnd = vEnd;
for (Point functionNameArea : functionNameAreas) {
if (vEnd <= functionNameArea.x) {
break;
}
if (functionNameArea.y <= vStart) {
continue;
}
if (replaceableStart < functionNameArea.x) {
replaceableEnd = functionNameArea.x;
String replaceableString = value.substring(replaceableStart, replaceableEnd);
replacedString.append(doReplace(oldName, newName, replaceableString));
replacedString.append(value.substring(functionNameArea.x, functionNameArea.y));
} else {
replacedString.append(value.substring(functionNameArea.x, functionNameArea.y));
}
replaceableStart = functionNameArea.y;
}
if (replaceableStart < vEnd) {
String replaceableString = value.substring(replaceableStart, vEnd);
replacedString.append(doReplace(oldName, newName, replaceableString));
}
return replacedString.toString();
}
private static String doReplace(String oldName, String newName, String value) {
String vOldName = oldName.replaceAll("\\.", "\\\\."); //$NON-NLS-1$ //$NON-NLS-2$
// ((\b\w+\s*\.\s*)+schema(\s*\.\s*\w+)*)|((\b\w+\s*\.\s*)*schema(\s*\.\s*\w+)+)
String regex = "((\\b\\w+\\s*\\.\\s*)+" + vOldName + "(\\s*\\.\\s*\\w+)*)|((\\b\\w+\\s*\\.\\s*)*" + vOldName + "(\\s*\\.\\s*\\w+)+)"; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// obtain all varibles
String[] split = value.split(regex);
Map<String, String> replacedStrings = new HashMap<String, String>();
String returnValue = "";
StringBuffer returnValue = new StringBuffer();
// replace the variables & store both value of old and new
for (String s : split) {
if (s.contains(oldName)) {
replacedStrings.put(s, s.replaceAll("\\b" + oldName + "\\b", newName));
replacedStrings.put(s, s.replaceAll("\\b" + oldName + "\\b", newName)); //$NON-NLS-1$ //$NON-NLS-2$
} else {
replacedStrings.put(s, s);
}
}
if (split.length == 1) {
returnValue.append(replacedStrings.get(split[0]));
}
// obtain consts & concat the consts with the variables
java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(regex);
Matcher matcher = pattern.matcher(value);
@@ -144,14 +333,14 @@ public final class ParameterValueUtil {
if (curPos < x) {
oldFill = value.substring(curPos, x);
if ((newFill = replacedStrings.get(oldFill)) != null) {
returnValue += newFill;
returnValue.append(newFill);
} else {
returnValue += oldFill;
returnValue.append(oldFill);
}
curPos = x;
continue;
}
returnValue += matcher.group();
returnValue.append(matcher.group());
curPos = y;
if (!matcher.find()) {
x = valueLength;
@@ -161,7 +350,68 @@ public final class ParameterValueUtil {
}
}
}
return returnValue;
return returnValue.toString();
}
/**
* DOC cmeng Comment method "isEscapeSequence".
*
* @param value
* @param i
* @return
*/
private static boolean isEscapeSequence(String value, int i) {
boolean isEscapeSequence = false;
for (int index = i; 0 < index; index--) {
if (value.charAt(index - 1) == '\\') {
isEscapeSequence = !isEscapeSequence;
} else {
break;
}
}
return isEscapeSequence;
}
private static int calcMethodArea(String varibleString, String wholeString, int beginIndex, List<Point> functionNameAreas,
int lastIndex) {
// globalMap.get(...)
// String regex = "\\b\\S*\\s*\\.\\s*\\S*\\s*\\(\\z"; //$NON-NLS-1$
// maybe get(...) also is target
String regex = "\\b[\\S\\.]*?\\s*\\("; //$NON-NLS-1$
java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(regex);
Matcher matcher = pattern.matcher(varibleString);
int i = 0;
int varibleStringMaxIndex = beginIndex + varibleString.length() - 1;
while (matcher.find()) {
boolean isInQuota = false;
int parenthesisNum = 0;
Point functionNameArea = new Point(beginIndex + matcher.start(), beginIndex + matcher.end());
functionNameAreas.add(functionNameArea);
if (varibleStringMaxIndex < i || varibleStringMaxIndex < lastIndex) {
continue;
}
for (i = matcher.end(); i < wholeString.length(); i++) {
char ch = wholeString.charAt(i);
if (ch == '\"' && !isEscapeSequence(wholeString, i)) {
isInQuota = !isInQuota;
}
if (isInQuota) {
continue;
}
if (ch == '(') {
parenthesisNum++;
} else if (ch == ')') {
parenthesisNum--;
}
if (parenthesisNum < 0) {
break;
}
}
}
return i;
}
public static boolean isUseData(final IElementParameter param, final String name) {

View File

@@ -155,5 +155,4 @@ public interface ITalendCorePrefConstants {
public static final String CoreUIPlugin_ID = "org.talend.core.ui"; //$NON-NLS-1$
public static final String TOP_INSTALL_DONE = "top.install.done"; //$NON-NLS-1$
}

View File

@@ -273,8 +273,7 @@ public class ReflectionUtils {
argsClass[i] = args[i].getClass();
}
}
Constructor cons = newClass.getDeclaredConstructor(argsClass);
cons.setAccessible(true);
Constructor cons = newClass.getConstructor(argsClass);
instance = cons.newInstance(args);
return instance;

View File

@@ -110,8 +110,6 @@ public interface IDesignerCoreService extends IService {
public boolean isTalendEditor(IEditorPart activeEditor);
public INode getRefrenceNode(String componentName);
public INode getRefrenceNode(String componentName, String paletteType);
public boolean executeUpdatesManager(List<UpdateResult> results, boolean onlySimpleShow);

View File

@@ -23,7 +23,6 @@ import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Display;
@@ -273,15 +272,7 @@ public abstract class AContextualAction extends Action implements ITreeContextua
}
if (activePart instanceof IRepositoryView) {
workbenchPart = activePart;
ISelection selection = ((IRepositoryView) activePart).getViewer().getSelection();
if (!selection.isEmpty()) {
return ((IRepositoryView) activePart).getViewer().getSelection();
} else {
if (node != null) {
selection = new StructuredSelection(node);
}
return selection;
}
return ((IRepositoryView) activePart).getViewer().getSelection();
}
if (workbenchPart != null) {

View File

@@ -165,7 +165,6 @@ MetadataTableEditorView.DefaultTitle=Default
MetadataTableEditorView.KeyTitle=Key
MetadataTableEditorView.LengthTitle=Length
MetadataTableEditorView.NullableTitle=Nullable
MetadataTableEditorView.OriginalLengthTitle=OriginalLength
MetadataTableEditorView.PatternTitle=Date Pattern (Ctrl+Space available)
MetadataTableEditorView.PrecisionTitle=Precision
MetadataTableEditorView.TypleTitle=Type

View File

@@ -27,6 +27,7 @@ import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.viewers.CheckboxTreeViewer;
import org.eclipse.jface.viewers.IContentProvider;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.accessibility.AccessibleAdapter;
import org.eclipse.swt.accessibility.AccessibleEvent;
@@ -69,6 +70,8 @@ public class FilteredCheckboxTree extends Composite {
*/
protected Text filterText;
protected ModifyListener filterTextModifyListener;
/**
* The control representing the clear button for the filter text entry. This value may be <code>null</code> if no
* such button exists, or if the controls have not yet been created.
@@ -199,7 +202,7 @@ public class FilteredCheckboxTree extends Composite {
showFilterControls = PlatformUI.getPreferenceStore().getBoolean(IWorkbenchPreferenceConstants.SHOW_FILTERED_TEXTS);
createControl(parent, treeStyle);
createRefreshJob();
setInitialText(WorkbenchMessages.FilteredTree_FilterMessage);
setFont(parent.getFont());
}
@@ -284,7 +287,6 @@ public class FilteredCheckboxTree extends Composite {
if (treeViewer instanceof NotifyingTreeViewer) {
patternFilter.setUseCache(true);
}
treeViewer.addFilter(patternFilter);
return treeViewer.getControl();
}
@@ -363,7 +365,6 @@ public class FilteredCheckboxTree extends Composite {
// }
// }
treeViewer.refresh(true);
if (text.length() > 0 && !initial) {
/*
* Expand elements one at a time. After each is expanded, check to see if the filter text has
@@ -453,6 +454,7 @@ public class FilteredCheckboxTree extends Composite {
*/
protected void createFilterText(Composite parent) {
filterText = doCreateFilterText(parent);
setInitialText(WorkbenchMessages.FilteredTree_FilterMessage);
filterText.getAccessible().addAccessibleListener(new AccessibleAdapter() {
/*
@@ -545,7 +547,7 @@ public class FilteredCheckboxTree extends Composite {
}
});
filterText.addModifyListener(new ModifyListener() {
filterTextModifyListener = new ModifyListener() {
/*
* (non-Javadoc)
@@ -556,7 +558,8 @@ public class FilteredCheckboxTree extends Composite {
public void modifyText(ModifyEvent e) {
textChanged();
}
});
};
filterText.addModifyListener(filterTextModifyListener);
GridData gridData = new GridData(SWT.FILL, SWT.BEGINNING, true, false);
// if the text widget supported cancel then it will have it's own
@@ -594,6 +597,19 @@ public class FilteredCheckboxTree extends Composite {
calculateCheckedLeafNodes();
// narrowingDown = previousFilterText==null || getFilterString().startsWith(previousFilterText);
previousFilterText = getFilterString();
boolean hasPatternFilter = false;
for (ViewerFilter filter : treeViewer.getFilters()) {
if (filter == patternFilter) {
hasPatternFilter = true;
}
}
// add pattern filter to be the last filter
if (!hasPatternFilter) {
patternFilter.setOtherFilters(treeViewer.getFilters());
treeViewer.addFilter(patternFilter);
}
// cancel currently running job first, to prevent unnecessary redraw
refreshJob.cancel();
refreshJob.schedule(200);
@@ -700,6 +716,7 @@ public class FilteredCheckboxTree extends Composite {
}
};
// clearTextAction.setToolTipText(WorkbenchMessages.FilteredTree_ClearToolTip);
clearTextAction.setToolTipText(WorkbenchMessages.FilteredTree_ClearToolTip);
clearTextAction.setImageDescriptor(JFaceResources.getImageRegistry().getDescriptor(CLEAR_ICON));
clearTextAction.setDisabledImageDescriptor(JFaceResources.getImageRegistry().getDescriptor(DCLEAR_ICON));
@@ -774,8 +791,13 @@ public class FilteredCheckboxTree extends Composite {
*/
public void setInitialText(String text) {
initialText = text;
if (filterTextModifyListener != null) {
filterText.removeModifyListener(filterTextModifyListener);
}
setFilterText(initialText);
textChanged();
if (filterTextModifyListener != null) {
filterText.addModifyListener(filterTextModifyListener);
}
}
/**

View File

@@ -28,6 +28,13 @@ import org.eclipse.ui.internal.misc.StringMatcher;
*/
public class PatternFilter extends ViewerFilter {
private ViewerFilter[] otherFilters;
/**
* Cache of element filtered by other filters
*/
private Map<Object, Object[]> filteredByOthersCache = new HashMap<Object, Object[]>();
/*
* Cache of filtered elements in the tree
*/
@@ -69,6 +76,10 @@ public class PatternFilter extends ViewerFilter {
return elements;
}
if (elements.length == 0) {
return elements;
}
if (!useCache) {
return super.filter(viewer, parent, elements);
}
@@ -240,7 +251,17 @@ public class PatternFilter extends ViewerFilter {
* @return true if the given element has children that matches the filter text
*/
protected boolean isParentMatch(Viewer viewer, Object element) {
Object[] children = ((ITreeContentProvider) ((AbstractTreeViewer) viewer).getContentProvider()).getChildren(element);
Object[] children = filteredByOthersCache.get(element);
if (children == null) {
// fix for TDI-31520 , no need to check child elements already filtered by others
children = ((ITreeContentProvider) ((AbstractTreeViewer) viewer).getContentProvider()).getChildren(element);
if (otherFilters != null) {
for (ViewerFilter filter : otherFilters) {
children = filter.filter(viewer, element, children);
}
}
filteredByOthersCache.put(element, children);
}
if ((children != null) && (children.length > 0)) {
return isAnyVisible(viewer, element, children);
@@ -302,4 +323,13 @@ public class PatternFilter extends ViewerFilter {
void setUseCache(boolean useCache) {
this.useCache = useCache;
}
/**
* Sets the otherFilters.
*
* @param otherFilters the otherFilters to set
*/
public void setOtherFilters(ViewerFilter[] otherFilters) {
this.otherFilters = otherFilters;
}
}

View File

@@ -14,5 +14,6 @@
<classpathentry exported="true" kind="lib" path="lib/xmlschema-core-2.0.1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src/main/java"/>
<classpathentry kind="output" path="class"/>
</classpath>

View File

@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -2,6 +2,7 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry exported="true" kind="lib" path="lib/commons-digester-2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-cli-2.0-SNAPSHOT.jar"/>
<classpathentry exported="true" kind="lib" path="lib/commons-codec-1.6.jar"/>

View File

@@ -6,6 +6,7 @@
<classpathentry exported="true" kind="lib" path="lib/slf4j-log4j12-1.6.1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry exported="true" kind="lib" path="lib/hive-exec-0.9.0.jar"/>
<classpathentry exported="true" kind="lib" path="lib/hive-jdbc-0.9.0.jar"/>
<classpathentry exported="true" kind="lib" path="lib/hive-metastore-0.9.0.jar"/>

View File

@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -15,6 +15,7 @@ Export-Package:
org.hsqldb.resources,
org.hsqldb.rowio,
org.hsqldb.scriptio,
org.hsqldb.store,
org.hsqldb.types,
org.hsqldb.util
Bundle-RequiredExecutionEnvironment: J2SE-1.5

View File

@@ -2,5 +2,6 @@
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src/main/java"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -1,6 +1,5 @@
source.. = src/main/java/
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
plugin_en.properties,\
plugin_fr.properties
plugin_en.properties

View File

@@ -8,7 +8,7 @@
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//
// ============================================================================
package routines.system;
@@ -19,13 +19,10 @@ public class TalendThread extends Thread {
public Integer errorCode = null;
public String status = ""; //$NON-NLS-1$
public Exception exception = null;
public Error error = null;
// this is a template for Iterate Parallel
@Override
public void run() {
try {

View File

@@ -414,6 +414,7 @@ public class DBConnectionFillerImplTest {
ResultSet rs = mock(ResultSet.class);
when(rs.next()).thenReturn(true).thenReturn(true).thenReturn(false);
verify(rs, atMost(5)).next();
when(rs.getString(GetTable.TABLE_NAME.name())).thenReturn("Table1").thenReturn("Table2");//$NON-NLS-1$ //$NON-NLS-2$
when(rs.getString(GetTable.TABLE_TYPE.name())).thenReturn("Table");//$NON-NLS-1$
when(rs.getString(GetTable.REMARKS.name())).thenReturn("");//$NON-NLS-1$

View File

@@ -316,12 +316,20 @@ public class DqRepositoryViewServiceTest {
containsTable = DqRepositoryViewService.isCatalogHasChildren(dataProvider, catalog, tablePattern,
DqRepositoryViewService.TABLE_TYPES);
Mockito.verify(metaData).getTables(catalogName, null, tablePattern, DqRepositoryViewService.TABLE_TYPES);
Mockito.verify(tables).next();
// Mockito.verify(sqlConn).getMetaData();
Mockito.verify(sqlConn).isClosed();
Mockito.verify(catalog).getName();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
Mockito.verifyZeroInteractions(metaData, tables, dataProvider, sqlConn, catalog);
assertTrue(containsTable);
// fail("Not yet implemented");
}
// //
@@ -397,12 +405,20 @@ public class DqRepositoryViewServiceTest {
containsTable = DqRepositoryViewService.isSchemaHasChildren(dataProvider, schema, tablePattern,
DqRepositoryViewService.TABLE_TYPES);
Mockito.verify(metaData).getTables(null, schemaName, tablePattern, DqRepositoryViewService.TABLE_TYPES);
Mockito.verify(tables).next();
// Mockito.verify(sqlConn).getMetaData();
Mockito.verify(sqlConn).isClosed();
Mockito.verify(schema).getName();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
Mockito.verifyZeroInteractions(metaData, tables, dataProvider, sqlConn, schema);
assertTrue(containsTable);
// fail("Not yet implemented");
}
//
@@ -474,12 +490,20 @@ public class DqRepositoryViewServiceTest {
containsTable = DqRepositoryViewService.isCatalogHasChildren(dataProvider, catalog, tablePattern,
DqRepositoryViewService.VIEW_TYPES);
Mockito.verify(metaData).getTables(catalogName, null, tablePattern, DqRepositoryViewService.VIEW_TYPES);
Mockito.verify(tables).next();
// Mockito.verify(sqlConn).getMetaData();
Mockito.verify(sqlConn).isClosed();
Mockito.verify(catalog).getName();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
Mockito.verifyZeroInteractions(metaData, tables, dataProvider, sqlConn, catalog);
assertTrue(containsTable);
// fail("Not yet implemented");
}
//
@@ -552,11 +576,21 @@ public class DqRepositoryViewServiceTest {
containsTable = DqRepositoryViewService.isSchemaHasChildren(dataProvider, schema, tablePattern,
DqRepositoryViewService.VIEW_TYPES);
Mockito.verify(metaData).getTables(null, schemaName, tablePattern, DqRepositoryViewService.VIEW_TYPES);
Mockito.verify(tables).next();
// Mockito.verify(sqlConn).getMetaData();
Mockito.verify(sqlConn).isClosed();
Mockito.verify(schema).getName();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
Mockito.verifyZeroInteractions(metaData, tables, dataProvider, sqlConn, schema);
assertTrue(containsTable);
// fail("Not yet implemented");
}
/**
@@ -649,6 +683,13 @@ public class DqRepositoryViewServiceTest {
loadFromDB = false;
resultList3 = DqRepositoryViewService.getTables(dataProvider, catalog, tablePattern, loadFromDB);
Mockito.verify(catalog, Mockito.times(2)).getName();
Mockito.verify(metadataMock, Mockito.times(2)).fillTables((orgomg.cwm.objectmodel.core.Package) Mockito.eq(catalog),
Mockito.eq(metaData), Mockito.anyList(), Mockito.eq(tablePattern), (String[]) Mockito.any());
Mockito.verify(metadataMock).setLinked(true);
Mockito.verify(metadataMock).setLinked(false);
Mockito.verifyZeroInteractions(tableMock, tables, dataProvider, catalog, sqlConn, metaData, metadataMock);
assertTrue(resultList1.size() == 1);
assertTrue(resultList2.size() == 1);
assertTrue(resultList3.size() == 1);
@@ -761,6 +802,13 @@ public class DqRepositoryViewServiceTest {
loadFromDB = false;
resultList3 = DqRepositoryViewService.getTables(dataProvider, schema, tablePattern, loadFromDB);
// Mockito.verify(schema, Mockito.times(2)).getName();
Mockito.verify(metadataMock, Mockito.times(2)).fillTables((orgomg.cwm.objectmodel.core.Package) Mockito.eq(schema),
Mockito.eq(metaData), Mockito.anyList(), Mockito.eq(tablePattern), (String[]) Mockito.any());
Mockito.verify(metadataMock).setLinked(true);
Mockito.verify(metadataMock).setLinked(false);
Mockito.verifyZeroInteractions(tableMock, tables, dataProvider, schema, sqlConn, metaData, metadataMock, catalog);
assertTrue(resultList1.size() == 1);
assertTrue(resultList2.size() == 1);
assertTrue(resultList3.size() == 1);
@@ -862,6 +910,13 @@ public class DqRepositoryViewServiceTest {
loadFromDB = false;
resultList3 = DqRepositoryViewService.getViews(dataProvider, catalog, tablePattern, loadFromDB);
// Mockito.verify(catalog, Mockito.times(2)).getName();
Mockito.verify(metadataMock, Mockito.times(2)).fillViews(Mockito.eq(catalog), Mockito.eq(metaData),
Mockito.anyList(), Mockito.eq(tablePattern), Mockito.eq(DqRepositoryViewService.VIEW_TYPES));
Mockito.verify(metadataMock).setLinked(true);
Mockito.verify(metadataMock).setLinked(false);
Mockito.verifyZeroInteractions(tableMock, tables, dataProvider, catalog, sqlConn, metaData, metadataMock);
assertTrue(resultList1.size() == 1);
assertTrue(resultList2.size() == 1);
assertTrue(resultList3.size() == 1);
@@ -971,6 +1026,13 @@ public class DqRepositoryViewServiceTest {
loadFromDB = false;
resultList3 = DqRepositoryViewService.getViews(dataProvider, schema, tablePattern, loadFromDB);
// Mockito.verify(schema, Mockito.times(2)).getName();
Mockito.verify(metadataMock, Mockito.times(2)).fillViews(Mockito.eq(schema), Mockito.eq(metaData), Mockito.anyList(),
Mockito.eq(tablePattern), Mockito.eq(DqRepositoryViewService.VIEW_TYPES));
Mockito.verify(metadataMock).setLinked(true);
Mockito.verify(metadataMock).setLinked(false);
Mockito.verifyZeroInteractions(tableMock, tables, dataProvider, schema, sqlConn, metaData, metadataMock, catalog);
assertTrue(resultList1.size() == 1);
assertTrue(resultList2.size() == 1);
assertTrue(resultList3.size() == 1);

View File

@@ -13,6 +13,7 @@
package org.talend.repository.ui.utils;
import java.io.File;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -159,7 +160,38 @@ public class ManagerConnection {
HiveConnectionManager.getInstance().checkConnection(metadataConn);
isValide = true;
messageException = Messages.getString("ExtractMetaDataFromDataBase.connectionSuccessful"); //$NON-NLS-1$
} catch (Exception e) {
} catch (ClassNotFoundException e) {
isValide = false;
// if (metadataConn.getCurrentConnection() instanceof DatabaseConnection) {
// DatabaseConnection connection = (DatabaseConnection) metadataConn.getCurrentConnection();
// String hiveDistribution =
// connection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HIVE_DISTRIBUTION);
// String hiveDVersion = connection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HIVE_VERSION);
// if (hiveDistribution.equals(EHadoopDistributions.MAPR.getName())) {
// Set<String> jars = new HashSet<String>();
// String missJarMsg = "";
// if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopService.class)) {
// IHadoopService hadoopService = (IHadoopService) GlobalServiceRegister.getDefault().getService(
// IHadoopService.class);
// jars = hadoopService.getMissingLibraries(hiveDistribution, hiveDVersion);
// if (jars.size() > 0) {
// missJarMsg = "Missing jars:" + jars.toString() + "; " + "Need to check them in modules view.";
// messageException = messageException + "\n" + missJarMsg;
// }
// }
// }
// }
messageException = ExceptionUtils.getFullStackTrace(e);
CommonExceptionHandler.process(e);
} catch (InstantiationException e) {
isValide = false;
messageException = ExceptionUtils.getFullStackTrace(e);
CommonExceptionHandler.process(e);
} catch (IllegalAccessException e) {
isValide = false;
messageException = ExceptionUtils.getFullStackTrace(e);
CommonExceptionHandler.process(e);
} catch (SQLException e) {
isValide = false;
messageException = ExceptionUtils.getFullStackTrace(e);
CommonExceptionHandler.process(e);

View File

@@ -51,22 +51,6 @@
index="HIVE:CLOUDERA:Cloudera_CDH4:EMBEDDED"
libraries="hive-hbase-handler-0.9.0-cdh4.1.2.jar;hbase-0.92.1-cdh4.0.1-security.jar;hive-exec-0.9.0-cdh4.1.2.jar;hive-jdbc-0.9.0-cdh4.1.2.jar;hive-metastore-0.9.0-cdh4.1.2.jar;hive-service-0.9.0-cdh4.1.2.jar;libfb303_new.jar;libthrift.jar;log4j-1.2.16.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;hadoop-common-2.0.0-cdh4.0.1.jar;hadoop-core-2.0.0-mr1-cdh4.0.1.jar;hadoop-auth-2.0.0-cdh4.0.1.jar;hadoop-hdfs-2.0.0-cdh4.0.1.jar;protobuf-java-2.4.0a.jar;guava-11.0.2.jar;commons-logging-1.1.1.jar;antlr-runtime-3.0.1.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;datanucleus-connectionpool-2.0.3.jar;datanucleus-enhancer-2.0.3.jar;derby-10.4.2.0.jar;hive-builtins-0.9.0-cdh4.1.2.jar;commons-configuration-1.6.jar;datanucleus-core-2.0.3.jar;datanucleus-rdbms-2.0.3.jar;jdo2-api-2.3-ec.jar;commons-lang-2.4.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar">
</classloader>
<classloader
index="HIVE:CLOUDERA:Cloudera_CDH5:EMBEDDED"
libraries="hive-exec-0.12.0-cdh5.0.0.jar;hive-jdbc-0.12.0-cdh5.0.0.jar;hive-metastore-0.12.0-cdh5.0.0.jar;hive-service-0.12.0-cdh5.0.0.jar;libfb303-0.9.0.jar;libthrift-0.9.0.cloudera.2.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;hadoop-hdfs-2.3.0-cdh5.0.0.jar;hadoop-common-2.3.0-cdh5.0.0.jar;hadoop-auth-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-common-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-core-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-jobclient-2.3.0-cdh5.0.0.jar;hadoop-yarn-api-2.3.0-cdh5.0.0.jar;hadoop-yarn-client-2.3.0-cdh5.0.0.jar;hadoop-yarn-common-2.3.0-cdh5.0.0.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;commons-logging-1.1.3.jar;commons-cli-1.2.jar;commons-httpclient-3.0.1.jar;commons-collections-3.2.1.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;commons-codec-1.4.jar;commons-io-2.4.jar;avro-1.7.5-cdh5.0.0.jar;antlr-runtime-3.4.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;commons-configuration-1.6.jar;jdo-api-3.0.1.jar;commons-lang-2.6.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-server-0.96.1.1-cdh5.0.0.jar;hbase-client-0.96.1.1-cdh5.0.0.jar;hbase-common-0.96.1.1-cdh5.0.0.jar;hbase-protocol-0.96.1.1-cdh5.0.0.jar;zookeeper-3.4.5-cdh5.0.0.jar;hive-hbase-handler-0.12.0-cdh5.0.0.jar;servlet-api-2.5-20081211.jar;htrace-core-2.01.jar;netty-3.6.6.Final.jar">
</classloader>
<classloader
index="HIVE:CLOUDERA:Cloudera_CDH5:STANDALONE"
libraries="hive-exec-0.12.0-cdh5.0.0.jar;hive-jdbc-0.12.0-cdh5.0.0.jar;hive-metastore-0.12.0-cdh5.0.0.jar;hive-service-0.12.0-cdh5.0.0.jar;libfb303-0.9.0.jar;libthrift-0.9.0.cloudera.2.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;hadoop-hdfs-2.3.0-cdh5.0.0.jar;hadoop-common-2.3.0-cdh5.0.0.jar;hadoop-auth-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-common-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-core-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-jobclient-2.3.0-cdh5.0.0.jar;hadoop-yarn-api-2.3.0-cdh5.0.0.jar;hadoop-yarn-client-2.3.0-cdh5.0.0.jar;hadoop-yarn-common-2.3.0-cdh5.0.0.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;commons-logging-1.1.3.jar;commons-cli-1.2.jar;commons-httpclient-3.0.1.jar;commons-collections-3.2.1.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;commons-codec-1.4.jar;commons-io-2.4.jar;avro-1.7.5-cdh5.0.0.jar;antlr-runtime-3.4.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;commons-configuration-1.6.jar;jdo-api-3.0.1.jar;commons-lang-2.6.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar">
</classloader>
<classloader
index="HIVE2:CLOUDERA:Cloudera_CDH5:EMBEDDED"
libraries="hive-exec-0.12.0-cdh5.0.0.jar;hive-jdbc-0.12.0-cdh5.0.0.jar;hive-metastore-0.12.0-cdh5.0.0.jar;hive-service-0.12.0-cdh5.0.0.jar;libfb303-0.9.0.jar;libthrift-0.9.0.cloudera.2.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;hadoop-hdfs-2.3.0-cdh5.0.0.jar;hadoop-common-2.3.0-cdh5.0.0.jar;hadoop-auth-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-common-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-core-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-jobclient-2.3.0-cdh5.0.0.jar;hadoop-yarn-api-2.3.0-cdh5.0.0.jar;hadoop-yarn-client-2.3.0-cdh5.0.0.jar;hadoop-yarn-common-2.3.0-cdh5.0.0.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;commons-logging-1.1.3.jar;commons-cli-1.2.jar;commons-httpclient-3.0.1.jar;commons-collections-3.2.1.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;commons-codec-1.4.jar;commons-io-2.4.jar;avro-1.7.5-cdh5.0.0.jar;antlr-runtime-3.4.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;commons-configuration-1.6.jar;jdo-api-3.0.1.jar;commons-lang-2.6.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-server-0.96.1.1-cdh5.0.0.jar;hbase-client-0.96.1.1-cdh5.0.0.jar;hbase-common-0.96.1.1-cdh5.0.0.jar;hbase-protocol-0.96.1.1-cdh5.0.0.jar;zookeeper-3.4.5-cdh5.0.0.jar;hive-hbase-handler-0.12.0-cdh5.0.0.jar;servlet-api-2.5-20081211.jar;htrace-core-2.01.jar;netty-3.6.6.Final.jar">
</classloader>
<classloader
index="HIVE2:CLOUDERA:Cloudera_CDH5:STANDALONE"
libraries="hive-exec-0.12.0-cdh5.0.0.jar;hive-jdbc-0.12.0-cdh5.0.0.jar;hive-metastore-0.12.0-cdh5.0.0.jar;hive-service-0.12.0-cdh5.0.0.jar;libfb303-0.9.0.jar;libthrift-0.9.0.cloudera.2.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;hadoop-hdfs-2.3.0-cdh5.0.0.jar;hadoop-common-2.3.0-cdh5.0.0.jar;hadoop-auth-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-common-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-core-2.3.0-cdh5.0.0.jar;hadoop-mapreduce-client-jobclient-2.3.0-cdh5.0.0.jar;hadoop-yarn-api-2.3.0-cdh5.0.0.jar;hadoop-yarn-client-2.3.0-cdh5.0.0.jar;hadoop-yarn-common-2.3.0-cdh5.0.0.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;commons-logging-1.1.3.jar;commons-cli-1.2.jar;commons-httpclient-3.0.1.jar;commons-collections-3.2.1.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;commons-codec-1.4.jar;commons-io-2.4.jar;avro-1.7.5-cdh5.0.0.jar;antlr-runtime-3.4.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;commons-configuration-1.6.jar;jdo-api-3.0.1.jar;commons-lang-2.6.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar">
</classloader>
<classloader
index="HIVE:MAPR:MAPR1:STANDALONE"
libraries="hadoop-core-0.20.203.0.jar;log4j-1.2.15.jar;hive-exec-0.7.1.jar;hive-jdbc-0.7.1.jar;hive-metastore-0.7.1.jar;hive-service-0.7.1.jar;libfb303.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;commons-logging-1.1.1.jar;commons-logging-api-1.0.4.jar">
@@ -195,54 +179,6 @@
index="HIVE2:CLOUDERA:Cloudera_CDH4_YARN:STANDALONE"
libraries="hive-exec-0.10.0-cdh4.4.0.jar;hive-jdbc-0.10.0-cdh4.4.0.jar;hive-metastore-0.10.0-cdh4.4.0.jar;hive-service-0.10.0-cdh4.4.0.jar;libfb303-0.9.0.jar;libthrift-0.9.0-cdh4-1.jar;log4j-1.2.16.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;hadoop-auth-2.0.0-cdh4.4.0.jar;hadoop-common-2.0.0-cdh4.4.0.jar;hadoop-hdfs-2.0.0-cdh4.4.0.jar;hadoop-mapreduce-client-common-2.0.0-cdh4.4.0.jar;hadoop-mapreduce-client-core-2.0.0-cdh4.4.0.jar;hadoop-mapreduce-client-jobclient-2.0.0-cdh4.4.0.jar;hadoop-yarn-api-2.0.0-cdh4.4.0.jar;hadoop-yarn-client-2.0.0-cdh4.4.0.jar;hadoop-yarn-common-2.0.0-cdh4.4.0.jar;protobuf-java-2.4.0a.jar;guava-11.0.2.jar;commons-logging-1.1.1.jar;commons-cli-1.2.jar;commons-httpclient-3.0.1.jar;commons-codec-1.4.jar;commons-io-2.4.jar;avro-1.5.4.jar;antlr-runtime-3.4.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;hive-builtins-0.10.0-cdh4.4.0.jar;commons-configuration-1.6.jar;jdo2-api-2.3-ec.jar;commons-lang-2.5.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-0.94.6-cdh4.4.0-security.jar;zookeeper-3.4.5-cdh4.4.0.jar;hive-hbase-handler-0.10.0-cdh4.4.0.jar;servlet-api-2.5-20081211.jar;">
</classloader>
<classloader
index="HIVE:MAPR:MAPR310:EMBEDDED"
libraries="hadoop-auth-1.0.3-mapr-3.1.0.jar;hadoop-core-1.0.3-mapr-3.1.0.jar;httpcore-4.2.4.jar;httpclient-4.2.5.jar;log4j-1.2.16.jar;hive-exec-0.12-mapr-1401-140130.jar;hive-jdbc-0.12-mapr-1401-140130.jar;hive-metastore-0.12-mapr-1401-140130.jar;hive-service-0.12-mapr-1401-140130.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;commons-logging-1.0.4.jar;commons-logging-api-1.0.4.jar;jdo-api-3.0.1.jar;antlr-runtime-3.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;maprfs-1.0.3-mapr-3.1.0.jar;zookeeper-3.4.5-mapr-1401.jar;commons-lang-2.4.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;commons-pool-1.5.4.jar;guava-13.0.1.jar;hive-hbase-handler-0.12-mapr-1401-140130.jar;servlet-api-2.5-20081211.jar;hbase-0.94.13-mapr-1401.jar;protobuf-java-2.4.1.jar;commons-dbcp-1.4.jar;commons-httpclient-3.1.jar;commons-codec-1.5.jar;">
</classloader>
<classloader
index="HIVE:MAPR:MAPR310:STANDALONE"
libraries="hadoop-auth-1.0.3-mapr-3.1.0.jar;hadoop-core-1.0.3-mapr-3.1.0.jar;httpcore-4.2.4.jar;httpclient-4.2.5.jar;log4j-1.2.16.jar;hive-exec-0.12-mapr-1401-140130.jar;hive-jdbc-0.12-mapr-1401-140130.jar;hive-metastore-0.12-mapr-1401-140130.jar;hive-service-0.12-mapr-1401-140130.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;commons-logging-1.0.4.jar;commons-logging-api-1.0.4.jar;jdo-api-3.0.1.jar;antlr-runtime-3.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;maprfs-1.0.3-mapr-3.1.0.jar;zookeeper-3.4.5-mapr-1401.jar;commons-lang-2.4.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;commons-pool-1.5.4.jar;guava-13.0.1.jar;hive-hbase-handler-0.12-mapr-1401-140130.jar;servlet-api-2.5-20081211.jar;hbase-0.94.13-mapr-1401.jar;protobuf-java-2.4.1.jar;commons-dbcp-1.4.jar;commons-httpclient-3.1.jar;commons-codec-1.5.jar;">
</classloader>
<classloader
index="HIVE2:MAPR:MAPR310:EMBEDDED"
libraries="hadoop-auth-1.0.3-mapr-3.1.0.jar;hadoop-core-1.0.3-mapr-3.1.0.jar;httpcore-4.2.4.jar;httpclient-4.2.5.jar;log4j-1.2.16.jar;hive-exec-0.12-mapr-1401-140130.jar;hive-jdbc-0.12-mapr-1401-140130.jar;hive-metastore-0.12-mapr-1401-140130.jar;hive-service-0.12-mapr-1401-140130.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;commons-logging-1.0.4.jar;commons-logging-api-1.0.4.jar;jdo-api-3.0.1.jar;antlr-runtime-3.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;maprfs-1.0.3-mapr-3.1.0.jar;zookeeper-3.4.5-mapr-1401.jar;commons-lang-2.4.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;commons-pool-1.5.4.jar;guava-13.0.1.jar;hive-hbase-handler-0.12-mapr-1401-140130.jar;servlet-api-2.5-20081211.jar;hbase-0.94.13-mapr-1401.jar;protobuf-java-2.4.1.jar;commons-dbcp-1.4.jar;commons-httpclient-3.1.jar;commons-codec-1.5.jar;">
</classloader>
<classloader
index="HIVE2:MAPR:MAPR310:STANDALONE"
libraries="hadoop-auth-1.0.3-mapr-3.1.0.jar;hadoop-core-1.0.3-mapr-3.1.0.jar;httpcore-4.2.4.jar;httpclient-4.2.5.jar;log4j-1.2.16.jar;hive-exec-0.12-mapr-1401-140130.jar;hive-jdbc-0.12-mapr-1401-140130.jar;hive-metastore-0.12-mapr-1401-140130.jar;hive-service-0.12-mapr-1401-140130.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;commons-logging-1.0.4.jar;commons-logging-api-1.0.4.jar;jdo-api-3.0.1.jar;antlr-runtime-3.4.jar;datanucleus-api-jdo-3.2.1.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;derby-10.4.2.0.jar;maprfs-1.0.3-mapr-3.1.0.jar;zookeeper-3.4.5-mapr-1401.jar;commons-lang-2.4.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;commons-pool-1.5.4.jar;guava-13.0.1.jar;hive-hbase-handler-0.12-mapr-1401-140130.jar;servlet-api-2.5-20081211.jar;hbase-0.94.13-mapr-1401.jar;protobuf-java-2.4.1.jar;commons-dbcp-1.4.jar;commons-httpclient-3.1.jar;commons-codec-1.5.jar;">
</classloader>
<classloader
index="HIVE:PIVOTAL_HD:PIVOTAL_HD_2_0:EMBEDDED"
libraries="commons-codec-1.6.jar;commons-compress-1.6.jar;commons-httpclient-3.1.jar;commons-io-2.4.jar;hive-exec-0.12.0-gphd-3.0.0.0.jar;hive-jdbc-0.12.0-gphd-3.0.0.0.jar;hive-metastore-0.12.0-gphd-3.0.0.0.jar;hive-service-0.12.0-gphd-3.0.0.0.jar;httpclient-4.2.5.jar;httpcore-4.2.4.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;log4j-1.2.17.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;hadoop-mapreduce-client-common-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-core-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-jobclient-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-api-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-client-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-common-2.2.0-gphd-3.0.0.0.jar;hadoop-auth-2.2.0-gphd-3.0.0.0.jar;hadoop-common-2.2.0-gphd-3.0.0.0.jar;hadoop-hdfs-2.2.0-gphd-3.0.0.0.jar;derby-10.4.2.0.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;datanucleus-api-jdo-3.2.1.jar;commons-configuration-1.6.jar;commons-cli-1.2.jar;commons-lang-2.5.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;servlet-api-2.5-20081211.jar;zookeeper-3.4.5-gphd-3.0.0.0.jar;antlr-runtime-3.4.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;jdo2-api-2.3-ec.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-common-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-server-0.96.0-hadoop2-gphd-3.0.0.0.jar;hive-hbase-handler-0.12.0-gphd-3.0.0.0.jar;hbase-protocol-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-client-0.96.0-hadoop2-gphd-3.0.0.0.jar;htrace-core-2.01.jar;netty-3.6.6.Final.jar;">
</classloader>
<classloader
index="HIVE:PIVOTAL_HD:PIVOTAL_HD_2_0:STANDALONE"
libraries="commons-codec-1.6.jar;commons-compress-1.6.jar;commons-httpclient-3.1.jar;commons-io-2.4.jar;hive-exec-0.12.0-gphd-3.0.0.0.jar;hive-jdbc-0.12.0-gphd-3.0.0.0.jar;hive-metastore-0.12.0-gphd-3.0.0.0.jar;hive-service-0.12.0-gphd-3.0.0.0.jar;httpclient-4.2.5.jar;httpcore-4.2.4.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;log4j-1.2.17.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;hadoop-mapreduce-client-common-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-core-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-jobclient-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-api-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-client-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-common-2.2.0-gphd-3.0.0.0.jar;hadoop-auth-2.2.0-gphd-3.0.0.0.jar;hadoop-common-2.2.0-gphd-3.0.0.0.jar;hadoop-hdfs-2.2.0-gphd-3.0.0.0.jar;derby-10.4.2.0.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;datanucleus-api-jdo-3.2.1.jar;commons-configuration-1.6.jar;commons-cli-1.2.jar;commons-lang-2.5.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;servlet-api-2.5-20081211.jar;zookeeper-3.4.5-gphd-3.0.0.0.jar;antlr-runtime-3.4.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;jdo2-api-2.3-ec.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-common-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-server-0.96.0-hadoop2-gphd-3.0.0.0.jar;hive-hbase-handler-0.12.0-gphd-3.0.0.0.jar;hbase-protocol-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-client-0.96.0-hadoop2-gphd-3.0.0.0.jar;htrace-core-2.01.jar;netty-3.6.6.Final.jar;">
</classloader>
<classloader
index="HIVE2:PIVOTAL_HD:PIVOTAL_HD_2_0:EMBEDDED"
libraries="commons-codec-1.6.jar;commons-compress-1.6.jar;commons-httpclient-3.1.jar;commons-io-2.4.jar;hive-exec-0.12.0-gphd-3.0.0.0.jar;hive-jdbc-0.12.0-gphd-3.0.0.0.jar;hive-metastore-0.12.0-gphd-3.0.0.0.jar;hive-service-0.12.0-gphd-3.0.0.0.jar;httpclient-4.2.5.jar;httpcore-4.2.4.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;log4j-1.2.17.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;hadoop-mapreduce-client-common-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-core-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-jobclient-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-api-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-client-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-common-2.2.0-gphd-3.0.0.0.jar;hadoop-auth-2.2.0-gphd-3.0.0.0.jar;hadoop-common-2.2.0-gphd-3.0.0.0.jar;hadoop-hdfs-2.2.0-gphd-3.0.0.0.jar;derby-10.4.2.0.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;datanucleus-api-jdo-3.2.1.jar;commons-configuration-1.6.jar;commons-cli-1.2.jar;commons-lang-2.5.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;servlet-api-2.5-20081211.jar;zookeeper-3.4.5-gphd-3.0.0.0.jar;antlr-runtime-3.4.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;jdo2-api-2.3-ec.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-common-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-server-0.96.0-hadoop2-gphd-3.0.0.0.jar;hive-hbase-handler-0.12.0-gphd-3.0.0.0.jar;hbase-protocol-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-client-0.96.0-hadoop2-gphd-3.0.0.0.jar;htrace-core-2.01.jar;netty-3.6.6.Final.jar;">
</classloader>
<classloader
index="HIVE2:PIVOTAL_HD:PIVOTAL_HD_2_0:STANDALONE"
libraries="commons-codec-1.6.jar;commons-compress-1.6.jar;commons-httpclient-3.1.jar;commons-io-2.4.jar;hive-exec-0.12.0-gphd-3.0.0.0.jar;hive-jdbc-0.12.0-gphd-3.0.0.0.jar;hive-metastore-0.12.0-gphd-3.0.0.0.jar;hive-service-0.12.0-gphd-3.0.0.0.jar;httpclient-4.2.5.jar;httpcore-4.2.4.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;log4j-1.2.17.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;hadoop-mapreduce-client-common-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-core-2.2.0-gphd-3.0.0.0.jar;hadoop-mapreduce-client-jobclient-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-api-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-client-2.2.0-gphd-3.0.0.0.jar;hadoop-yarn-common-2.2.0-gphd-3.0.0.0.jar;hadoop-auth-2.2.0-gphd-3.0.0.0.jar;hadoop-common-2.2.0-gphd-3.0.0.0.jar;hadoop-hdfs-2.2.0-gphd-3.0.0.0.jar;derby-10.4.2.0.jar;datanucleus-core-3.2.2.jar;datanucleus-rdbms-3.2.1.jar;datanucleus-api-jdo-3.2.1.jar;commons-configuration-1.6.jar;commons-cli-1.2.jar;commons-lang-2.5.jar;commons-dbcp-1.4.jar;commons-pool-1.5.4.jar;servlet-api-2.5-20081211.jar;zookeeper-3.4.5-gphd-3.0.0.0.jar;antlr-runtime-3.4.jar;protobuf-java-2.5.0.jar;guava-11.0.2.jar;jdo2-api-2.3-ec.jar;jackson-core-asl-1.8.8.jar;jackson-mapper-asl-1.8.8.jar;hbase-common-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-server-0.96.0-hadoop2-gphd-3.0.0.0.jar;hive-hbase-handler-0.12.0-gphd-3.0.0.0.jar;hbase-protocol-0.96.0-hadoop2-gphd-3.0.0.0.jar;hbase-client-0.96.0-hadoop2-gphd-3.0.0.0.jar;htrace-core-2.01.jar;netty-3.6.6.Final.jar;">
</classloader>
<classloader
index="HIVE:HORTONWORKS:HDP_2_1:EMBEDDED"
libraries="avro-1.5.4.jar;hadoop-auth-2.4.0.2.1.1.0-385.jar;hadoop-common-2.4.0.2.1.1.0-385.jar;hadoop-hdfs-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-common-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-core-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-jobclient-2.4.0.2.1.1.0-385.jar;hadoop-yarn-api-2.4.0.2.1.1.0-385.jar;hadoop-yarn-client-2.4.0.2.1.1.0-385.jar;hadoop-yarn-common-2.4.0.2.1.1.0-385.jar;hive-jdbc-0.13.0.2.1.1.0-385.jar;hive-exec-0.13.0.2.1.1.0-385.jar;hive-common-0.13.0.2.1.1.0-385.jar;hive-metastore-0.13.0.2.1.1.0-385.jar;hive-service-0.13.0.2.1.1.0-385.jar;hive-hbase-handler-0.13.0.2.1.1.0-385.jar;commons-logging-1.1.1.jar;commons-cli-1.2.jar;commons-codec-1.4.jar;commons-collections-3.2.1.jar;commons-configuration-1.6.jar;commons-lang-2.6.jar;datanucleus-api-jdo-3.2.6.jar;datanucleus-core-3.2.10.jar;datanucleus-rdbms-3.2.9.jar;derby-10.4.2.0.jar;jdo-api-3.0.1.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;servlet-api-2.5-20081211.jar;snappy-java-1.0.5.jar;zookeeper-3.4.5.2.1.1.0-385.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;hbase-server-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-common-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-client-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-protocol-0.98.0.2.1.1.0-385-hadoop2.jar;htrace-core-2.04.jar;guava-12.0.1.jar;protobuf-java-2.5.0.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;netty-3.6.6.Final.jar;antlr-runtime-3.4.jar;">
</classloader>
<classloader
index="HIVE:HORTONWORKS:HDP_2_1:STANDALONE"
libraries="avro-1.5.4.jar;hadoop-auth-2.4.0.2.1.1.0-385.jar;hadoop-common-2.4.0.2.1.1.0-385.jar;hadoop-hdfs-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-common-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-core-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-jobclient-2.4.0.2.1.1.0-385.jar;hadoop-yarn-api-2.4.0.2.1.1.0-385.jar;hadoop-yarn-client-2.4.0.2.1.1.0-385.jar;hadoop-yarn-common-2.4.0.2.1.1.0-385.jar;hive-jdbc-0.13.0.2.1.1.0-385.jar;hive-exec-0.13.0.2.1.1.0-385.jar;hive-common-0.13.0.2.1.1.0-385.jar;hive-metastore-0.13.0.2.1.1.0-385.jar;hive-service-0.13.0.2.1.1.0-385.jar;hive-hbase-handler-0.13.0.2.1.1.0-385.jar;commons-logging-1.1.1.jar;commons-cli-1.2.jar;commons-codec-1.4.jar;commons-collections-3.2.1.jar;commons-configuration-1.6.jar;commons-lang-2.6.jar;datanucleus-api-jdo-3.2.6.jar;datanucleus-core-3.2.10.jar;datanucleus-rdbms-3.2.9.jar;derby-10.4.2.0.jar;jdo-api-3.0.1.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;servlet-api-2.5-20081211.jar;snappy-java-1.0.5.jar;zookeeper-3.4.5.2.1.1.0-385.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;hbase-server-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-common-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-client-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-protocol-0.98.0.2.1.1.0-385-hadoop2.jar;htrace-core-2.04.jar;guava-12.0.1.jar;protobuf-java-2.5.0.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;netty-3.6.6.Final.jar;antlr-runtime-3.4.jar;">
</classloader>
<classloader
index="HIVE2:HORTONWORKS:HDP_2_1:EMBEDDED"
libraries="avro-1.5.4.jar;hadoop-auth-2.4.0.2.1.1.0-385.jar;hadoop-common-2.4.0.2.1.1.0-385.jar;hadoop-hdfs-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-common-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-core-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-jobclient-2.4.0.2.1.1.0-385.jar;hadoop-yarn-api-2.4.0.2.1.1.0-385.jar;hadoop-yarn-client-2.4.0.2.1.1.0-385.jar;hadoop-yarn-common-2.4.0.2.1.1.0-385.jar;hive-jdbc-0.13.0.2.1.1.0-385.jar;hive-exec-0.13.0.2.1.1.0-385.jar;hive-common-0.13.0.2.1.1.0-385.jar;hive-metastore-0.13.0.2.1.1.0-385.jar;hive-service-0.13.0.2.1.1.0-385.jar;hive-hbase-handler-0.13.0.2.1.1.0-385.jar;commons-logging-1.1.1.jar;commons-cli-1.2.jar;commons-codec-1.4.jar;commons-collections-3.2.1.jar;commons-configuration-1.6.jar;commons-lang-2.6.jar;datanucleus-api-jdo-3.2.6.jar;datanucleus-core-3.2.10.jar;datanucleus-rdbms-3.2.9.jar;derby-10.4.2.0.jar;jdo-api-3.0.1.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;servlet-api-2.5-20081211.jar;snappy-java-1.0.5.jar;zookeeper-3.4.5.2.1.1.0-385.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;hbase-server-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-common-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-client-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-protocol-0.98.0.2.1.1.0-385-hadoop2.jar;htrace-core-2.04.jar;guava-12.0.1.jar;protobuf-java-2.5.0.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;netty-3.6.6.Final.jar;antlr-runtime-3.4.jar;">
</classloader>
<classloader
index="HIVE2:HORTONWORKS:HDP_2_1:STANDALONE"
libraries="avro-1.5.4.jar;hadoop-auth-2.4.0.2.1.1.0-385.jar;hadoop-common-2.4.0.2.1.1.0-385.jar;hadoop-hdfs-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-common-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-core-2.4.0.2.1.1.0-385.jar;hadoop-mapreduce-client-jobclient-2.4.0.2.1.1.0-385.jar;hadoop-yarn-api-2.4.0.2.1.1.0-385.jar;hadoop-yarn-client-2.4.0.2.1.1.0-385.jar;hadoop-yarn-common-2.4.0.2.1.1.0-385.jar;hive-jdbc-0.13.0.2.1.1.0-385.jar;hive-exec-0.13.0.2.1.1.0-385.jar;hive-common-0.13.0.2.1.1.0-385.jar;hive-metastore-0.13.0.2.1.1.0-385.jar;hive-service-0.13.0.2.1.1.0-385.jar;hive-hbase-handler-0.13.0.2.1.1.0-385.jar;commons-logging-1.1.1.jar;commons-cli-1.2.jar;commons-codec-1.4.jar;commons-collections-3.2.1.jar;commons-configuration-1.6.jar;commons-lang-2.6.jar;datanucleus-api-jdo-3.2.6.jar;datanucleus-core-3.2.10.jar;datanucleus-rdbms-3.2.9.jar;derby-10.4.2.0.jar;jdo-api-3.0.1.jar;libfb303-0.9.0.jar;libthrift-0.9.0.jar;servlet-api-2.5-20081211.jar;snappy-java-1.0.5.jar;zookeeper-3.4.5.2.1.1.0-385.jar;httpcore-4.2.5.jar;httpclient-4.2.5.jar;hbase-server-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-common-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-client-0.98.0.2.1.1.0-385-hadoop2.jar;hbase-protocol-0.98.0.2.1.1.0-385-hadoop2.jar;htrace-core-2.04.jar;guava-12.0.1.jar;protobuf-java-2.5.0.jar;log4j-1.2.17.jar;slf4j-api-1.7.5.jar;slf4j-log4j12-1.7.5.jar;jackson-mapper-asl-1.8.8.jar;jackson-core-asl-1.8.8.jar;netty-3.6.6.Final.jar;antlr-runtime-3.4.jar;">
</classloader>
</extension>
<extension
point="org.talend.core.runtime.librariesNeeded">

View File

@@ -217,9 +217,7 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl {
}
ResultSet schemas = null;
// teradata use db name to filter schema
if (dbConn != null
&& (EDatabaseTypeName.TERADATA.getProduct().equals(((DatabaseConnection) dbConn).getProductId()) || EDatabaseTypeName.EXASOL
.getProduct().equals(((DatabaseConnection) dbConn).getProductId()))) {
if (dbConn != null && EDatabaseTypeName.TERADATA.getProduct().equals(((DatabaseConnection) dbConn).getProductId())) {
if (!dbConn.isContextMode()) {
String sid = ((DatabaseConnection) dbConn).getSID();
if (sid != null && sid.length() > 0) {
@@ -382,7 +380,12 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl {
}
try {
if (!isDbSupportCatalogNames(dbJDBCMetadata)) {
if (dbJDBCMetadata.getDatabaseProductName() != null
&& dbJDBCMetadata.getDatabaseProductName().indexOf(EDatabaseTypeName.ORACLEFORSID.getProduct()) > -1) {
return catalogList;
}
// ODBC teradata dosen't support 'dbJDBCMetadata.getCatalogs()',return at here.
if (ConnectionUtils.isOdbcTeradata(dbJDBCMetadata)) {
return catalogList;
}
ResultSet catalogNames = null;
@@ -555,15 +558,6 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl {
return catalogList;
}
private boolean isDbSupportCatalogNames(DatabaseMetaData dbJDBCMetadata) throws SQLException {
// Now here that OracleForSid,OdbcTeradata,Exasol dosen't support the catalog name.
if (ConnectionUtils.isOracleForSid(dbJDBCMetadata, EDatabaseTypeName.ORACLEFORSID.getProduct())
|| ConnectionUtils.isOdbcTeradata(dbJDBCMetadata) || ConnectionUtils.isExasol(dbJDBCMetadata)) {
return false;
}
return true;
}
/**
* fill the catalog and schemas into Postgresql database connection.
*

View File

@@ -905,8 +905,7 @@ public class ExtractMetaDataUtils {
// feature TDI-22108
if (EDatabaseTypeName.VERTICA.getXmlName().equals(dbType)
&& (EDatabaseVersion4Drivers.VERTICA_6.getVersionValue().equals(dbVersion)
|| EDatabaseVersion4Drivers.VERTICA_5_1.getVersionValue().equals(dbVersion)
|| EDatabaseVersion4Drivers.VERTICA_6_1_X.getVersionValue().equals(dbVersion) || EDatabaseVersion4Drivers.VERTICA_7
|| EDatabaseVersion4Drivers.VERTICA_5_1.getVersionValue().equals(dbVersion) || EDatabaseVersion4Drivers.VERTICA_6_1_X
.getVersionValue().equals(dbVersion))) {
driverClassName = EDatabase4DriverClassName.VERTICA2.getDriverClass();
}

View File

@@ -149,9 +149,7 @@ public class JDBCDriverLoader {
connection = wapperDriver.connect(url, info);
} else {
if (ConnectionUtils.isHsql(url)) {
url = ConnectionUtils.addShutDownForHSQLUrl(url, additionalParams);
}
url = ConnectionUtils.addShutDownForHSQLUrl(url, additionalParams);
if (dbType != null && dbType.equalsIgnoreCase(EDatabaseTypeName.MSSQL.getDisplayName()) && "".equals(username)) {
ExtractMetaDataUtils.getInstance().setDriverCache(wapperDriver);
}

View File

@@ -23,14 +23,12 @@ import org.apache.commons.lang.StringUtils;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.utils.database.AbstractFakeDatabaseMetaData;
import org.talend.commons.utils.database.EmbeddedHiveResultSet;
import org.talend.commons.utils.system.EnvironmentUtils;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.database.EDatabaseTypeName;
import org.talend.core.database.conn.ConnParameterKeys;
import org.talend.core.model.metadata.IMetadataConnection;
import org.talend.core.model.metadata.builder.database.ExtractMetaDataFromDataBase.ETableTypes;
import org.talend.core.model.metadata.builder.database.TableInfoParameters;
import org.talend.core.model.metadata.connection.hive.HiveConnVersionInfo;
import org.talend.core.utils.ReflectionUtils;
import org.talend.designer.core.IDesignerCoreService;
import org.talend.metadata.managment.hive.HiveClassLoaderFactory;
@@ -56,13 +54,10 @@ public class EmbeddedHiveDataBaseMetadata extends AbstractFakeDatabaseMetaData {
private ClassLoader classLoader;
private boolean isSupportJRE;
public EmbeddedHiveDataBaseMetadata(IMetadataConnection metadataConn) {
super();
this.metadataConn = metadataConn;
this.classLoader = HiveClassLoaderFactory.getInstance().getClassLoader(metadataConn);
this.isSupportJRE = true;
init();
}
@@ -95,9 +90,6 @@ public class EmbeddedHiveDataBaseMetadata extends AbstractFakeDatabaseMetaData {
"loginUserFromKeytab", new String[] { principal, keytabPath }); //$NON-NLS-1$
}
}
} catch (UnsupportedClassVersionError e) {
// catch the UnsupportedClassVersionError to show user the current jre version is lower
isSupportJRE = false;
} catch (Exception e) {
ExceptionHandler.process(e);
} finally {
@@ -115,18 +107,6 @@ public class EmbeddedHiveDataBaseMetadata extends AbstractFakeDatabaseMetaData {
* @throws SQLException
*/
public boolean checkConnection() throws SQLException {
if (!isSupportJRE) {
throw new SQLException("This function is not available with a JDK < 1.7"); //$NON-NLS-1$
}
boolean isWindows = EnvironmentUtils.isWindowsSystem();
String hive_version = (String) this.metadataConn.getParameter("CONN_PARA_KEY_HIVE_VERSION");
boolean isSupportEmbedded = ArrayUtils.contains(HiveConnVersionInfo.getHiveVersionsNotSupportOnWindows(),
HiveConnVersionInfo.valueOf(hive_version));
if (isWindows && isSupportEmbedded) {
throw new SQLException("Function not supported on windows"); //$NON-NLS-1$
}
getTables(this.metadataConn.getDatabase(), null, null, new String[] { "TABLE", "VIEW", "SYSTEM_TABLE" }); //$NON-NLS-1$//$NON-NLS-2$ //$NON-NLS-3$
return true;
}

View File

@@ -172,9 +172,7 @@ public class MetadataConnectionUtils {
String userName = metadataBean.getUsername();
String dbType = metadataBean.getDbType();
if (ConnectionUtils.isHsql(dbUrl)) {
dbUrl = ConnectionUtils.addShutDownForHSQLUrl(dbUrl, metadataBean.getAdditionalParams());
}
dbUrl = ConnectionUtils.addShutDownForHSQLUrl(dbUrl, metadataBean.getAdditionalParams());
Properties props = new Properties();
props.setProperty(TaggedValueHelper.PASSWORD, password == null ? "" : password); //$NON-NLS-1$

View File

@@ -27,7 +27,6 @@ import org.talend.core.model.metadata.builder.connection.DatabaseConnection;
import org.talend.core.model.metadata.builder.database.JavaSqlFactory;
import org.talend.core.model.metadata.builder.database.hive.EmbeddedHiveDataBaseMetadata;
import org.talend.core.model.metadata.connection.hive.HiveConnVersionInfo;
import org.talend.core.utils.ReflectionUtils;
import org.talend.metadata.managment.hive.HiveClassLoaderFactory;
/**
@@ -111,19 +110,6 @@ public class HiveConnectionManager extends DataBaseConnectionManager {
String hivePrincipal = (String) metadataConn.getParameter(ConnParameterKeys.HIVE_AUTHENTICATION_HIVEPRINCIPLA);
if (useKerberos) {
System.setProperty(HiveConfKeysForTalend.HIVE_CONF_KEY_HIVE_METASTORE_KERBEROS_PRINCIPAL.getKey(), hivePrincipal);
String principal = (String) metadataConn.getParameter(ConnParameterKeys.HIVE_AUTHENTICATION_PRINCIPLA);
String keytabPath = (String) metadataConn.getParameter(ConnParameterKeys.HIVE_AUTHENTICATION_KEYTAB);
boolean useKeytab = Boolean.valueOf((String) metadataConn
.getParameter(ConnParameterKeys.HIVE_AUTHENTICATION_USEKEYTAB));
if (useKeytab) {
ClassLoader hiveClassLoader = HiveClassLoaderFactory.getInstance().getClassLoader(metadataConn);
try {
ReflectionUtils.invokeStaticMethod("org.apache.hadoop.security.UserGroupInformation", hiveClassLoader, //$NON-NLS-1$
"loginUserFromKeytab", new String[] { principal, keytabPath }); //$NON-NLS-1$
} catch (Exception e) {
throw new SQLException(e);
}
}
}
if (connURL.startsWith(DatabaseConnConstants.HIVE_2_URL_FORMAT)) {
hiveStandaloneConn = createHive2StandaloneConnection(metadataConn);
@@ -138,62 +124,48 @@ public class HiveConnectionManager extends DataBaseConnectionManager {
private Connection createHive2StandaloneConnection(IMetadataConnection metadataConn) throws ClassNotFoundException,
InstantiationException, IllegalAccessException, SQLException {
Connection conn = null;
String connURL = metadataConn.getUrl();
String username = metadataConn.getUsername();
String password = metadataConn.getPassword();
// 1. Get class loader.
ClassLoader currClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader hiveClassLoader = HiveClassLoaderFactory.getInstance().getClassLoader(metadataConn);
Thread.currentThread().setContextClassLoader(hiveClassLoader);
try {
// 2. Fetch the HiveDriver from the new classloader
Class<?> driver = Class.forName(EDatabase4DriverClassName.HIVE2.getDriverClass(), true, hiveClassLoader);
Driver hiveDriver = (Driver) driver.newInstance();
// 3. Try to connect by driver
Properties info = new Properties();
username = username != null ? username : ""; //$NON-NLS-1$
password = password != null ? password : "";//$NON-NLS-1$
info.setProperty("user", username);//$NON-NLS-1$
info.setProperty("password", password);//$NON-NLS-1$
conn = hiveDriver.connect(connURL, info);
} finally {
Thread.currentThread().setContextClassLoader(currClassLoader);
}
// 2. Fetch the HiveDriver from the new classloader
Class<?> driver = Class.forName(EDatabase4DriverClassName.HIVE2.getDriverClass(), true, hiveClassLoader);
Driver hiveDriver = (Driver) driver.newInstance();
return conn;
// 3. Try to connect by driver
Properties info = new Properties();
username = username != null ? username : ""; //$NON-NLS-1$
password = password != null ? password : "";//$NON-NLS-1$
info.setProperty("user", username);//$NON-NLS-1$
info.setProperty("password", password);//$NON-NLS-1$
return hiveDriver.connect(connURL, info);
}
private Connection createHive1StandaloneConnection(IMetadataConnection metadataConn) throws ClassNotFoundException,
InstantiationException, IllegalAccessException, SQLException {
Connection conn = null;
String connURL = metadataConn.getUrl();
String username = metadataConn.getUsername();
String password = metadataConn.getPassword();
// 1. Get class loader.
ClassLoader currClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader hiveClassLoader = HiveClassLoaderFactory.getInstance().getClassLoader(metadataConn);
Thread.currentThread().setContextClassLoader(hiveClassLoader);
try {
// 2. Fetch the HiveDriver from the new classloader
Class<?> driver = Class.forName(EDatabase4DriverClassName.HIVE.getDriverClass(), true, hiveClassLoader);
Driver hiveDriver = (Driver) driver.newInstance();
// 3. Try to connect by driver
Properties info = new Properties();
username = username != null ? username : ""; //$NON-NLS-1$
password = password != null ? password : "";//$NON-NLS-1$
info.setProperty("user", username);//$NON-NLS-1$
info.setProperty("password", password);//$NON-NLS-1$
conn = hiveDriver.connect(connURL, info);
} finally {
Thread.currentThread().setContextClassLoader(currClassLoader);
}
// 2. Fetch the HiveDriver from the new classloader
Class<?> driver = Class.forName(EDatabase4DriverClassName.HIVE.getDriverClass(), true, hiveClassLoader);
Driver hiveDriver = (Driver) driver.newInstance();
return conn;
// 3. Try to connect by driver
Properties info = new Properties();
username = username != null ? username : ""; //$NON-NLS-1$
password = password != null ? password : "";//$NON-NLS-1$
info.setProperty("user", username);//$NON-NLS-1$
info.setProperty("password", password);//$NON-NLS-1$
return hiveDriver.connect(connURL, info);
}
private Connection createHiveEmbeddedConnection(IMetadataConnection metadataConn) throws ClassNotFoundException,

View File

@@ -15,14 +15,10 @@ package org.talend.metadata.managment.hive;
import java.io.File;
import java.util.Set;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Platform;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.ILibraryManagerService;
import org.talend.core.classloader.ClassLoaderFactory;
import org.talend.core.classloader.DynamicClassLoader;
import org.talend.core.database.conn.ConnParameterKeys;
import org.talend.core.hadoop.EHadoopConfigurationJars;
import org.talend.core.model.metadata.IMetadataConnection;
import org.talend.core.model.metadata.connection.hive.HiveConnUtils;
import org.talend.metadata.managment.connection.manager.DatabaseConnConstants;
@@ -43,8 +39,6 @@ public class HiveClassLoaderFactory {
private static HiveClassLoaderFactory instance = null;
private final static String PATH_SEPARATOR = "/"; //$NON-NLS-1$
private HiveClassLoaderFactory() {
}
@@ -96,7 +90,7 @@ public class HiveClassLoaderFactory {
* DOC ycbai Comment method "appendExtraJars".
*
* <p>
* Add the extra jars which hive connection needed like when creating a hive embedded connection with kerberos.
* Add the extra jars which hive connection needed like when create a hive embedded connection with kerberos.
* </p>
*
* @param metadataConn
@@ -104,47 +98,15 @@ public class HiveClassLoaderFactory {
*/
private void appendExtraJars(IMetadataConnection metadataConn, ClassLoader classLoader) {
if (classLoader instanceof DynamicClassLoader) {
DynamicClassLoader loader = (DynamicClassLoader) classLoader;
loadConfigurationJars(metadataConn, loader);
loadAuthDriverJars(metadataConn, loader);
}
}
private void loadConfigurationJars(IMetadataConnection metadataConn, DynamicClassLoader loader) {
String distroKey = (String) metadataConn.getParameter(ConnParameterKeys.CONN_PARA_KEY_HIVE_DISTRIBUTION);
if (HiveConnUtils.isCustomDistro(distroKey)) {
return;
}
String[] configurationJars;
String useKrb = (String) metadataConn.getParameter(ConnParameterKeys.CONN_PARA_KEY_USE_KRB);
if (Boolean.valueOf(useKrb)) {
configurationJars = EHadoopConfigurationJars.HIVE.getEnableSecurityJars();
} else {
configurationJars = EHadoopConfigurationJars.HIVE.getDisableSecurityJars();
}
ILibraryManagerService librairesManagerService = (ILibraryManagerService) GlobalServiceRegister.getDefault().getService(
ILibraryManagerService.class);
String libStorePath = loader.getLibStorePath();
for (String dependentJar : configurationJars) {
librairesManagerService.retrieve(dependentJar, libStorePath, true, new NullProgressMonitor());
String jarPath = libStorePath + PATH_SEPARATOR + dependentJar;
File jarFile = new File(jarPath);
if (jarFile.exists()) {
loader.addLibraries(jarFile.getAbsolutePath());
}
}
}
private void loadAuthDriverJars(IMetadataConnection metadataConn, DynamicClassLoader loader) {
Set<String> libraries = loader.getLibraries();
String driverJarPath = (String) metadataConn.getParameter(ConnParameterKeys.HIVE_AUTHENTICATION_DRIVERJAR_PATH);
if (driverJarPath != null) {
final File driverJar = new File(driverJarPath);
if (driverJar.exists()) {
if (!libraries.contains(driverJar)) {
loader.addLibraries(driverJar.getAbsolutePath());
String driverJarPath = (String) metadataConn.getParameter(ConnParameterKeys.HIVE_AUTHENTICATION_DRIVERJAR_PATH);
if (driverJarPath != null) {
final File driverJar = new File(driverJarPath);
if (driverJar.exists()) {
DynamicClassLoader loader = (DynamicClassLoader) classLoader;
Set<String> libraries = loader.getLibraries();
if (!libraries.contains(driverJar)) {
loader.addLibraries(driverJar.getAbsolutePath());
}
}
}
}

View File

@@ -24,19 +24,19 @@ import org.talend.designer.joblet.model.JobletProcess;
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.talend.core.model.properties.impl.JobletProcessItemImpl#getJobletProcess <em>Joblet Process</em>}</li>
* <li>{@link org.talend.core.model.properties.impl.JobletProcessItemImpl#getIcon <em>Icon</em>}</li>
* <li>{@link org.talend.core.model.properties.impl.JobletProcessItemImpl#getJobletProcess <em>Joblet Process</em>}</li>
* <li>{@link org.talend.core.model.properties.impl.JobletProcessItemImpl#getIcon <em>Icon</em>}</li>
* </ul>
* </p>
*
*
* @generated
*/
public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem {
/**
* The cached value of the '{@link #getJobletProcess() <em>Joblet Process</em>}' reference. <!-- begin-user-doc -->
* The cached value of the '{@link #getJobletProcess() <em>Joblet Process</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
*
* @see #getJobletProcess()
* @generated
* @ordered
@@ -55,7 +55,6 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
protected JobletProcessItemImpl() {
@@ -64,7 +63,6 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
@@ -77,7 +75,7 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
*
* @generated NOT
*/
public synchronized JobletProcess getJobletProcess() {
public JobletProcess getJobletProcess() {
if (jobletProcess != null && jobletProcess.eIsProxy()) {
InternalEObject oldJobletProcess = (InternalEObject) jobletProcess;
jobletProcess = (JobletProcess) eResolveProxy(oldJobletProcess);
@@ -95,10 +93,9 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
}
}
if (jobletProcess != oldJobletProcess) {
if (eNotificationRequired()) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE,
PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS, oldJobletProcess, jobletProcess));
}
}
}
return jobletProcess;
@@ -106,7 +103,6 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public JobletProcess basicGetJobletProcess() {
@@ -115,16 +111,13 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setJobletProcess(JobletProcess newJobletProcess) {
JobletProcess oldJobletProcess = jobletProcess;
jobletProcess = newJobletProcess;
if (eNotificationRequired()) {
eNotify(new ENotificationImpl(this, Notification.SET, PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS,
oldJobletProcess, jobletProcess));
}
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS, oldJobletProcess, jobletProcess));
}
/**
@@ -150,10 +143,9 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
}
}
if (icon != oldIcon) {
if (eNotificationRequired()) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, PropertiesPackage.JOBLET_PROCESS_ITEM__ICON,
oldIcon, icon));
}
}
}
return icon;
@@ -161,7 +153,6 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public ByteArray basicGetIcon() {
@@ -170,87 +161,77 @@ public class JobletProcessItemImpl extends ItemImpl implements JobletProcessItem
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setIcon(ByteArray newIcon) {
ByteArray oldIcon = icon;
icon = newIcon;
if (eNotificationRequired()) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, PropertiesPackage.JOBLET_PROCESS_ITEM__ICON, oldIcon, icon));
}
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
if (resolve) {
return getJobletProcess();
}
return basicGetJobletProcess();
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
if (resolve) {
return getIcon();
}
return basicGetIcon();
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
if (resolve) return getJobletProcess();
return basicGetJobletProcess();
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
if (resolve) return getIcon();
return basicGetIcon();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
setJobletProcess((JobletProcess) newValue);
return;
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
setIcon((ByteArray) newValue);
return;
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
setJobletProcess((JobletProcess)newValue);
return;
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
setIcon((ByteArray)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
setJobletProcess((JobletProcess) null);
return;
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
setIcon((ByteArray) null);
return;
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
setJobletProcess((JobletProcess)null);
return;
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
setIcon((ByteArray)null);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
return jobletProcess != null;
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
return icon != null;
case PropertiesPackage.JOBLET_PROCESS_ITEM__JOBLET_PROCESS:
return jobletProcess != null;
case PropertiesPackage.JOBLET_PROCESS_ITEM__ICON:
return icon != null;
}
return super.eIsSet(featureID);
}

View File

@@ -22,18 +22,18 @@ import org.talend.designer.core.model.utils.emf.talendfile.ProcessType;
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.talend.core.model.properties.impl.ProcessItemImpl#getProcess <em>Process</em>}</li>
* <li>{@link org.talend.core.model.properties.impl.ProcessItemImpl#getProcess <em>Process</em>}</li>
* </ul>
* </p>
*
*
* @generated
*/
public class ProcessItemImpl extends ItemImpl implements ProcessItem {
/**
* The cached value of the '{@link #getProcess() <em>Process</em>}' reference. <!-- begin-user-doc --> <!--
* The cached value of the '{@link #getProcess() <em>Process</em>}' reference.
* <!-- begin-user-doc --> <!--
* end-user-doc -->
*
* @see #getProcess()
* @generated
* @ordered
@@ -42,7 +42,6 @@ public class ProcessItemImpl extends ItemImpl implements ProcessItem {
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
protected ProcessItemImpl() {
@@ -51,7 +50,6 @@ public class ProcessItemImpl extends ItemImpl implements ProcessItem {
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
@@ -64,7 +62,7 @@ public class ProcessItemImpl extends ItemImpl implements ProcessItem {
*
* @generated NOT
*/
public synchronized ProcessType getProcess() {
public ProcessType getProcess() {
if (process != null && process.eIsProxy()) {
InternalEObject oldProcess = (InternalEObject) process;
process = (ProcessType) eResolveProxy(oldProcess);
@@ -82,10 +80,9 @@ public class ProcessItemImpl extends ItemImpl implements ProcessItem {
}
}
if (process != oldProcess) {
if (eNotificationRequired()) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, PropertiesPackage.PROCESS_ITEM__PROCESS,
oldProcess, process));
}
}
}
return process;
@@ -93,7 +90,6 @@ public class ProcessItemImpl extends ItemImpl implements ProcessItem {
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public ProcessType basicGetProcess() {
@@ -102,74 +98,66 @@ public class ProcessItemImpl extends ItemImpl implements ProcessItem {
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setProcess(ProcessType newProcess) {
ProcessType oldProcess = process;
process = newProcess;
if (eNotificationRequired()) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, PropertiesPackage.PROCESS_ITEM__PROCESS, oldProcess, process));
}
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case PropertiesPackage.PROCESS_ITEM__PROCESS:
if (resolve) {
return getProcess();
}
return basicGetProcess();
case PropertiesPackage.PROCESS_ITEM__PROCESS:
if (resolve) return getProcess();
return basicGetProcess();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case PropertiesPackage.PROCESS_ITEM__PROCESS:
setProcess((ProcessType) newValue);
return;
case PropertiesPackage.PROCESS_ITEM__PROCESS:
setProcess((ProcessType)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case PropertiesPackage.PROCESS_ITEM__PROCESS:
setProcess((ProcessType) null);
return;
case PropertiesPackage.PROCESS_ITEM__PROCESS:
setProcess((ProcessType)null);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case PropertiesPackage.PROCESS_ITEM__PROCESS:
return process != null;
case PropertiesPackage.PROCESS_ITEM__PROCESS:
return process != null;
}
return super.eIsSet(featureID);
}

View File

@@ -5,9 +5,6 @@ Application.workspaceInUse=Workspace in use, cannot start multiple instances at
Application.WorkspaceInuseMessage=This workspace is already in use.\nYou cannot launch the Studio more than once with the same workspace.
Application.WorkspaceInuseTitle=Workspace error
Application.workspaceNotExiste=Workspace not exist, cannot start instances in this path.
Application.doNotSupportJavaVersionYetPoweredbyTalend=The Studio does not support Java 8. Java 7 is the recommended JVM version to be used. Refer to the following KB article on Talend Help Center for more information (requires a MyTalend account registration):
Application.doNotSupportJavaVersionYetNoPoweredbyTalend=The Studio does not support Java 8. Java 7 is the recommended JVM version to be used.
ApplicationActionBarAdvisor.menuFileLabel=&File
ApplicationActionBarAdvisor.menuEditLabel=&Edit
ApplicationActionBarAdvisor.navigateLabel=&Navigate

View File

@@ -28,19 +28,11 @@ import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.osgi.service.datalocation.Location;
import org.eclipse.swt.SWT;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPreferenceConstants;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.forms.events.HyperlinkAdapter;
import org.eclipse.ui.forms.events.HyperlinkEvent;
import org.eclipse.ui.forms.widgets.Hyperlink;
import org.talend.commons.exception.BusinessException;
import org.talend.commons.exception.ExceptionHandler;
import org.talend.commons.ui.swt.dialogs.ErrorDialogWidthDetailArea;
@@ -49,7 +41,6 @@ import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.migration.IMigrationToolService;
import org.talend.core.repository.CoreRepositoryPlugin;
import org.talend.core.tis.ICoreTisService;
import org.talend.core.ui.TalendBrowserLaunchHelper;
import org.talend.core.ui.branding.IBrandingService;
import org.talend.rcp.i18n.Messages;
import org.talend.repository.RegistrationPlugin;
@@ -72,11 +63,7 @@ public class Application implements IApplication {
try {
Shell shell = new Shell(display, SWT.ON_TOP);
// To show that the studio does not fully support java 8 yet
if (checkUnSupportJavaVersion(shell)) {
shell.dispose();
return EXIT_OK;
}
// check workspace inuse, if true, means have do lock in configurator. if false, will try to lock
if (!Boolean.getBoolean("org.talend.workspace.locked")) { //$NON-NLS-1$
// TDI-28205, the lock may be acquired by the configurator but leave a possibility to do it here for TOS
@@ -324,65 +311,4 @@ public class Application implements IApplication {
}
});
}
public boolean checkUnSupportJavaVersion(Shell shell) {
IBrandingService brandingService = (IBrandingService) GlobalServiceRegister.getDefault().getService(
IBrandingService.class);
String javaVersion = System.getProperty("java.version");
if (javaVersion != null) {
org.talend.commons.utils.Version v = new org.talend.commons.utils.Version(javaVersion);
if (v.getMajor() == 1 && v.getMinor() > 7) { // more than JDK 1.7
if (brandingService.isPoweredbyTalend()) {
OpenLinkMessageDialog dialog = new OpenLinkMessageDialog(shell, "", shell.getBackgroundImage(),
Messages.getString("Application.doNotSupportJavaVersionYetPoweredbyTalend"), MessageDialog.WARNING,
new String[] { "Quit" }, 0);
dialog.open();
return true;
} else {
MessageDialog dialog = new MessageDialog(shell, "", shell.getBackgroundImage(),
Messages.getString("Application.doNotSupportJavaVersionYetNoPoweredbyTalend"), MessageDialog.WARNING,
new String[] { "Quit" }, 0);
dialog.open();
return true;
}
}
}
return false;
}
private static class OpenLinkMessageDialog extends MessageDialog {
public OpenLinkMessageDialog(Shell parentShell, String dialogTitle, Image dialogTitleImage, String dialogMessage,
int dialogImageType, String[] dialogButtonLabels, int defaultIndex) {
super(parentShell, dialogTitle, dialogTitleImage, dialogMessage, dialogImageType, dialogButtonLabels, defaultIndex);
}
@Override
protected Control createDialogArea(Composite parent) {
// create message area
createMessageArea(parent);
// add custom controls
Composite composite = new Composite(parent, SWT.NONE);
GridLayout layout = new GridLayout();
layout.marginHeight = 0;
layout.marginWidth = 43;
composite.setLayout(layout);
GridData data = new GridData(GridData.FILL_BOTH);
data.horizontalSpan = 2;
composite.setLayoutData(data);
Hyperlink link = new Hyperlink(composite, SWT.WRAP);
link.setText("https://help.talend.com/display/KB/Java+8+Support");
link.setBackground(parent.getBackground());
link.setUnderlined(true);
link.addHyperlinkListener(new HyperlinkAdapter() {
@Override
public void linkActivated(HyperlinkEvent e) {
String url = "https://help.talend.com/display/KB/Java+8+Support";
TalendBrowserLaunchHelper.openURL(url);
}
});
return composite;
}
}
}

View File

@@ -61,25 +61,3 @@ RegisterManagement.passwordWrong=The password you entered is incorrect.
RegisterWizardPage.proxyUser=Proxy User
RegisterWizardPage.proxyPassword=Proxy Password
RegisterManagement.wrongUserOrPassword=Username or password is incorrect.
TalendForgeDialog.newProjectTitle=Connect to TalendForge
TalendForgeDialog.labelTitle=Connect your studio to TalendForge,the Talend online community.
TalendForgeDialog.labelMessageOne=Download new components and connectors from Talend Exchange
TalendForgeDialog.labelMessageTwo=Access the most recent documentation and tech articles from \nthe Talend social knowledgebase.
TalendForgeDialog.labelMessageThree=See the latest messages in the Talend discussion forums
TalendForgeDialog.createLabel=Create an account
TalendForgeDialog.userNameLabel=Username:
TalendForgeDialog.userNameLabel.tooltip=Username is needed
TalendForgeDialog.emailLabel=Email:
TalendForgeDialog.emailLabel.tooltip=Please input a valid email
TalendForgeDialog.countryLabel=Country:
TalendForgeDialog.link=(or connect on existing account):
TalendForgeDialog.passwordLabel=Password:
TalendForgeDialog.passwordLabel.tooltip=Password is needed
TalendForgeDialog.passwordAgainLabel=Password(again):
TalendForgeDialog.passwordAgainLabel.tooltip=Password(again) must be the same as Password
TalendForgeDialog.agreeButton=I Agree to the TalendForge Terms of Use
TalendForgeDialog.improveButton=I want to help to improve Talend by sharing anonymous usage statistics
TalendForgeDialog.readMore=(read more...)
TalendForgeDialog.createAccountButton=Create Account
TalendForgeDialog.proxySettingButton=Proxy settings...
TalendForgeDialog.skipButton=Skip

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.9 KiB

View File

@@ -61,38 +61,3 @@ RegisterManagement.userNameCharacter=Username should not contain more than 32 ch
RegisterManagement.realnameInvalid=Realname invalid (bad format).
RegisterManagement.errors=Errors
RegisterManagement.wrongUserOrPassword=Username or password is wrong.
TalendForgeDialog.newProjectTitle=Connect to TalendForge
TalendForgeDialog.labelTitle=Connect your studio to TalendForge,the Talend online community.
TalendForgeDialog.labelMessageOne=Download new components and connectors from Talend Exchange
TalendForgeDialog.labelMessageTwo=Access the most recent documentation and tech articles from \nthe Talend social knowledgebase.
TalendForgeDialog.labelMessageThree=See the latest messages in the Talend discussion forums
TalendForgeDialog.createLabel=Create an account
TalendForgeDialog.userNameLabel=Username:
TalendForgeDialog.userNameLabel.tooltip=Username is needed
TalendForgeDialog.emailLabel=Email:
TalendForgeDialog.emailLabel.tooltip=Please input a valid email
TalendForgeDialog.countryLabel=Country:
TalendForgeDialog.link=(or connect on existing account):
TalendForgeDialog.passwordLabel=Password:
TalendForgeDialog.passwordLabel.tooltip=Password is needed
TalendForgeDialog.passwordAgainLabel=Password(again):
TalendForgeDialog.passwordAgainLabel.tooltip=Password(again) must be the same as Password
TalendForgeDialog.agreeButton=I Agree to the TalendForge Terms of Use
TalendForgeDialog.improveButton=I want to help to improve Talend by sharing anonymous usage statistics
TalendForgeDialog.readMore=(read more...)
TalendForgeDialog.createAccountButton=Create Account
TalendForgeDialog.proxySettingButton=Proxy settings...
TalendForgeDialog.skipButton=Skip
TalendForgeDialog.proxyHost=Proxy Host
TalendForgeDialog.proxyPort=Proxy Port
TalendForgeDialog.proxyUser=Proxy User
TalendForgeDialog.proxyPassword=Proxy Password
TalendForgeDialog.netWorkSetting=Network setting
TalendForgeDialog.notValid=not valid
TalendForgeDialog.loginLabel=Log in
TalendForgeDialog.linkToCreate=(or create a new account):
TalendForgeDialog.connectButton=Connect
TalendForgeDialog.MessageTitle=Talend Forge
TalendForgeDialog.Message=Talend Forge Register Success!
TalendForgeDialog.ConnectSuccessMessage=Talend Forge Connect Success!
TalendForgeDialog.ConnectFailureMessage=Talend Forge Connect Failure!

View File

@@ -35,10 +35,8 @@ import org.eclipse.update.core.SiteManager;
import org.talend.commons.exception.BusinessException;
import org.talend.commons.ui.runtime.exception.ExceptionHandler;
import org.talend.commons.utils.VersionUtils;
import org.talend.commons.utils.platform.PluginChecker;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.general.ConnectionBean;
import org.talend.core.prefs.ITalendCorePrefConstants;
import org.talend.core.prefs.PreferenceManipulator;
import org.talend.core.token.DefaultTokenCollector;
import org.talend.core.ui.branding.IBrandingService;
@@ -450,10 +448,12 @@ public class RegisterManagement {
if (!brandingService.getBrandingConfiguration().isUseProductRegistration()) {
return;
}
boolean install_done = checkInstallDone();
ConnectionUserPerReader read = ConnectionUserPerReader.getInstance();
boolean install_done = read.isInstallDone();
if (install_done) {
return;
}
read.setInstallDone();
URL registURL = null;
try {
// UNIQUE_ID
@@ -493,29 +493,6 @@ public class RegisterManagement {
}
}
/**
* check the install is done or not, after call this method, will set install_done as true.
*
* @return
*/
private boolean checkInstallDone() {
boolean install_done = false;
if (PluginChecker.isOnlyTopLoaded()) {
IPreferenceStore prefStore = PlatformUI.getPreferenceStore();
install_done = prefStore.getBoolean(ITalendCorePrefConstants.TOP_INSTALL_DONE);
if (!install_done) {
prefStore.setValue(ITalendCorePrefConstants.TOP_INSTALL_DONE, Boolean.TRUE);
}
} else {
ConnectionUserPerReader read = ConnectionUserPerReader.getInstance();
install_done = read.isInstallDone();
if (!install_done) {
read.setInstallDone();
}
}
return install_done;
}
/**
* DOC mhirt Comment method "isProductRegistered".
*

View File

@@ -26,8 +26,6 @@ public enum ERepositoryImages implements IImage {
REGISTER_ICO("/icons/register_icon.png"), //$NON-NLS-1$
// REGISTER_ICO("/icons/login_v.png"), //$NON-NLS-1$
LICENSE_WIZ("/icons/license_wiz.png"), //$NON-NLS-1$
DOT_ICON("/icons/dot.jpg"), //$NON-NLS-N$
TALENDFORGE_ICON("/icons/talendforge.jpg"), //$NON-NLS-N$
REGISTER_WIZ("/icons/register_wiz.png"); //$NON-NLS-1$
private String path;

View File

@@ -833,8 +833,6 @@ ContextWizard.contextPageTitle=Step 2 of 2
ContextWizard.step0Description=Add any required information
ContextWizard.step0Title=Step 1 of 2
CreateTableAction.action.createTitle=Retrieve Schema
CreateTableAction.action.Warning=Warning
CreateTableAction.action.NotLockMessage=The item is not locked. \nTo retrieve a schema from a remote database, you must lock the connection first. \nContinue to view the schema?
DatabaseForm.hbase.settings=Version
DatabaseForm.hbase.distribution=Distribution
DatabaseForm.hbase.distribution.tooltip=Set the hadoop distribution
@@ -1391,6 +1389,3 @@ ConfirmReloadConnectionDialog.title=Confirm Reload Connection
ConfirmReloadConnectionDialog.desc=Connection properties changed, the analyzed elements of this connection \non which the analyses depend might be removed if reload it, \ndo you want continue?
ConfirmReloadConnectionDialog.reload=reload
ConfirmReloadConnectionDialog.unreload=don\'t reload
DatabaseForm.showContext=More options
DatabaseForm.hideContext=Less options
SeletorModuleForm.connectFromCustomModuleName.errorTitle=Error

View File

@@ -24,7 +24,6 @@ import org.eclipse.core.runtime.jobs.IJobChangeEvent;
import org.eclipse.core.runtime.jobs.JobChangeAdapter;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.wizard.IWizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Display;
@@ -84,7 +83,6 @@ import org.talend.cwm.helper.TableHelper;
import org.talend.metadata.managment.connection.manager.HiveConnectionManager;
import org.talend.repository.RepositoryWorkUnit;
import org.talend.repository.metadata.i18n.Messages;
import org.talend.repository.model.IProxyRepositoryFactory;
import org.talend.repository.model.IRepositoryNode.ENodeType;
import org.talend.repository.model.IRepositoryNode.EProperties;
import org.talend.repository.model.IRepositoryService;
@@ -1019,32 +1017,27 @@ public abstract class AbstractCreateTableAction extends AbstractCreateAction {
// ExtractMetaDataUtils.metadataCon = metadataConnection;
// when open,set use synonyms false.
ExtractMetaDataUtils.getInstance().setUseAllSynonyms(false);
IProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();
boolean repositoryObjectEditable = factory.isEditableAndLockIfPossible(node.getObject());
if (!repositoryObjectEditable) {
boolean flag = MessageDialog.openConfirm(PlatformUI.getWorkbench().getActiveWorkbenchWindow()
.getShell(), Messages.getString("CreateTableAction.action.Warning"),
Messages.getString("CreateTableAction.action.NotLockMessage"));
if (flag) {
DatabaseTableWizard databaseTableWizard = new DatabaseTableWizard(
PlatformUI.getWorkbench(), creation, node.getObject(), metadataTable,
getExistingNames(), forceReadOnly, managerConnection, metadataConnection);
WizardDialog wizardDialog = new WizardDialog(PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getShell(), databaseTableWizard);
wizardDialog.setBlockOnOpen(true);
handleWizard(node, wizardDialog);
}
} else {
DatabaseTableWizard databaseTableWizard = new DatabaseTableWizard(PlatformUI.getWorkbench(),
creation, node.getObject(), metadataTable, getExistingNames(), forceReadOnly,
managerConnection, metadataConnection);
WizardDialog wizardDialog = new WizardDialog(PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getShell(), databaseTableWizard);
wizardDialog.setBlockOnOpen(true);
handleWizard(node, wizardDialog);
}
DatabaseTableWizard databaseTableWizard = new DatabaseTableWizard(PlatformUI.getWorkbench(),
creation, node.getObject(), metadataTable, getExistingNames(), forceReadOnly,
managerConnection, metadataConnection);
// UIJob uijob = new UIJob("") { //$NON-NLS-1$
//
// // modified by wzhang. when connection failed,error message display.
// public IStatus runInUIThread(IProgressMonitor monitor) {
// if (!managerConnection.getIsValide()) {
// MessageDialog.openError(null,
// Messages.getString("AbstractCreateTableAction.connError"), //$NON-NLS-1$
// Messages.getString("AbstractCreateTableAction.errorMessage")); //$NON-NLS-1$
// }
// return Status.OK_STATUS;
// }
//
// };
WizardDialog wizardDialog = new WizardDialog(PlatformUI.getWorkbench().getActiveWorkbenchWindow()
.getShell(), databaseTableWizard);
wizardDialog.setBlockOnOpen(true);
// uijob.schedule(1300);
handleWizard(node, wizardDialog);
} else {
// added for bug 16595
// no need connect to database when double click one schema.

View File

@@ -33,7 +33,6 @@ import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.custom.ScrolledComposite;
import org.eclipse.swt.events.ControlAdapter;
import org.eclipse.swt.events.ControlEvent;
@@ -48,10 +47,6 @@ import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
@@ -384,23 +379,6 @@ public class DatabaseForm extends AbstractForm {
private Composite authenticationCom;
private SashForm sash;
private Composite dbConnectionArea;
private Composite hidableArea;
private Button moveButton;
private static final String UP = "^"; //$NON-NLS-1$
private static final String DOWN = "v"; //$NON-NLS-1$
/**
* wheather the db properties group visible
*/
private boolean isDbPropertiesVisible = true;
/**
* Constructor to use by a Wizard to create a new database connection.
*
@@ -429,34 +407,12 @@ public class DatabaseForm extends AbstractForm {
this.provider = ExtractMetaDataFromDataBase.getProviderByDbType(metadataconnection.getDbType());
}
setupForm(true);
refreshHidableArea();
addStringConnectionControls();
GridLayout layout2 = (GridLayout) getLayout();
layout2.marginHeight = 0;
setLayout(layout2);
}
/**
* refresh the hidable area when hide/visible the DbProperties group
*/
protected void refreshHidableArea() {
if (exportContextBtn != null) {
if (isDbPropertiesVisible) {
exportContextBtn.getControl().getParent().getParent().setParent(hidableArea);
} else {
exportContextBtn.getControl().getParent().getParent().setParent(dbConnectionArea);
}
}
moveButton.setVisible(isDbPropertiesVisible);
hidableArea.setVisible(isDbPropertiesVisible);
sash.setSashWidth(2);
sash.setWeights(new int[] { 21, 12 });
hidableArea.layout();
this.layout();
}
/**
* initialize UI (button save & default settings or saved settings).
*/
@@ -686,27 +642,10 @@ public class DatabaseForm extends AbstractForm {
protected void addFields() {
int width = getSize().x;
GridLayout layout2;
Composite parent = new Composite(this, SWT.NONE);
// FillLayout fillLayout = new FillLayout();
// fillLayout.marginHeight = 0;
// fillLayout.marginWidth = 0;
parent.setLayout(new FillLayout());
GridData parentGridData = new GridData(SWT.FILL, SWT.FILL, true, true);
parent.setLayoutData(parentGridData);
sash = new SashForm(parent, SWT.VERTICAL | SWT.SMOOTH);
// sash.setLayoutData(new GridData(GridData.FILL_BOTH));
sash.setBackground(parent.getDisplay().getSystemColor(SWT.COLOR_WHITE));
GridLayout layout = new GridLayout();
sash.setLayout(layout);
dbConnectionArea = new Composite(sash, SWT.NONE);
GridLayout dbConnAreaLayout = new GridLayout(1, false);
dbConnectionArea.setLayout(dbConnAreaLayout);
// The orginal high is 270.
// databaseSettingGroup = Form.createGroup(this, 1, Messages.getString("DatabaseForm.groupDatabaseSettings"), 450); //$NON-NLS-1$
//
databaseSettingGroup = new Group(dbConnectionArea, SWT.NONE);
databaseSettingGroup = new Group(this, SWT.NONE);
GridLayout gridLayout1 = new GridLayout(1, false);
databaseSettingGroup.setLayout(gridLayout1);
GridData gridData1 = new GridData(SWT.FILL, SWT.FILL, true, true);
@@ -1112,8 +1051,8 @@ public class DatabaseForm extends AbstractForm {
try {
MyURLClassLoader cl = new MyURLClassLoader(file.toURL());
Class[] classes = cl.getAssignableClasses(Driver.class);
for (Class classe : classes) {
driverClassTxt.add(classe.getName());
for (int i = 0; i < classes.length; ++i) {
driverClassTxt.add(classes[i].getName());
}
} catch (Exception ex) {
ExceptionHandler.process(ex);
@@ -1181,6 +1120,9 @@ public class DatabaseForm extends AbstractForm {
hideControl(browseDriverClassButton, false);
usernameTxt.show();
passwordTxt.show();
hideControl(useKeyTab, false);
principalTxt.show();
keytabTxt.show();
} else {
metastoreUrlTxt.hide();
driverJarTxt.hide();
@@ -1189,6 +1131,9 @@ public class DatabaseForm extends AbstractForm {
hideControl(browseDriverClassButton, true);
usernameTxt.hide();
passwordTxt.hide();
hideControl(useKeyTab, true);
principalTxt.hide();
keytabTxt.hide();
}
}
@@ -1794,45 +1739,38 @@ public class DatabaseForm extends AbstractForm {
*/
private void addCheckAndStandardButtons(int width, Composite compositeGroupDbSettings) {
GridLayout layout2 = null;
fileField.hide();
directoryField.hide();
Composite unionBtnsCompsite = new Composite(dbConnectionArea, SWT.NONE);
FormLayout formLayout = new FormLayout();
unionBtnsCompsite.setLayout(formLayout);
moveButton = new Button(unionBtnsCompsite, SWT.PUSH);
moveButton.setText(DOWN);
moveButton.setToolTipText(Messages.getString("DatabaseForm.hideContext")); //$NON-NLS-1$
addMoveButtonListener();
FormData moveButtonFormData = new FormData();
moveButtonFormData.right = new FormAttachment(100, 0);
moveButton.setLayoutData(moveButtonFormData);
// Button Check
Composite checkGroup = new Composite(unionBtnsCompsite, SWT.NONE);
// align moveButton with checkGroup
moveButtonFormData.top = new FormAttachment(checkGroup, 0, SWT.CENTER);
FormData checkGroupFormData = new FormData();
checkGroupFormData.left = new FormAttachment(0, 0);
checkGroupFormData.right = new FormAttachment(100, 0);
checkGroup.setLayoutData(checkGroupFormData);
// Group checkGroup = Form.createGroup(this, 1, "", 5);
Composite checkGroup = new Composite(this, SWT.NONE);
GridLayout gridLayout = new GridLayout(1, false);
checkGroup.setLayout(gridLayout);
GridData gridData23 = new GridData(SWT.FILL, SWT.FILL, true, true);
gridData23.minimumHeight = 2;
gridData23.heightHint = 2;
checkGroup.setLayoutData(gridData23);
Composite compositeCheckButton = Form.startNewGridLayout(checkGroup, 1, false, SWT.CENTER, SWT.BOTTOM);
layout2 = (GridLayout) compositeCheckButton.getLayout();
layout2.marginHeight = 0;
layout2.marginTop = 0;
layout2.marginBottom = 0;
layout2.marginLeft = 0;
layout2.marginRight = 0;
layout2.marginWidth = 0;
unionBtnsCompsite.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
GridData checkGridData = new GridData(GridData.FILL_HORIZONTAL);
checkGridData.minimumHeight = 5;
checkGroup.setLayoutData(checkGridData);
checkButton = new UtilsButton(compositeCheckButton, Messages.getString("DatabaseForm.check"), WIDTH_BUTTON_PIXEL, //$NON-NLS-1$
HEIGHT_BUTTON_PIXEL);
checkButton.setEnabled(false);
hidableArea = new Composite(sash, SWT.NONE);
GridLayout hidableAreaLayout = new GridLayout(1, false);
hidableArea.setLayout(hidableAreaLayout);
// Group Database Properties
Group group1 = Form.createGroup(hidableArea, 1, Messages.getString("DatabaseForm.groupDatabaseProperties")); //$NON-NLS-1$
Group group1 = Form.createGroup(this, 1, Messages.getString("DatabaseForm.groupDatabaseProperties")); //$NON-NLS-1$
GridData gridData = new GridData(GridData.FILL_HORIZONTAL);
// gridData.minimumHeight = 50;
gridData.heightHint = 80;
@@ -1868,6 +1806,8 @@ public class DatabaseForm extends AbstractForm {
GridLayout layout = new GridLayout(4, false);
layout.horizontalSpacing = 15;
layout.verticalSpacing = 0;
layout.marginHeight = 0;
layout.marginWidth = 0;
GridData layoutData = new GridData(GridData.FILL_HORIZONTAL);
layoutData.horizontalSpan = 4;
c.setLayoutData(layoutData);
@@ -1893,26 +1833,6 @@ public class DatabaseForm extends AbstractForm {
group1.setVisible(false);
}
}
isDbPropertiesVisible = group1.getVisible();
}
private void addMoveButtonListener() {
// TODO Auto-generated method stub
moveButton.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(final SelectionEvent e) {
if (moveButton.getText().equals(DOWN)) {
sash.setWeights(new int[] { 33, 0 });
moveButton.setToolTipText(Messages.getString("DatabaseForm.showContext")); //$NON-NLS-1$
moveButton.setText(UP);
} else if (moveButton.getText().equals(UP)) {
sash.setWeights(new int[] { 21, 12 });
moveButton.setToolTipText(Messages.getString("DatabaseForm.hideContext")); //$NON-NLS-1$
moveButton.setText(DOWN);
}
}
});
}
/**
@@ -3225,8 +3145,8 @@ public class DatabaseForm extends AbstractForm {
try {
MyURLClassLoader cl = new MyURLClassLoader(file.toURL());
Class[] classes = cl.getAssignableClasses(Driver.class);
for (Class classe : classes) {
generalJdbcClassNameText.add(classe.getName());
for (int i = 0; i < classes.length; ++i) {
generalJdbcClassNameText.add(classes[i].getName());
}
} catch (Exception ex) {
ExceptionHandler.process(ex);
@@ -4681,21 +4601,21 @@ public class DatabaseForm extends AbstractForm {
int hiveModeIndex = hiveModeCombo.getSelectionIndex();
int hiveServerIndex = hiveServerVersionCombo.getSelectionIndex();
// MOD msjian TDQ-6407 2012-11-26: for top, until now, not support embeded mode for hive
// if (isTOPStandaloneMode()) {
// getConnection().setURL(getStringConnection());
// handleUIWhenStandaloneModeSelected();
// } else {
boolean isEmbeddedMode = HiveConnUtils
.isEmbeddedMode(distributionIndex, hiveVersionIndex, hiveModeIndex, hiveServerIndex);
getConnection().setURL(getStringConnection());
if (isEmbeddedMode) {
// handleEmbeddedMode();
handleUIWhenEmbeddedModeSelected();
} else {
// handleStandaloneMode();
if (isTOPStandaloneMode()) {
getConnection().setURL(getStringConnection());
handleUIWhenStandaloneModeSelected();
} else {
boolean isEmbeddedMode = HiveConnUtils.isEmbeddedMode(distributionIndex, hiveVersionIndex, hiveModeIndex,
hiveServerIndex);
getConnection().setURL(getStringConnection());
if (isEmbeddedMode) {
// handleEmbeddedMode();
handleUIWhenEmbeddedModeSelected();
} else {
// handleStandaloneMode();
handleUIWhenStandaloneModeSelected();
}
}
// }
// TDQ-6407~
doUpdateConnection();

View File

@@ -413,11 +413,8 @@ public class DatabaseWizard extends CheckLastVersionRepositoryWizard implements
String driverClass = ExtractMetaDataUtils.getInstance().getDriverClassByDbType(connection.getDatabaseType());
// feature TDI-22108
if (EDatabaseTypeName.VERTICA.equals(dbType)
&& (EDatabaseVersion4Drivers.VERTICA_6.getVersionValue().equals(connection.getDbVersionString())
|| EDatabaseVersion4Drivers.VERTICA_5_1.getVersionValue().equals(connection.getDbVersionString())
|| EDatabaseVersion4Drivers.VERTICA_6_1_X.getVersionValue().equals(
connection.getDbVersionString()) || EDatabaseVersion4Drivers.VERTICA_7.getVersionValue()
.equals(connection.getDbVersionString()))) {
&& (EDatabaseVersion4Drivers.VERTICA_6.getVersionValue().equals(connection.getDbVersionString()) || EDatabaseVersion4Drivers.VERTICA_5_1
.getVersionValue().equals(connection.getDbVersionString()))) {
driverClass = EDatabase4DriverClassName.VERTICA2.getDriverClass();
}
((DatabaseConnection) connectionItem.getConnection()).setDriverClass(driverClass);

View File

@@ -52,7 +52,6 @@ import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.dialogs.SearchPattern;
import org.talend.commons.ui.runtime.exception.ExceptionHandler;
import org.talend.commons.ui.runtime.exception.ExceptionMessageDialog;
import org.talend.commons.ui.swt.dialogs.ErrorDialogWidthDetailArea;
import org.talend.commons.ui.swt.formtools.Form;
import org.talend.commons.ui.swt.formtools.UtilsButton;
@@ -330,7 +329,7 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
// Button Create Table
String displayStr = Messages.getString("SelectorTableForm.selectAllTables"); //$NON-NLS-1$
Point buttonSize = gc.stringExtent(displayStr);
selectAllTablesButton = new UtilsButton(compositeRetreiveSchemaButton, displayStr, buttonSize.x + 12, HEIGHT_BUTTON_PIXEL);
selectAllTablesButton = new UtilsButton(compositeRetreiveSchemaButton, displayStr, buttonSize.x + 12, HEIGHT_BUTTON_PIXEL); //$NON-NLS-1$
displayStr = Messages.getString("SelectorTableForm.selectNoneTables"); //$NON-NLS-1$
buttonSize = gc.stringExtent(displayStr);
@@ -417,7 +416,6 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
private final Comparator strComparator = new Comparator() {
@Override
public int compare(Object arg0, Object arg1) {
TableItem t1 = (TableItem) arg0;
@@ -515,8 +513,8 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
updateStatus(IStatus.ERROR, null);
TableItem[] tableItems = table.getItems();
int size = tableItems.length;
for (TableItem tableItem2 : tableItems) {
TableItem tableItem = tableItem2;
for (int i = 0; i < tableItems.length; i++) {
TableItem tableItem = tableItems[i];
if (!tableItem.getChecked()) {
tableItem.setText(3, Messages.getString("SelectorTableForm.Pending")); //$NON-NLS-1$
countPending++;
@@ -541,7 +539,8 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
countSuccess = 0;
countPending = 0;
TableItem[] tableItems = table.getItems();
for (TableItem tableItem : tableItems) {
for (int i = 0; i < tableItems.length; i++) {
TableItem tableItem = tableItems[i];
if (tableItem.getChecked()) {
clearTableItem(tableItem);
tableItem.setChecked(false);
@@ -612,7 +611,6 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
}
parentWizardPage.getWizard().getContainer().run(true, true, new IRunnableWithProgress() {
@Override
public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
monitor.beginTask(Messages.getString("CreateTableAction.action.createTitle"), IProgressMonitor.UNKNOWN); //$NON-NLS-1$
@@ -621,36 +619,24 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
String proxy = null;
oldTemConnection = getOriginalValueConnection();
if (oldTemConnection.isUseProxy()) {
proxy = SalesforceModuleParseAPI.USE_SOCKS_PROXY;
proxy = SalesforceModuleParseAPI.USE_SOCKS_PROXY;//$NON-NLS-1$
} else if (oldTemConnection.isUseHttpProxy()) {
proxy = SalesforceModuleParseAPI.USE_HTTP_PROXY;
proxy = SalesforceModuleParseAPI.USE_HTTP_PROXY;//$NON-NLS-1$
}
try {
itemTableName = connectFromCustomModuleName(proxy);
if (itemTableName.size() <= 0) {
// connection is done but any table exist
if (displayMessageBox) {
openInfoDialogInUIThread(getShell(),
Messages.getString("DatabaseTableForm.checkConnection"), Messages //$NON-NLS-1$
.getString("DatabaseTableForm.tableNoExist"), true);//$NON-NLS-1$
}
} else {
createAllItems(displayMessageBox, null);
itemTableName = connectFromCustomModuleName(proxy);
if (itemTableName.size() <= 0) {
// connection is done but any table exist
if (displayMessageBox) {
openInfoDialogInUIThread(getShell(),
Messages.getString("DatabaseTableForm.checkConnection"), Messages //$NON-NLS-1$
.getString("DatabaseTableForm.tableNoExist"), true);//$NON-NLS-1$
}
} catch (final Exception ex) {
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
ExceptionMessageDialog.openError(
getShell(),
Messages.getString("SeletorModuleForm.connectFromCustomModuleName.errorTitle"), ex.getMessage(), ex); //$NON-NLS-1$
}
});
} finally {
monitor.done();
} else {
createAllItems(displayMessageBox, null);
}
monitor.done();
}
});
} catch (Exception e) {
@@ -668,7 +654,6 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
private void createAllItems(final boolean displayMessageBox, final List<String> newList) {
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
List<String> list = new ArrayList<String>();
if (newList != null) {
@@ -708,21 +693,16 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
});
}
public static void openInfoDialogInUIThread(Shell shell, final String title, final String msg, boolean ifUseRunnable) {
public static void openInfoDialogInUIThread(final Shell shell, final String title, final String msg, boolean ifUseRunnable) {
if (ifUseRunnable) {
Display.getDefault().asyncExec(new Runnable() {
shell.getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
MessageDialog.openInformation(new Shell(), title, msg);
MessageDialog.openInformation(shell, title, msg);
}
});
} else {
Shell iShell = shell;
if (iShell == null) {
iShell = new Shell();
}
MessageDialog.openInformation(iShell, title, msg);
MessageDialog.openInformation(shell, title, msg);
}
}
@@ -893,8 +873,8 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
if (runnable != null) {
return runnable;
}
for (Object element2 : getQueue()) {
RetrieveColumnRunnable element = (RetrieveColumnRunnable) element2;
for (Iterator iter = getQueue().iterator(); iter.hasNext();) {
RetrieveColumnRunnable element = (RetrieveColumnRunnable) iter.next();
if (element.getTableItem() == key) {
return element;
}
@@ -958,7 +938,6 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
getConnection().setModuleName(tableString);
}
@Override
public void run() {
if (isCanceled()) {
return;
@@ -1063,7 +1042,6 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
//
Display.getDefault().syncExec(new Runnable() {
@Override
public void run() {
if (isCanceled()) {
return;
@@ -1184,11 +1162,7 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
protected void addFieldsListeners() {
nameFilter.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
if (itemTableName == null) {
return;
}
List<String> newList = new ArrayList<String>();
String pattern = nameFilter.getText();
@@ -1344,7 +1318,6 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
}
}
@Override
protected SalesforceSchemaConnection getConnection() {
if (oldTemConnection != null) {
return oldTemConnection;
@@ -1420,7 +1393,7 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
return this.itemTableName;
}
public List<String> connectFromCustomModuleName(String proxy) throws Exception {
public List<String> connectFromCustomModuleName(String proxy) {
preparModuleInit();
SalesforceModuleParseAPI salesforceAPI = new SalesforceModuleParseAPI();
String[] types = null;
@@ -1439,82 +1412,89 @@ public class SelectorModulesForm extends AbstractSalesforceStepForm {
httpProxy = true;
}
}
if (loginType.equalsIgnoreCase(BASIC)) {
salesforceAPI.resetAllProxy();
salesforceAPI.setProxy(proxyHost, proxyPort, proxyUsername, proxyPassword, httpProxy, socksProxy, httpsProxy);
salesforceAPI.login(endPoint, username, pwd, timeOut);
ISalesforceModuleParser currentAPI = salesforceAPI.getCurrentAPI();
if (currentAPI instanceof SalesforceModuleParseEnterprise) {
describeGlobalResult = describeGlobal();
if (describeGlobalResult != null) {
types = describeGlobalResult.getTypes();
try {
if (loginType.equalsIgnoreCase(BASIC)) {
salesforceAPI.resetAllProxy();
salesforceAPI.setProxy(proxyHost, proxyPort, proxyUsername, proxyPassword, httpProxy, socksProxy, httpsProxy);
salesforceAPI.login(endPoint, username, pwd, timeOut);
ISalesforceModuleParser currentAPI = salesforceAPI.getCurrentAPI();
if (currentAPI instanceof SalesforceModuleParseEnterprise) {
describeGlobalResult = describeGlobal();
if (describeGlobalResult != null) {
types = describeGlobalResult.getTypes();
}
} else {
// for bug 17280 use new jar axis2 for salesforce component and wizard.
if (currentAPI instanceof SalesforceModuleParserPartner) {
SalesforceModuleParserPartner partner = (SalesforceModuleParserPartner) currentAPI;
SforceManagementImpl sforceManagement = partner.getSforceManagement();
SessionHeader sessionHeader = sforceManagement.getSessionHeader();
DescribeGlobal dg = new DescribeGlobal();
com.salesforce.soap.partner.DescribeGlobalResult dgr = sforceManagement.getStub()
.describeGlobal(dg, sessionHeader, null, null).getResult();
dgsrs = dgr.getSobjects();
}
}
} else {
// for bug 17280 use new jar axis2 for salesforce component and wizard.
if (currentAPI instanceof SalesforceModuleParserPartner) {
SalesforceModuleParserPartner partner = (SalesforceModuleParserPartner) currentAPI;
SforceManagementImpl sforceManagement = partner.getSforceManagement();
SessionHeader sessionHeader = sforceManagement.getSessionHeader();
salesforceAPI.resetAllProxy();
salesforceAPI.setProxy(proxyHost, proxyPort, proxyUsername, proxyPassword, httpProxy, socksProxy, httpsProxy);
Token token = salesforceAPI.login(endPointForAuth, consumerKey, consumeSecret, callbackHost, callbackPort,
salesforceVersion, tokenProperties, timeOut);
if (token != null) {
org.talend.salesforce.SforceManagement sfMgr = new org.talend.salesforce.SforceManagementImpl();
OAuthClient client = new OAuthClient();
client.setBaseOAuthURL(endPointForAuth);
client.setCallbackHost(callbackHost);
client.setCallbackPort(Integer.parseInt(callbackPort));
client.setClientID(consumerKey);
client.setClientSecret(consumeSecret);
String endpoint = client.getSOAPEndpoint(token, salesforceVersion);
boolean result = sfMgr.login(token.getAccess_token(), endpoint, Integer.parseInt(timeOut), false);
SessionHeader sessionHeader = sfMgr.getSessionHeader();
DescribeGlobal dg = new DescribeGlobal();
com.salesforce.soap.partner.DescribeGlobalResult dgr = sforceManagement.getStub()
com.salesforce.soap.partner.DescribeGlobalResult dgr = sfMgr.getStub()
.describeGlobal(dg, sessionHeader, null, null).getResult();
dgsrs = dgr.getSobjects();
}
}
} else {
salesforceAPI.resetAllProxy();
salesforceAPI.setProxy(proxyHost, proxyPort, proxyUsername, proxyPassword, httpProxy, socksProxy, httpsProxy);
Token token = salesforceAPI.login(endPointForAuth, consumerKey, consumeSecret, callbackHost, callbackPort,
salesforceVersion, tokenProperties, timeOut);
if (token != null) {
org.talend.salesforce.SforceManagement sfMgr = new org.talend.salesforce.SforceManagementImpl();
OAuthClient client = new OAuthClient();
client.setBaseOAuthURL(endPointForAuth);
client.setCallbackHost(callbackHost);
client.setCallbackPort(Integer.parseInt(callbackPort));
client.setClientID(consumerKey);
client.setClientSecret(consumeSecret);
String endpoint = client.getSOAPEndpoint(token, salesforceVersion);
boolean result = sfMgr.login(token.getAccess_token(), endpoint, Integer.parseInt(timeOut), false);
SessionHeader sessionHeader = sfMgr.getSessionHeader();
DescribeGlobal dg = new DescribeGlobal();
com.salesforce.soap.partner.DescribeGlobalResult dgr = sfMgr.getStub()
.describeGlobal(dg, sessionHeader, null, null).getResult();
dgsrs = dgr.getSobjects();
}
}
INode node = getSalesforceNode();
salesforceAPI.resetAllProxy();
INode node = getSalesforceNode();
List list = new ArrayList();
List list = new ArrayList();
IElementParameter modulesNameParam = node.getElementParameter("MODULENAME"); //$NON-NLS-1$
Object[] modulename = modulesNameParam.getListItemsValue();
if (modulename != null && modulename.length > 1) {
for (int i = 0; i < modulename.length - 1; i++) {
list.add(i, modulename[i]);
}
}
if (types != null && types.length > 0) {
for (int j = 0; j < types.length; j++) {
if (!list.contains(types[j])) {
list.add(types[j]);
IElementParameter modulesNameParam = node.getElementParameter("MODULENAME"); //$NON-NLS-1$
Object[] modulename = modulesNameParam.getListItemsValue();
if (modulename != null && modulename.length > 1) {
for (int i = 0; i < modulename.length - 1; i++) {
list.add(i, modulename[i]);
}
}
}
if (dgsrs != null && dgsrs.length > 0) {
for (DescribeGlobalSObjectResult dsResult : dgsrs) {
String name = dsResult.getName();
if (!list.contains(name)) {
list.add(name);
if (types != null && types.length > 0) {
for (int j = 0; j < types.length; j++) {
if (!list.contains(types[j])) {
list.add(types[j]);
}
}
}
if (dgsrs != null && dgsrs.length > 0) {
for (int k = 0; k < dgsrs.length; k++) {
DescribeGlobalSObjectResult dsResult = dgsrs[k];
String name = dsResult.getName();
if (!list.contains(name)) {
list.add(name);
}
}
}
// createAllItems(false, list);
return list;
} catch (Exception ex) {
ExceptionHandler.process(ex);
return null;
}
// createAllItems(false, list);
return list;
}

View File

@@ -25,7 +25,6 @@ import org.eclipse.swt.widgets.Control;
import org.eclipse.ui.navigator.CommonNavigator;
import org.eclipse.ui.navigator.CommonViewer;
import org.talend.core.model.general.Project;
import org.talend.core.model.properties.Item;
import org.talend.repository.model.ProjectRepositoryNode;
import org.talend.repository.model.RepositoryNode;
import org.talend.repository.model.nodes.IProjectRepositoryNode;
@@ -157,10 +156,6 @@ public abstract class FolderListenerSingleTopContentProvider extends SingleTopLe
return workspaceRelativePath;
}
protected boolean isLinkedTopNode(RepositoryNode topLevelNode, Item item) {
return false;
}
/**
* DOC sgandon Comment method "getTopLevelNodeProjectRelativePath".
*

View File

@@ -255,8 +255,7 @@ public abstract class ProjectRepoAbstractContentProvider extends FolderListenerS
for (final RepositoryNode repoNode : topLevelNodes) {
IPath workspaceTopNodePath = getWorkspaceTopNodePath(repoNode);
if ((workspaceTopNodePath != null && workspaceTopNodePath.isPrefixOf(itemPath))
|| isLinkedTopNode(repoNode, item)) {
if (workspaceTopNodePath != null && workspaceTopNodePath.isPrefixOf(itemPath)) {
Display.getDefault().asyncExec(new Runnable() {
@Override

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.talend</groupId>
<artifactId>org.talend.tos</artifactId>
<version>5.5.1</version>
<version>5.4.2</version>
</parent>
<dependencies>

View File

@@ -184,8 +184,7 @@ public final class ConnectionUtils {
}
/**
* add ";shutdown=true" to the end of the hsql url when the url don't contain it. this is only used for hsql
* database.(ConnectionUtils.isHsql(url))
* add ";shutdown=true" to the end of the hsql url when the url don't contain it.
*
* @param url
* @param AdditionalParams
@@ -193,7 +192,8 @@ public final class ConnectionUtils {
*/
public static String addShutDownForHSQLUrl(String url, String AdditionalParams) {
String dbUrl = url;
if (AdditionalParams.indexOf(SHUTDOWN_PARAM) == -1) {
boolean isHSQL = ConnectionUtils.isHsql(dbUrl);
if (isHSQL && AdditionalParams.indexOf(SHUTDOWN_PARAM) == -1) {
dbUrl = dbUrl + SHUTDOWN_PARAM;
}
return dbUrl;
@@ -225,10 +225,6 @@ public final class ConnectionUtils {
return url != null && url.startsWith("jdbc:teradata"); //$NON-NLS-1$
}
public static boolean isVertica(String url) {
return url != null && url.startsWith("jdbc:vertica"); //$NON-NLS-1$
}
/**
* Method "isValid".
*
@@ -382,32 +378,6 @@ public final class ConnectionUtils {
return result;
}
public static boolean isOracleForSid(DatabaseMetaData metadata, String oracleProduct) throws SQLException {
if (metadata != null && metadata.getDatabaseProductName() != null
&& metadata.getDatabaseProductName().indexOf(oracleProduct) > -1) {
return true;
}
return false;
}
/**
*
* DOC Comment method "isExasol".
*
* @param metadata
* @return
* @throws SQLException
*/
public static boolean isExasol(DatabaseMetaData metadata) throws SQLException {
if (metadata != null && metadata.getDriverName() != null
&& metadata.getDriverName().toLowerCase().startsWith("exasol") //$NON-NLS-1$
&& metadata.getDatabaseProductName() != null
&& metadata.getDatabaseProductName().toLowerCase().startsWith("exasol")) { //$NON-NLS-1$
return true;
}
return false;
}
/**
* yyi 2010-08-25 for 14851, Sybase DB has several names with different productions and versions. For example the
* Sybase IQ with version 12.6 is called 'Sybase' getting by JDBC but the version 15+ it is changed to 'Sybase IQ'.

View File

@@ -11,8 +11,8 @@
<artifactId>org.talend.tis-shared</artifactId>
=======
<artifactId>org.talend.tos</artifactId>
>>>>>>> release/5.5.1/tos
<version>5.5.1</version>
>>>>>>> release/5.4.2/tos
<version>5.4.2</version>
<packaging>pom</packaging>
@@ -26,7 +26,7 @@
<module>org.talend.remote.jobserver.commons</module>
=======
<module>org.talend.utils</module>
>>>>>>> release/5.5.1/tos
>>>>>>> release/5.4.2/tos
</modules>
<distributionManagement>
@@ -49,7 +49,7 @@
<id>tos-snapshots</id>
<name>TOS snapshots Repository</name>
<url>${tos.snapshots.repo.url}</url>
>>>>>>> release/5.5.1/tos
>>>>>>> release/5.4.2/tos
</snapshotRepository>
</distributionManagement>