Compare commits

...

41 Commits

Author SHA1 Message Date
chmyga
a6810bf733 chore(TDI-46267): bump connectors to 1.23.0-SNAPSHOT (#4364)
Co-authored-by: Dmytro Chmyga <dmytro.chmyga@globallogic.com>
2021-06-09 14:13:34 +03:00
Xilai Dai
84f6219d5f APPINT-32986 update the pom_project_template.xml (#4361) 2021-06-09 18:23:07 +08:00
Chao MENG
42f187c8e2 fix(TUP-31796): Pictures in welcom page are missing (#4360)
https://jira.talendforge.org/browse/TUP-31796
2021-06-09 16:31:30 +08:00
sbliu
1560e256dc feat(TUP-30489) add progress bar to indicate install patch. (#4347) 2021-06-08 14:46:03 +08:00
sbliu
59207f8232 fix(TUP-31252) Improve the data collector for AMC. (#4350) 2021-06-08 14:25:49 +08:00
Xilai Dai
82966f5ba8 chore(APPINT-32986) upgrade commons-io to 2.8.0 (#4345) 2021-06-07 18:01:47 +08:00
bhe-talendbj
03cdc22509 fix(TUP-31729): add missing com.fasterxml.jackson:jackon-bom:2.10.0:pom and com.fasterxml.jackson:oss-parent:38:pom (#4348) 2021-06-07 17:26:03 +08:00
Emmanuel GALLOIS
43fc091be8 feat(TDI-46215): bump component-runtime to 1.33.1 (#4338) 2021-06-07 09:44:44 +02:00
bhe-talendbj
4c41c51472 fix(TUP-31580): execlude other dependencies (#4352) 2021-06-07 15:11:58 +08:00
bhe-talendbj
c226736a04 fix(TUP-31580): replace javascriptengine (#4334) (#4344)
* fix(TUP-31580): replace javascriptengine

* fix(TUP-31580): fix test case

* fix(TUP-31580): fix test case
2021-06-04 09:58:22 +08:00
sbliu
79da2594c0 feat(TUP-30834) remove extra tag. (#4343) 2021-06-03 12:20:59 +08:00
sbliu
13f057c49e feat(TUP-30834) fix unit test failure introduced by api change. (#4341) 2021-06-03 10:55:59 +08:00
sbliu
32e8d6357d feat(TUP-30834) add 2 axis2 module to classpath and export package. (#4317)
add 2 axis2 modules to the classpath and export the package.
replace axis1 with axis2 for S56MDMConnetionHelper
remove salesforce code & dependency from TUP; adapt the 'create salesforce connection' toolbar action into a generic salesforce connection creation action.
2021-06-03 10:34:04 +08:00
zshen-talend
b267b0dc76 fix(TDQ-19437): tDqReportRun failed when dbconneciton is context mode (#4316) 2021-06-02 07:27:40 +00:00
bhe-talendbj
b51103d01b chore(TUP-31095): Use prepareStatement instead of plain sql execution (#4248) (#4331)
* fix(TUP-31095): initial impl

* fix(TUP-31095): initial impl

* chore(TUP-31095): use prepared statement

* chore(TUP-31095): fix param index

* fix(TUP-31095): fix parameter index
2021-06-01 15:13:32 +08:00
jiezhang-tlnd
ccdbc33869 fix(TUP-31527)CVE lucene-queries (#4295)
* fix(TUP-31527)CVE lucene-queries

* CVE: lucene-queries:4.10.4

* CVE: lucene-queries:4.10.4

* remove plugin org.talend.libraries.apache.lucene4

* remove plugin org.talend.libraries.apache.lucene4

* remove org.talend.libraries.apache.lucene4
2021-05-31 17:14:09 +08:00
hcyi
8560039492 fix(TUP-31553):Hadoop Metadata Wizard when using custom distro dialog (#4307)
box doesn't pop up to import dependencies.
2021-05-28 14:37:21 +08:00
Jane Ding
5fbdc38ebf fix(TUP-31316):Error connecting to Azure SQL database with Azure Active (#4271) (#4319)
* fix(TUP-31316):Error connecting to Azure SQL database with Azure Active
directory method
https://jira.talendforge.org/browse/TUP-31316

* fix(TUP-31316):Error connecting to Azure SQL database with Azure Active
directory method
https://jira.talendforge.org/browse/TUP-31316
2021-05-26 17:46:49 +08:00
zyuan-talend
93be98ff85 feat(TUP-30343):have the "Export Dependencies" option checked by default (#4312) (#4314) 2021-05-26 14:32:56 +08:00
bhe-talendbj
b5f39fba54 fix(TUP-31380): port (#4306) 2021-05-26 10:56:49 +08:00
bhe-talendbj
e18b7dfbf5 fix(TUP-31473): add missing parent poms (#4298) 2021-05-26 10:55:57 +08:00
Jane Ding
280be6425f fix(TUP-30849):Improve build Job performance (#4310)
https://jira.talendforge.org/browse/TUP-30849
2021-05-25 18:43:43 +08:00
Jane Ding
ef22ca920f feat(TUP-31117):Improve performances related to recursive jobs (#4292)
* feat(TUP-31117):Improve performances related to recursive jobs
https://jira.talendforge.org/browse/TUP-31117

* feat(TUP-31117):Improve performances related to recursive jobs
https://jira.talendforge.org/browse/TUP-31117

* feat(TUP-31117):Improve performances related to recursive jobs
https://jira.talendforge.org/browse/TUP-31117
2021-05-25 17:43:00 +08:00
jiezhang-tlnd
7b1d49ae69 fix(TUP-31164)Guess schema button on the informix tDBinput component (#4308)
returns zero length datatype
https://jira.talendforge.org/browse/TUP-31164
2021-05-25 15:21:41 +08:00
jiezhang-tlnd
03de72105c chore(TUP-30461)CVE plexus-utils (#4272)
* chore(TUP-30461)CVE plexus-utils

* remove plexus-utils-3.0.17

* remove from classpath
2021-05-24 15:53:00 +08:00
hcyi
c1398000c1 feat(TUP-30619):Missing Additional parameters in SAP Connection. (#4285)
* feat(TUP-30619):Missing Additional parameters in SAP Connection.

* feat(TUP-30619):update key for Additional parameters in SAP Connection.
2021-05-24 09:52:01 +08:00
kjwang
454d6ecd84 Kjwang/fix tup 31227 studio populate unstaged changes (#4277) (#4302)
TUP-31227 Studio populate UnstagedChanges for Global Routines and SQL
Templates when opening a new Feature Branch
https://jira.talendforge.org/browse/TUP-31227
2021-05-21 14:27:41 +08:00
Chao MENG
7b5f7b3935 feat(TUP-30475): studio lite (#4299)
https://jira.talendforge.org/browse/TUP-30475

Add missing org.talend.migrationTool.nl
2021-05-19 15:33:33 +02:00
Chao MENG
0a1ebc33ac feat(TUP-30475): studio lite (#4286)
https://jira.talendforge.org/browse/TUP-30475
2021-05-18 14:43:36 +08:00
vdrokov
5271d9bdb8 APPINT-32987: Fix dublicate variable (#4279) 2021-05-13 12:08:56 +03:00
sbliu
7b484be397 fix(TUP-31060): store real routine id in case recreated routine with same name. 2021-05-13 10:57:32 +08:00
Jane Ding
895635738f chore(APPINT-32936):CVE:Upgrade commons-codec-1.11 to 1.15 (#4260)
https://jira.talendforge.org/browse/APPINT-32936
2021-05-12 13:59:45 +08:00
hzhao-talendbj
e53ee0fe2e chore(TUP-30255): Upgrade httpclient-4.5.5,4.5.7 to 4.5.13 (#4237) 2021-05-10 18:13:47 +08:00
Denis Sergent
562018e483 Revert "APPINT-32905: Issue with Rest service flow (#4264)" (#4275)
This reverts commit 5f1c19871c.
2021-05-10 11:47:43 +02:00
Jane Ding
03f01de063 fix(TUP-31237):Invalid username or password when creating a Snowflake (#4232) (#4273)
* fix(TUP-31237):Invalid username or password when creating a Snowflake
Metadata Connection with a Snowflake password that has a slash character
https://jira.talendforge.org/browse/TUP-31237

* fix(TUP-31237):Invalid username or password when creating a Snowflake
Metadata Connection with a Snowflake password that has a slash character
https://jira.talendforge.org/browse/TUP-31237
2021-05-10 16:55:54 +08:00
Jane Ding
123a1934e2 chore(TUP-31030):Update CXF to 3.3.10 (#4239)
* chore(TUP-31030):Update CXF to 3.3.10
https://jira.talendforge.org/browse/TUP-31030

* chore(TUP-31030):Update CXF to 3.3.10
https://jira.talendforge.org/browse/TUP-31030
2021-05-10 09:46:42 +08:00
mbasiuk-talend
a38c309608 chore(TDI-46032): bump connectors version to 1.22.0-SNAPSHOT (#4270) 2021-05-08 17:09:13 +08:00
hzhao-talendbj
0eddc3b1b9 fix(TUP-31248):impala metadata connection issues for Cloudera (#4246) (#4252)
* fix(TUP-31248):impala metadata connection issues for Cloudera

* TUP-31248 revert mistaken removal

* TUP-31248 remove some useless code

* TUP-31248 add connection info to log when connection failed
2021-05-07 15:46:08 +08:00
kjwang
afe5d2e74b Feat:TUP-30377 Move the "Allow specific characters (UTF8,...)" (#4238) (#4266)
Feat: TUP-30377 Move the "Allow specific characters (UTF8,...)" preference
setting to the project settings.
https://jira.talendforge.org/browse/TUP-30377
2021-05-06 17:22:35 +08:00
jiezhang-tlnd
c3277931c1 fix(TUP-30451)CVE: (#4256)
maven-shared-utils-3.0.0.jar,maven-shared-utils-3.0.1.jar,maven-shared-utils-3.1.0.jar
2021-05-06 11:01:46 +08:00
vdrokov
5f1c19871c APPINT-32905: Issue with Rest service flow (#4264) 2021-05-05 17:28:43 +03:00
144 changed files with 1396 additions and 11372 deletions

View File

@@ -62,6 +62,7 @@
<plugin id="org.talend.metadata.managment.ui" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.metadata.managment.ui.nl" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.migrationTool" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.migrationTool.nl" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.model" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.model.edit" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.model.edit.nl" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>

View File

@@ -57,11 +57,4 @@
version="0.0.0"
unpack="true"/>
<plugin
id="org.talend.libraries.apache.lucene4"
download-size="0"
install-size="0"
version="0.0.0"
unpack="true"/>
</feature>

View File

@@ -0,0 +1,56 @@
package org.talend.commons.runtime.service;
import java.util.Collection;
import java.util.Collections;
import java.util.Properties;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.talend.commons.exception.CommonExceptionHandler;
public interface ICollectDataService {
final String KEY_SOURCE = "source";
//
final String AMC_FILE_TYPE_USED = "FILE_TYPE_USED";
final String AMC_DATABASE_TYPE_USED = "DATABASE_TYPE_USED";
final String AMC_PREVIEW_KEY = "amc.datasource";
final String AMC_PREVIEW_FILEVALUE = "File";
final String AMC_PREVIEW_DATABASEVALUE = "Database";
/**
* @return json string
*/
String getCollectedDataJSON();
Properties getCollectedData();
public static ICollectDataService getInstance(String from) throws Exception {
BundleContext bc = FrameworkUtil.getBundle(ICollectDataService.class).getBundleContext();
Collection<ServiceReference<ICollectDataService>> tacokitServices = Collections.emptyList();
try {
tacokitServices = bc.getServiceReferences(ICollectDataService.class, null);
} catch (InvalidSyntaxException e) {
CommonExceptionHandler.process(e);
}
if (tacokitServices != null) {
for (ServiceReference<ICollectDataService> sr : tacokitServices) {
if (from == null || from.equals(sr.getProperty(KEY_SOURCE))) {
ICollectDataService tacokitService = bc.getService(sr);
if (tacokitService != null) {
return tacokitService;
}
}
}
}
return null;
}
}
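
The new ICollectDataService interface resolves its implementation through the OSGi service registry and filters candidates by the source property. A minimal, hypothetical caller might look like the sketch below; the "amc" key reuses the value seen later in AMCUsageTokenCollector, everything else is illustrative.

import org.talend.commons.runtime.service.ICollectDataService;

// Hypothetical caller; assumes an implementation is registered as an OSGi
// service with the property source=amc, as the lookup above expects.
public class AmcUsageReporter {

    public String readAmcUsageJson() {
        try {
            ICollectDataService service = ICollectDataService.getInstance("amc");
            if (service == null) {
                return "{}"; // no collector registered for this source
            }
            return service.getCollectedDataJSON();
        } catch (Exception e) {
            // getInstance declares Exception; treat a failed lookup as "no data"
            return "{}";
        }
    }
}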

View File

@@ -12,6 +12,8 @@
// ============================================================================
package org.talend.commons.runtime.service;
import org.eclipse.core.runtime.IProgressMonitor;
/**
* DOC ggu class global comment. Detailled comment
*/
@@ -19,6 +21,10 @@ public interface P2InstallComponent {
boolean install();
default boolean install(IProgressMonitor monitor) {
return false;
}
boolean needRelaunch();
String getInstalledMessages();
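
The added default method lets existing implementations ignore progress reporting while newer ones override it. A hypothetical implementor, assuming the interface has only the methods visible in this diff:

import org.eclipse.core.runtime.IProgressMonitor;
import org.talend.commons.runtime.service.P2InstallComponent;

// Illustrative only; the real patch-install component behind TUP-30489 is not in this diff.
public class ExamplePatchInstaller implements P2InstallComponent {

    @Override
    public boolean install() {
        return install(null); // no progress reporting available
    }

    @Override
    public boolean install(IProgressMonitor monitor) {
        if (monitor != null) {
            monitor.beginTask("Installing patch", 1);
        }
        try {
            // ... download and apply the patch here ...
            return true;
        } finally {
            if (monitor != null) {
                monitor.done();
            }
        }
    }

    @Override
    public boolean needRelaunch() {
        return true;
    }

    @Override
    public String getInstalledMessages() {
        return "Patch installed";
    }
}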

View File

@@ -16,7 +16,6 @@ import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -109,11 +108,11 @@ public class DB2ForZosDataBaseMetadata extends PackageFakeDatabaseMetadata {
// MOD yyin 2012-05-15 TDQ-5190
String sql = "SELECT DISTINCT CREATOR FROM SYSIBM.SYSTABLES"; //$NON-NLS-1$
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
rs = stmt.executeQuery();
while (rs.next()) {
String creator = rs.getString("CREATOR"); //$NON-NLS-1$
@@ -331,18 +330,20 @@ public class DB2ForZosDataBaseMetadata extends PackageFakeDatabaseMetadata {
public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
throws SQLException {
// for real
String sql = "SELECT * FROM SYSIBM.SYSCOLUMNS where TBNAME='" + tableNamePattern + "' AND TBCREATOR = '" //$NON-NLS-1$ //$NON-NLS-2$
+ schemaPattern + "' ORDER BY TBCREATOR, TBNAME, COLNO"; //$NON-NLS-1$
String sql = "SELECT * FROM SYSIBM.SYSCOLUMNS where TBNAME=? AND TBCREATOR = ? ORDER BY TBCREATOR, TBNAME, COLNO"; //$NON-NLS-1$
// for test
// String sql = "SELECT * FROM SYSIBM.SYSCOLUMNS where NAME='NAME'";
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
stmt.setString(1, tableNamePattern);
stmt.setString(2, schemaPattern);
rs = stmt.executeQuery();
while (rs.next()) {
// For real db2 for zos, should use these code.
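
The TUP-31095 changes in this and the following metadata classes apply one pattern throughout: put ? placeholders in the SQL and bind the values through PreparedStatement instead of concatenating them into the query string. A self-contained sketch of that pattern, reusing the SYSIBM.SYSCOLUMNS query from this diff (the JDBC URL and credentials are placeholders):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class PreparedStatementExample {

    // Table and schema values are bound as parameters, never concatenated
    // into the SQL text, which is the point of the TUP-31095 change.
    static void printColumns(Connection connection, String schema, String table) throws SQLException {
        String sql = "SELECT * FROM SYSIBM.SYSCOLUMNS WHERE TBNAME = ? AND TBCREATOR = ? ORDER BY COLNO";
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            stmt.setString(1, table);
            stmt.setString(2, schema);
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getString("NAME"));
                }
            }
        }
    }

    public static void main(String[] args) throws SQLException {
        // Placeholder connection details; any JDBC driver on the classpath will do.
        try (Connection connection = DriverManager.getConnection("jdbc:db2://localhost:50000/SAMPLE", "user", "password")) {
            printColumns(connection, "SYSIBM", "SYSTABLES");
        }
    }
}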

View File

@@ -25,7 +25,6 @@ public class JtdsDatabaseMetadata extends PackageFakeDatabaseMetadata {
@Override
public ResultSet getSchemas() throws SQLException {
java.sql.Statement statement = connection.createStatement();
String sql;
if (((PackageFakeDatabaseMetadata) connection).getDatabaseMajorVersion() >= 9) {
sql = JDBC3 ? "SELECT name AS TABLE_SCHEM, NULL as TABLE_CATALOG FROM " + connection.getCatalog() + ".sys.schemas"
@@ -36,6 +35,7 @@ public class JtdsDatabaseMetadata extends PackageFakeDatabaseMetadata {
}
sql += " ORDER BY TABLE_SCHEM";
return statement.executeQuery(sql);
java.sql.PreparedStatement statement = connection.prepareStatement(sql);
return statement.executeQuery();
}
}

View File

@@ -13,9 +13,9 @@
package org.talend.commons.utils.database;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -152,15 +152,19 @@ public class SAPHanaDataBaseMetadata extends FakeDatabaseMetaData {
// check if the type is contained is in the types needed.
String sqlcv = "SELECT OBJECT_NAME,PACKAGE_ID FROM _SYS_REPO.ACTIVE_OBJECT WHERE OBJECT_SUFFIX = 'calculationview'"; //$NON-NLS-1$
if (tableNamePattern != null && !tableNamePattern.equals("%")) { //$NON-NLS-1$
sqlcv += " AND (OBJECT_NAME LIKE '" + tableNamePattern + "'"; //$NON-NLS-1$ //$NON-NLS-2$
sqlcv += " OR PACKAGE_ID LIKE '" + tableNamePattern + "')"; //$NON-NLS-1$ //$NON-NLS-2$
sqlcv += " AND (OBJECT_NAME LIKE ?"; //$NON-NLS-1$ //$NON-NLS-2$
sqlcv += " OR PACKAGE_ID LIKE ? )"; //$NON-NLS-1$ //$NON-NLS-2$
}
ResultSet rscv = null;
Statement stmtcv = null;
PreparedStatement stmtcv = null;
List<String[]> listcv = new ArrayList<String[]>();
try {
stmtcv = connection.createStatement();
rscv = stmtcv.executeQuery(sqlcv);
stmtcv = connection.prepareStatement(sqlcv);
if (tableNamePattern != null && !tableNamePattern.equals("%")) {
stmtcv.setString(1, tableNamePattern);
stmtcv.setString(2, tableNamePattern);
}
rscv = stmtcv.executeQuery();
while (rscv.next()) {
String objectName = rscv.getString("OBJECT_NAME"); //$NON-NLS-1$
if (objectName != null) {
@@ -303,11 +307,11 @@ public class SAPHanaDataBaseMetadata extends FakeDatabaseMetaData {
if (!load) {
String sqlcv = "SELECT * from \"" + schemaPattern + "\".\"" + tableNamePattern + "\""; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
ResultSet rscv = null;
Statement stmtcv = null;
PreparedStatement stmtcv = null;
List<String[]> listcv = new ArrayList<String[]>();
try {
stmtcv = connection.createStatement();
rscv = stmtcv.executeQuery(sqlcv);
stmtcv = connection.prepareStatement(sqlcv);
rscv = stmtcv.executeQuery();
int i = 1;
while (rscv.next()) {
String tableName = tableNamePattern;

View File

@@ -13,9 +13,9 @@
package org.talend.commons.utils.database;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -62,11 +62,11 @@ public class SASDataBaseMetadata extends FakeDatabaseMetaData {
// see the feature 5827
String sql = "SELECT DISTINCT LIBNAME FROM SASHELP.VTABLE"; //$NON-NLS-1$
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
rs = stmt.executeQuery();
while (rs.next()) {
String creator = rs.getString("LIBNAME"); //$NON-NLS-1$
@@ -147,17 +147,21 @@ public class SASDataBaseMetadata extends FakeDatabaseMetaData {
public ResultSet getTables(String catalog, String schema, String tableNamePattern, String[] types) throws SQLException {
String sql;
if (schema != null) {
sql = "SELECT * FROM SASHELP.VTABLE where LIBNAME = '" + schema + "'"; //$NON-NLS-1$ //$NON-NLS-2$
sql = "SELECT * FROM SASHELP.VTABLE where LIBNAME = ?"; //$NON-NLS-1$ //$NON-NLS-2$
} else {
sql = "SELECT * FROM SASHELP.VTABLE"; //$NON-NLS-1$
}
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
if (schema != null) {
stmt.setString(1, schema);
}
rs = stmt.executeQuery();
while (rs.next()) {
String name = rs.getString("MEMNAME"); //$NON-NLS-1$
@@ -229,18 +233,20 @@ public class SASDataBaseMetadata extends FakeDatabaseMetaData {
public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
throws SQLException {
// for real
String sql = "SELECT * FROM SASHELP.VCOLUMN where MEMNAME='" + tableNamePattern + "' AND LIBNAME = '" //$NON-NLS-1$ //$NON-NLS-2$
+ schemaPattern + "' ORDER BY LIBNAME, MEMNAME, VARNUM"; //$NON-NLS-1$
String sql = "SELECT * FROM SASHELP.VCOLUMN where MEMNAME=? AND LIBNAME = ? ORDER BY LIBNAME, MEMNAME, VARNUM"; //$NON-NLS-1$
// for test
// String sql = "SELECT * FROM SYSIBM.SYSCOLUMNS where NAME='NAME'";
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
stmt.setString(1, tableNamePattern);
stmt.setString(2, schemaPattern);
rs = stmt.executeQuery();
while (rs.next()) {
String tableName = rs.getString("MEMNAME"); //$NON-NLS-1$
if (tableName != null) {

View File

@@ -13,9 +13,9 @@
package org.talend.commons.utils.database;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -52,10 +52,12 @@ public class Sybase16SADatabaseMetaData extends SybaseDatabaseMetaData {
for (String catalogName : catList) {
String sql = createSqlByLoginAndCatalog(login, catalogName);
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
stmt.setString(1, login);
rs = stmt.executeQuery();
while (rs.next()) {
int temp = rs.getInt(1);
@@ -113,8 +115,7 @@ public class Sybase16SADatabaseMetaData extends SybaseDatabaseMetaData {
*/
protected String createSqlByLoginAndCatalog(String loginName, String catalogName) {
String sql = "select count(*) from " + catalogName
+ ".dbo.sysusers where suid in (select suid from "+catalogName+".dbo.syslogins where name = '" + loginName
+ "')";
+ ".dbo.sysusers where suid in (select suid from " + catalogName + ".dbo.syslogins where name = ? )";
return sql;
}

View File

@@ -13,9 +13,9 @@
package org.talend.commons.utils.database;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -54,10 +54,13 @@ public class SybaseDatabaseMetaData extends PackageFakeDatabaseMetadata {
for (String catalogName : catList) {
String sql = createSqlByLoginAndCatalog(login, catalogName);
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
stmt.setString(1, login);
stmt.setString(2, login);
rs = stmt.executeQuery();
while (rs.next()) {
int temp = rs.getInt(1);
@@ -92,11 +95,11 @@ public class SybaseDatabaseMetaData extends PackageFakeDatabaseMetadata {
public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException {
String sql = "SELECT DISTINCT name FROM " + catalog + ".dbo.sysusers where suid > 0"; //$NON-NLS-1$ //$NON-NLS-2$
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
rs = stmt.executeQuery();
while (rs.next()) {
String name = rs.getString("name"); //$NON-NLS-1$
@@ -136,9 +139,9 @@ public class SybaseDatabaseMetaData extends PackageFakeDatabaseMetadata {
*/
protected String createSqlByLoginAndCatalog(String loginName, String catalogName) {
return "select count(*) from " + catalogName //$NON-NLS-1$
+ ".dbo.sysusers where suid in (select suid from master.dbo.syslogins where name = '" + loginName //$NON-NLS-1$
+ "') or suid in (select altsuid from " + catalogName //$NON-NLS-1$
+ ".dbo.sysalternates a, master.dbo.syslogins b where b.name = '" + loginName + "' and a.suid = b.suid)"; //$NON-NLS-1$ //$NON-NLS-2$
+ ".dbo.sysusers where suid in (select suid from master.dbo.syslogins where name = ?"
+ ") or suid in (select altsuid from " + catalogName //$NON-NLS-1$
+ ".dbo.sysalternates a, master.dbo.syslogins b where b.name = ? and a.suid = b.suid)"; //$NON-NLS-1$ //$NON-NLS-2$
}
@Override

View File

@@ -13,9 +13,9 @@
package org.talend.commons.utils.database;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -97,13 +97,12 @@ public class TeradataDataBaseMetadata extends FakeDatabaseMetaData {
int dbMajorVersion = connection.getMetaData().getDatabaseMajorVersion();
String sql = "HELP COLUMN \"" + schema + "\".\"" + table + "\".* ";//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
String columnName = null;
List<String[]> list = new ArrayList<String[]>();
try {
if (dbMajorVersion > 12) {
sql = "SELECT * from DBC.INDICESV WHERE UPPER(databasename) = UPPER('" + schema //$NON-NLS-1$
+ "') AND UPPER(tablename) = UPPER('" + table + "') AND UPPER(UniqueFlag) = UPPER('Y')"; //$NON-NLS-1$//$NON-NLS-2$
sql = "SELECT * from DBC.INDICESV WHERE UPPER(databasename) = UPPER(?) AND UPPER(tablename) = UPPER(?) AND UPPER(UniqueFlag) = UPPER('Y')"; //$NON-NLS-1$ //$NON-NLS-2$
rs = getResultSet(catalog, schema, table, sql);
while (rs.next()) {
columnName = rs.getString("ColumnName").trim(); //$NON-NLS-1$
@@ -112,8 +111,11 @@ public class TeradataDataBaseMetadata extends FakeDatabaseMetaData {
list.add(r);
}
} else {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
stmt.setString(1, schema);
stmt.setString(2, table);
rs = stmt.executeQuery();
while (rs.next()) {
columnName = rs.getString("Column Name").trim(); //$NON-NLS-1$
String pk = rs.getString("Primary?");//$NON-NLS-1$
@@ -138,10 +140,10 @@ public class TeradataDataBaseMetadata extends FakeDatabaseMetaData {
public ResultSet getResultSet(String catalog, String schema, String table, String sql) throws SQLException {
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
rs = stmt.executeQuery();
} catch (SQLException e) {
throw new RuntimeException(e);
}
@@ -203,17 +205,15 @@ public class TeradataDataBaseMetadata extends FakeDatabaseMetaData {
sysTable = "DBC.TABLESV";//$NON-NLS-1$
}
if (types != null && types.length > 0) {
sql = "SELECT * from " + sysTable + " WHERE UPPER(databasename) = UPPER('" + database //$NON-NLS-1$//$NON-NLS-2$
+ "') AND tablekind " + addTypesToSql(types); //$NON-NLS-1$
sql = "SELECT * from " + sysTable + " WHERE UPPER(databasename) = UPPER(?) AND tablekind " + addTypesToSql(types); //$NON-NLS-1$
} else {
// When the types is empty, all the tables and views will be retrieved.
sql = "SELECT * from " + sysTable + " WHERE UPPER(databasename) = UPPER('" + database //$NON-NLS-1$//$NON-NLS-2$
+ "') AND (tablekind = 'T' or tablekind = 'V')"; //$NON-NLS-1$
sql = "SELECT * from " + sysTable + " WHERE UPPER(databasename) = UPPER(?) AND (tablekind = 'T' or tablekind = 'V')"; //$NON-NLS-1$
}
// add the filter for table/views
if (!StringUtils.isEmpty(tableNamePattern)) {
sql = sql + " AND tablename LIKE '" + tableNamePattern + "'";//$NON-NLS-1$//$NON-NLS-2$
sql = sql + " AND tablename LIKE ?";//$NON-NLS-1$ //$NON-NLS-2$
}
if (types != null && types.length > 0) {
@@ -223,11 +223,18 @@ public class TeradataDataBaseMetadata extends FakeDatabaseMetaData {
}
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
stmt.setString(1, database);
// add the filter for table/views
if (!StringUtils.isEmpty(tableNamePattern)) {
stmt.setString(2, tableNamePattern);
}
rs = stmt.executeQuery();
while (rs.next()) {
String name = rs.getString("TableName").trim(); //$NON-NLS-1$
@@ -326,21 +333,33 @@ public class TeradataDataBaseMetadata extends FakeDatabaseMetaData {
if (!StringUtils.isEmpty(database)) {
sql = "HELP COLUMN \"" + database + "\".\"" + tableNamePattern + "\".* ";//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
if (dbMajorVersion > 12) {
sql = "SELECT * from DBC.COLUMNSV WHERE UPPER(databasename) = UPPER('" + database //$NON-NLS-1$
+ "') AND UPPER(tablename) = UPPER('" + tableNamePattern + "')" + " Order by tablename "; //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
sql = "SELECT * from DBC.COLUMNSV WHERE UPPER(databasename) = UPPER(?) AND UPPER(tablename) = UPPER(?)" //$NON-NLS-1$
+ " Order by tablename "; //$NON-NLS-1$ //$NON-NLS-3$
}
} else {
sql = "HELP COLUMN \"" + tableNamePattern + "\".* ";//$NON-NLS-1$//$NON-NLS-2$
if (dbMajorVersion > 12) {
sql = "SELECT * from DBC.COLUMNSV WHERE UPPER(tablename) = UPPER('" + tableNamePattern + "')" + " Order by tablename "; //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
sql = "SELECT * from DBC.COLUMNSV WHERE UPPER(tablename) = UPPER(?)" + " Order by tablename "; //$NON-NLS-1$//$NON-NLS-2$
// //$NON-NLS-3$
}
}
ResultSet rs = null;
Statement stmt = null;
PreparedStatement stmt = null;
List<String[]> list = new ArrayList<String[]>();
try {
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
stmt = connection.prepareStatement(sql);
if (!StringUtils.isEmpty(database)) {
if (dbMajorVersion > 12) {
stmt.setString(1, database);
stmt.setString(2, tableNamePattern);
}
} else {
if (dbMajorVersion > 12) {
stmt.setString(1, tableNamePattern);
}
}
rs = stmt.executeQuery();
while (rs.next()) {
String tableName = tableNamePattern;
String columnName = null;

View File

@@ -12,11 +12,14 @@
// ============================================================================
package org.talend.core.repository.model;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -623,8 +626,7 @@ public abstract class AbstractEMFRepositoryFactory extends AbstractRepositoryFac
stream.close();
byte[] currentContent = item.getContent().getInnerContent();
if (!Arrays.equals(innerContent, currentContent)) {
if (!isSameStringContent(innerContent, currentContent)) {
item.getContent().setInnerContent(innerContent);
Project project = getRepositoryContext().getProject();
save(project, item);
@@ -641,6 +643,44 @@ public abstract class AbstractEMFRepositoryFactory extends AbstractRepositoryFac
throw new PersistenceException(ioe);
}
}
protected boolean isSameStringContent(byte[] data1, byte[] data2) throws IOException {
boolean isSame = true;
BufferedReader br1 = null, br2 = null;
try {
br1 = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data1), StandardCharsets.UTF_8.toString()));
br2 = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(data2), StandardCharsets.UTF_8.toString()));
String line1 = null, line2 = null;
while (isSame) {
line1 = br1.readLine();
line2 = br2.readLine();
if ((line1 == null && line2 == null)) {
break;
}
if (!StringUtils.equals(line1, line2)) {
isSame = false;
break;
}
}
} finally {
if (br1 != null) {
try {
br1.close();
} catch (IOException e) {
ExceptionHandler.process(e);
}
}
if (br2 != null) {
try {
br2.close();
} catch (IOException e) {
ExceptionHandler.process(e);
}
}
}
return isSame;
}
private void createSQLPattern(URL url, String sqlPatternLabel, String categoryName) throws PersistenceException {
if (url == null) {
@@ -693,8 +733,7 @@ public abstract class AbstractEMFRepositoryFactory extends AbstractRepositoryFac
stream.close();
byte[] currentContent = item.getContent().getInnerContent();
if (!Arrays.equals(innerContent, currentContent)) {
if (!isSameStringContent(innerContent, currentContent)) {
item.getContent().setInnerContent(innerContent);
Project project = getRepositoryContext().getProject();
save(project, item);
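
The new isSameStringContent helper compares the two payloads line by line as UTF-8 text, so byte-level differences that do not change any line (a trailing newline, CRLF vs LF endings) no longer trigger a re-save, unlike the previous Arrays.equals check. A small standalone check of that behaviour, with java.util.Objects.equals standing in for commons-lang StringUtils so the sketch has no external dependencies:

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Objects;

public class SameStringContentCheck {

    // Same line-by-line comparison as isSameStringContent above.
    static boolean isSameStringContent(byte[] data1, byte[] data2) throws IOException {
        try (BufferedReader br1 = new BufferedReader(
                new InputStreamReader(new ByteArrayInputStream(data1), StandardCharsets.UTF_8));
             BufferedReader br2 = new BufferedReader(
                new InputStreamReader(new ByteArrayInputStream(data2), StandardCharsets.UTF_8))) {
            String line1;
            String line2;
            do {
                line1 = br1.readLine();
                line2 = br2.readLine();
                if (!Objects.equals(line1, line2)) {
                    return false;
                }
            } while (line1 != null || line2 != null);
            return true;
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] crlf = "routine A\r\nroutine B\r\n".getBytes(StandardCharsets.UTF_8);
        byte[] lf = "routine A\nroutine B".getBytes(StandardCharsets.UTF_8);
        // Different bytes, identical lines: Arrays.equals would report a change,
        // the line-based comparison does not.
        System.out.println(isSameStringContent(crlf, lf)); // prints true
    }
}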

View File

@@ -1,7 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src/main/java"/>
<classpathentry exported="true" kind="lib" path="lib/resty-0.3.2.jar" />
<classpathentry exported="true" kind="lib" path="lib/delight-rhino-sandbox-0.0.15.jar"/>
<classpathentry exported="true" kind="lib" path="lib/rhino-1.7.13.jar"/>
<classpathentry exported="true" kind="lib" path="lib/resty-0.3.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/json_simple-1.1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>

View File

@@ -130,5 +130,7 @@ Bundle-Activator: org.talend.core.runtime.CoreRuntimePlugin
Bundle-ActivationPolicy: lazy
Bundle-ClassPath: .,
lib/resty-0.3.2.jar,
lib/json_simple-1.1.jar
lib/json_simple-1.1.jar,
lib/delight-rhino-sandbox-0.0.15.jar,
lib/rhino-1.7.13.jar
Eclipse-RegisterBuddy: org.talend.testutils

View File

@@ -12,7 +12,9 @@ bin.includes = META-INF/,\
lib/,\
talend_metadata_columns_schema.xsd,\
talend_targetschema_columns_schema.xsd,\
dist/
dist/,\
lib/delight-rhino-sandbox-0.0.15.jar,\
lib/rhino-1.7.13.jar
src.includes = META-INF/,\
mappingMetadataTypes.xml,\
mappings/,\

View File

@@ -9,19 +9,61 @@
</parent>
<artifactId>org.talend.core.runtime</artifactId>
<packaging>eclipse-plugin</packaging>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.javadelight/delight-rhino-sandbox -->
<dependency>
<groupId>org.javadelight</groupId>
<artifactId>delight-rhino-sandbox</artifactId>
<version>0.0.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.mozilla/rhino -->
<dependency>
<groupId>org.mozilla</groupId>
<artifactId>rhino</artifactId>
<version>1.7.13</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.2</version>
<executions>
<execution>
<id>default-deploy</id>
<phase>deploy</phase>
</execution>
</executions>
</plugin>
</plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>generate-sources</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<outputDirectory>${project.basedir}/lib</outputDirectory>
<artifactItems>
<artifactItem>
<groupId>org.javadelight</groupId>
<artifactId>delight-rhino-sandbox</artifactId>
<version>0.0.15</version>
</artifactItem>
<artifactItem>
<groupId>org.mozilla</groupId>
<artifactId>rhino</artifactId>
<version>1.7.13</version>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.2</version>
<executions>
<execution>
<id>default-deploy</id>
<phase>deploy</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -22,14 +22,14 @@ import org.talend.core.runtime.hd.hive.HiveMetadataHelper;
*/
public enum EImpalaDriver {
HIVE2("HIVE2", "HIVE2", "org.apache.hive.jdbc.HiveDriver"),
IMPALA40("IMPALA40", "IMPALA40", "com.cloudera.impala.jdbc4.Driver"),
IMPALA41("IMPALA41", "IMPALA41", "com.cloudera.impala.jdbc41.Driver");
HIVE2("HIVE2", "HIVE2", "org.apache.hive.jdbc.HiveDriver", "doSupportHive2"),
IMPALA("IMPALA", "IMPALA", "com.cloudera.impala.jdbc.Driver", "doSupportImpalaConnector");
EImpalaDriver(String displayName, String name, String driver) {
EImpalaDriver(String displayName, String name, String driver, String supportDriverMethodName) {
this.displayName = displayName;
this.name = name;
this.driver = driver;
this.supportDriverMethodName = supportDriverMethodName;
}
private String displayName;
@@ -38,6 +38,8 @@ public enum EImpalaDriver {
private String driver;
private String supportDriverMethodName;
public String getDisplayName() {
return displayName;
}
@@ -50,6 +52,10 @@ public enum EImpalaDriver {
return driver;
}
public String getSupportDriverMethodName() {
return supportDriverMethodName;
}
public static boolean isSupport(String distribution, String version, boolean byDisplay, String supportMethodName) {
return HiveMetadataHelper.doSupportMethod(distribution, version, byDisplay, supportMethodName);
}
@@ -57,7 +63,7 @@ public enum EImpalaDriver {
public static String[] getImpalaDriverDisplay(String distribution, String version, boolean byDisplay) {
List<String> list = new ArrayList<>(0);
for (EImpalaDriver driver : EImpalaDriver.values()) {
if (isSupport(distribution, version, byDisplay, "doSupportImpalaConnector")) {
if (isSupport(distribution, version, byDisplay, driver.getSupportDriverMethodName())) {
list.add(driver.getDisplayName());
}
}
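
The TUP-31248 fix above comes down to two things: each enum constant now carries the name of its own support-check method, and the loop consults the constant it is iterating over instead of a hard-coded value. A standalone, simplified illustration (names reduced to what the diff shows):

// Simplified stand-in for EImpalaDriver; the Talend distribution checks are omitted.
public enum ExampleDriver {
    HIVE2("org.apache.hive.jdbc.HiveDriver", "doSupportHive2"),
    IMPALA("com.cloudera.impala.jdbc.Driver", "doSupportImpalaConnector");

    private final String driverClass;
    private final String supportMethodName;

    ExampleDriver(String driverClass, String supportMethodName) {
        this.driverClass = driverClass;
        this.supportMethodName = supportMethodName;
    }

    public String getSupportMethodName() {
        return supportMethodName;
    }

    public static void main(String[] args) {
        for (ExampleDriver driver : ExampleDriver.values()) {
            // Before the fix the check always used "doSupportImpalaConnector";
            // now each constant supplies its own method name.
            System.out.println(driver + " -> " + driver.getSupportMethodName());
        }
    }
}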

View File

@@ -228,8 +228,10 @@ public class DatabaseConnStrUtil {
.get(ConnParameterKeys.IMPALA_AUTHENTICATION_PRINCIPLA));
}
String url = null;
if (template.startsWith(DbConnStrForHive.URL_HIVE_2_TEMPLATE)) {
url = getImpalaURLString(false, server, port, sidOrDatabase, impalaPrincipal);
String driver = dbConn.getParameters().get(ConnParameterKeys.IMPALA_DRIVER);
if (template.startsWith(DbConnStrForHive.URL_HIVE_2_TEMPLATE)
|| template.startsWith(DbConnStrForHive.URL_IMPALA_TEMPLATE)) {
url = getImpalaURLString(false, server, port, sidOrDatabase, impalaPrincipal, driver);
}
url = attachAdditionalHiveParameters(url, dbConn, false);
return url;
@@ -376,9 +378,13 @@ public class DatabaseConnStrUtil {
return s;
}
private static String getImpalaURLString(boolean supportContext, String server, String port, String sid, String Principal) {
String s = EDatabaseConnTemplate.IMPALA.getUrlTemplate(null);
String standardURlString = getImpalaURlString(s, supportContext, server, port, sid);
private static String getImpalaURLString(boolean supportContext, String server, String port, String sid, String Principal,
String driver) {
String template = EDatabaseConnTemplate.IMPALA.getUrlTemplate(null);
if (EDatabaseTypeName.IMPALA.getDbType().equals(driver)) {
template = EDatabaseConnTemplate.IMPALA_IMPALA_DRIVER.getUrlTemplate(null);
}
String standardURlString = getImpalaURlString(template, supportContext, server, port, sid);
String principalSuffix = "principal="; //$NON-NLS-1$
boolean hasPrinc = false;
String[] urlArray = standardURlString.split(SEMICOLON);

View File

@@ -30,6 +30,8 @@ public class DbConnStrForHive extends DbConnStr {
public static final String URL_HIVE_2_TEMPLATE = "jdbc:hive2://";//$NON-NLS-1$
public static final String URL_IMPALA_TEMPLATE = "jdbc:impala://";//$NON-NLS-1$
/**
* DOC Marvin DbConnStrForHive constructor comment.
*

View File

@@ -214,6 +214,9 @@ public enum EDatabaseConnTemplate {
IMPALA(new DbConnStr(EDatabaseTypeName.IMPALA, "jdbc:hive2://<host>:<port>/<sid>;auth=noSasl", //$NON-NLS-1$
"21050", null, null, "localhost", "default")), //$NON-NLS-1$
IMPALA_IMPALA_DRIVER(new DbConnStr(EDatabaseTypeName.IMPALA, "jdbc:impala://<host>:<port>/<sid>;auth=noSasl", //$NON-NLS-1$
"21050", null, null, "localhost", "default")), //$NON-NLS-1$
HBASE(new DbConnStr(EDatabaseTypeName.HBASE, "127.0.0.1", //$NON-NLS-1$
"2181")), //$NON-NLS-1$

View File

@@ -84,7 +84,11 @@ public enum EDatabaseVersion4Drivers {
MSSQL_2012(new DbVersion4Drivers(EDatabaseTypeName.MSSQL,
"Microsoft SQL Server 2012", "Microsoft SQL Server 2012", "jtds-1.3.1-patch-20190523.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
MSSQL_PROP(new DbVersion4Drivers(EDatabaseTypeName.MSSQL,
"Microsoft", "MSSQL_PROP", "mssql-jdbc.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"Microsoft", "MSSQL_PROP", //$NON-NLS-1$ //$NON-NLS-2$
new String[] { "mssql-jdbc.jar", "slf4j-api-1.7.25.jar", "slf4j-log4j12-1.7.25.jar", "adal4j-1.6.5.jar", //$NON-NLS-1$
"commons-lang3-3.10.jar", "commons-codec-1.14.jar", "gson-2.8.6.jar", "oauth2-oidc-sdk-6.5.jar",
"json-smart-2.4.2.jar", "nimbus-jose-jwt-8.11.jar", "javax.mail-1.6.2.jar", "log4j-1.2.17.jar",
"accessors-smart-1.1.jar", "asm-5.0.3.jar" })),
VERTICA_9(new DbVersion4Drivers(EDatabaseTypeName.VERTICA, "VERTICA 9.X", "VERTICA_9_0", "vertica-jdbc-9.3.1-0.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
VERTICA_7_1_X(new DbVersion4Drivers(EDatabaseTypeName.VERTICA, "VERTICA 7.1.X (Deprecated)", "VERTICA_7_1_X", "vertica-jdbc-7.1.2-0.jar")), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$

View File

@@ -26,11 +26,6 @@ public enum ECustomVersionGroup {
MAPRDB,
PIG,
PIG_HBASE,
PIG_HCATALOG,
MAP_REDUCE,
SPARK,

View File

@@ -18,12 +18,6 @@ public enum ECustomVersionType {
MAPRDB("Maprdb", ECustomVersionGroup.MAPRDB), //$NON-NLS-1$
PIG("Pig", ECustomVersionGroup.PIG), //$NON-NLS-1$
PIG_HBASE("Pig for HBase", ECustomVersionGroup.PIG_HBASE), //$NON-NLS-1$
PIG_HCATALOG("Pig for Hcatalog", ECustomVersionGroup.PIG_HCATALOG), //$NON-NLS-1$
MAP_REDUCE("Map Reduce", ECustomVersionGroup.MAP_REDUCE), //$NON-NLS-1$
SPARK("Spark", ECustomVersionGroup.SPARK), //$NON-NLS-1$

View File

@@ -483,7 +483,7 @@ public class HadoopCustomVersionDefineDialog extends TitleAreaDialog {
private boolean isSupportHadoop() {
if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopService.class)) {
hadoopService = (IHadoopService) GlobalServiceRegister.getDefault().getService(IHadoopService.class);
hadoopService = GlobalServiceRegister.getDefault().getService(IHadoopService.class);
}
return hadoopService != null;
@@ -503,11 +503,7 @@ public class HadoopCustomVersionDefineDialog extends TitleAreaDialog {
private ECustomVersionType[] filterTypes(Object[] types) {
Object[] filteredTypes = ArrayUtils.removeElement(types, ECustomVersionType.ALL);
IDesignerCoreService designerCoreService = CoreRuntimePlugin.getInstance().getDesignerCoreService();
INode node = designerCoreService.getRefrenceNode("tPigLoad"); //$NON-NLS-1$
if (node == null) {
filteredTypes = ArrayUtils.removeElement(filteredTypes, ECustomVersionType.PIG);
}
node = designerCoreService.getRefrenceNode("tMRConfiguration", ComponentCategory.CATEGORY_4_MAPREDUCE.getName());//$NON-NLS-1$
INode node = designerCoreService.getRefrenceNode("tMRConfiguration", ComponentCategory.CATEGORY_4_MAPREDUCE.getName());//$NON-NLS-1$
if (node == null) {
filteredTypes = ArrayUtils.removeElement(filteredTypes, ECustomVersionType.MAP_REDUCE);
}

View File

@@ -425,7 +425,7 @@ public class HadoopVersionDialog extends TitleAreaDialog {
if (isFromExistVersion) {
IHadoopService hadoopService = null;
if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopService.class)) {
hadoopService = (IHadoopService) GlobalServiceRegister.getDefault().getService(IHadoopService.class);
hadoopService = GlobalServiceRegister.getDefault().getService(IHadoopService.class);
}
if (hadoopService != null) {
for (ECustomVersionGroup group : existVersionSelectionMap.keySet()) {
@@ -437,10 +437,7 @@ public class HadoopVersionDialog extends TitleAreaDialog {
for (ECustomVersionType type : types) {
if (type.getGroup() == group) {
Set<String> hadoopLibraries = new HashSet<String>();
if (ECustomVersionType.PIG == type || ECustomVersionType.PIG_HBASE == type
|| ECustomVersionType.PIG_HCATALOG == type) {
hadoopLibraries = getLibrariesForPig(type);
} else if (ECustomVersionType.MAP_REDUCE == type) {
if (ECustomVersionType.MAP_REDUCE == type) {
hadoopLibraries = getLibrariesForMapReduce(type);
} else if (ECustomVersionType.SPARK == type || ECustomVersionType.SPARK_STREAMING == type) {
hadoopLibraries = getLibrariesForSpark(type);
@@ -542,42 +539,6 @@ public class HadoopVersionDialog extends TitleAreaDialog {
return neededLibraries;
}
private Set<String> getLibrariesForPig(ECustomVersionType type) {
Set<String> neededLibraries = new HashSet<String>();
INode node = CoreRuntimePlugin.getInstance().getDesignerCoreService().getRefrenceNode("tPigLoad");//$NON-NLS-1$
IElementParameter elementParameter = node.getElementParameter("MAPREDUCE");//$NON-NLS-1$
if (elementParameter != null) {
elementParameter.setValue(true);
}
elementParameter = node.getElementParameter("DISTRIBUTION");//$NON-NLS-1$
if (elementParameter != null) {
elementParameter.setValue(distribution);
}
elementParameter = node.getElementParameter("PIG_VERSION");//$NON-NLS-1$
if (elementParameter != null) {
elementParameter.setValue(version);
}
elementParameter = node.getElementParameter("LOAD");//$NON-NLS-1$
if (elementParameter != null) {
if (ECustomVersionType.PIG_HBASE == type) {
elementParameter.setValue("HBASESTORAGE");//$NON-NLS-1$
} else if (ECustomVersionType.PIG_HCATALOG == type) {
elementParameter.setValue("HCATLOADER");//$NON-NLS-1$
}
}
List<ModuleNeeded> modulesNeeded = node.getModulesNeeded();
for (ModuleNeeded module : modulesNeeded) {
if (module.isRequired(node.getElementParameters())) {
neededLibraries.add(module.getModuleName());
}
}
return neededLibraries;
}
public Map<ECustomVersionType, Map<String, Object>> getTypeConfigurations() {
return this.typeConfigurations;
}

View File

@@ -106,4 +106,6 @@ public interface ISAPConstant {
public static final String PROP_DB_USERNAME = "db.username";//$NON-NLS-1$
public static final String PROP_DB_PASSWORD = "db.password";//$NON-NLS-1$
}
public static final String PROP_DB_ADDITIONAL_PROPERTIES = "db.additionalProperties";//$NON-NLS-1$
}

View File

@@ -281,9 +281,7 @@ public final class MetadataToolHelper {
}
public static boolean isAllowSpecificCharacters() {
IEclipsePreferences coreUIPluginNode = new InstanceScope().getNode(ITalendCorePrefConstants.CoreUIPlugin_ID);
return coreUIPluginNode
.getBoolean(IRepositoryPrefConstants.ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS, false);
return CoreRuntimePlugin.getInstance().getProjectPreferenceManager().isAllowSpecificCharacters();
}
/**

View File

@@ -32,9 +32,7 @@ import org.talend.core.database.EDatabaseTypeName;
import org.talend.core.database.conn.ConnParameterKeys;
import org.talend.core.database.conn.template.EDatabaseConnTemplate;
import org.talend.core.database.conn.version.EDatabaseVersion4Drivers;
import org.talend.core.hadoop.IHadoopClusterService;
import org.talend.core.hadoop.repository.HadoopRepositoryUtil;
import org.talend.core.hadoop.version.custom.ECustomVersionGroup;
import org.talend.core.language.ECodeLanguage;
import org.talend.core.language.LanguageManager;
import org.talend.core.model.metadata.EMetadataEncoding;
@@ -443,6 +441,13 @@ public class RepositoryToComponentProperty {
} else {
return TalendQuoteUtils.addQuotes(connection.getValue(dbPassword, false));
}
} else if ("SAPHANA_PROPERTIES_STRING".equals(value)) { //$NON-NLS-1$
String dbParameters = TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES, connection);
if (isContextMode(connection, dbParameters)) {
return dbParameters;
} else {
return TalendQuoteUtils.addQuotes(dbParameters);
}
}
return null;
}
@@ -1532,22 +1537,6 @@ public class RepositoryToComponentProperty {
}
if (value.equals("HADOOP_CUSTOM_JARS")) {
if (targetComponent != null && targetComponent.startsWith("tPig")) {
// for pig component
String clusterID = connection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID);
if (clusterID != null) {
if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopClusterService.class)) {
IHadoopClusterService hadoopClusterService = GlobalServiceRegister.getDefault()
.getService(IHadoopClusterService.class);
Map<String, String> hadoopCustomLibraries = hadoopClusterService.getHadoopCustomLibraries(clusterID);
if (EDatabaseTypeName.HBASE.getDisplayName().equals(connection.getDatabaseType())) {
return hadoopCustomLibraries.get(ECustomVersionGroup.PIG_HBASE.getName()) == null ? ""
: hadoopCustomLibraries.get(ECustomVersionGroup.PIG_HBASE.getName());
}
}
}
}
return connection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CUSTOM_JARS);
}

View File

@@ -52,4 +52,6 @@ public interface IRepositoryPrefConstants {
public static final String ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS = "allow_specific_characters_for_schema_columns";
public static final String REF_PROJECT_BRANCH_SETTING = "ref_project_branch_setting";
public static final String ITEM_EXPORT_DEPENDENCIES = "item_export_dependencies";
}

View File

@@ -21,14 +21,13 @@ import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.script.Bindings;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import org.apache.commons.lang.StringUtils;
import org.apache.oro.text.regex.MalformedPatternException;
import org.apache.oro.text.regex.PatternCompiler;
@@ -36,8 +35,7 @@ import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import org.apache.oro.text.regex.Perl5Substitution;
import org.apache.oro.text.regex.Util;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import org.eclipse.core.runtime.preferences.InstanceScope;
import org.eclipse.core.runtime.Platform;
import org.talend.commons.utils.PasswordEncryptUtil;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.language.ECodeLanguage;
@@ -50,14 +48,16 @@ import org.talend.core.model.metadata.types.JavaTypesManager;
import org.talend.core.model.process.IContext;
import org.talend.core.model.process.IContextManager;
import org.talend.core.model.process.IContextParameter;
import org.talend.core.model.repository.IRepositoryPrefConstants;
import org.talend.core.prefs.ITalendCorePrefConstants;
import org.talend.core.runtime.CoreRuntimePlugin;
import org.talend.core.runtime.services.IGenericDBService;
import org.talend.core.utils.TalendQuoteUtils;
import org.talend.designer.core.model.utils.emf.talendfile.ContextParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.ContextType;
import org.talend.repository.model.RepositoryConstants;
import delight.rhinosandox.RhinoSandbox;
import delight.rhinosandox.RhinoSandboxes;
/**
* Utilities to work with IContextParamet objects. <br/>
*
@@ -84,6 +84,12 @@ public final class ContextParameterUtils {
private static final String NON_CONTEXT_PATTERN = "[^a-zA-Z0-9_]"; //$NON-NLS-1$
private static final RhinoSandbox SANDBOX = RhinoSandboxes.create();
private static final Map<String, Object> CTX_VARS_LAST = new HashMap<String, Object>();
private static ReadWriteLock CTX_VARS_LOCK = new ReentrantReadWriteLock();
/**
* Constructs a new ContextParameterUtils.
*/
@@ -206,10 +212,37 @@ public final class ContextParameterUtils {
}
}
private static ScriptEngine engine = new ScriptEngineManager().getEngineByName("JavaScript");
public static ScriptEngine getScriptEngine() {
return engine;
private static String preProcessScript(String script) {
String newCode = script;
CTX_VARS_LOCK.readLock().lock();
try {
Set<Entry<String, Object>> entries = CTX_VARS_LAST.entrySet();
for (Entry<String, Object> entry : entries) {
String val = entry.getValue().toString();
if (entry.getValue() instanceof String) {
val = "\"" + val.replace("\"", "\\\"") + "\"";
}
newCode = newCode.replace(JAVA_NEW_CONTEXT_PREFIX + entry.getKey(), val);
}
} finally {
CTX_VARS_LOCK.readLock().unlock();
}
return newCode;
}
public static boolean isValidLiteralValue(String value) {
String newCode = preProcessScript(value);
try {
SANDBOX.eval(null, newCode);
return true;
} catch (Exception e) {
// ignore
}
return false;
}
public static String convertContext2Literal4AnyVar(final String code, final IContext context) {
@@ -223,23 +256,22 @@ public final class ContextParameterUtils {
Object result = code;
if (engine == null) {
engine = new ScriptEngineManager().getEngineByName("JavaScript");
Map<String, Object> varMap = getVarMapForScriptEngine(context);
CTX_VARS_LOCK.writeLock().lock();
try {
CTX_VARS_LAST.clear();
CTX_VARS_LAST.putAll(varMap);
} finally {
CTX_VARS_LOCK.writeLock().unlock();
}
if (engine == null) {
throw new RuntimeException("can't find the script engine");
}
String newCode = preProcessScript(code);
Bindings binding = engine.getBindings(ScriptContext.ENGINE_SCOPE);
if (binding != null) {
binding.clear();
Map<String, Object> varMap = getVarMapForScriptEngine(context);
binding.put("context", varMap);
}
try {
String replacement = " ";
result = engine.eval(code.replace("\r\n", replacement).replace("\n", replacement).replace("\r", replacement));
result = SANDBOX.eval(null,
newCode.replace("\r\n", replacement).replace("\n", replacement).replace("\r", replacement));
} catch (Exception e) {
// ignore the exception
}
@@ -617,8 +649,12 @@ public final class ContextParameterUtils {
}
private static boolean isAllowSpecificCharacters() {
IEclipsePreferences coreUIPluginNode = new InstanceScope().getNode(ITalendCorePrefConstants.CoreUIPlugin_ID);
return coreUIPluginNode.getBoolean(IRepositoryPrefConstants.ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS, false);
if (Platform.isRunning()) {
return CoreRuntimePlugin.getInstance().getProjectPreferenceManager().isAllowSpecificCharacters();
} else {
// Can not get the value if current code is not working in studio
return false;
}
}
public static boolean isEmptyParameter(String source) {
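
This diff swaps the built-in javax.script JavaScript engine for the sandboxed Rhino evaluator added to the classpath by the earlier pom.xml and MANIFEST changes (delight-rhino-sandbox 0.0.15 and rhino 1.7.13). A standalone sketch of evaluating a context expression in the sandbox, following the same substitution idea as preProcessScript; the variable name is illustrative.

import delight.rhinosandox.RhinoSandbox;
import delight.rhinosandox.RhinoSandboxes;

public class SandboxEvalExample {

    public static void main(String[] args) {
        // Requires delight-rhino-sandbox 0.0.15 and rhino 1.7.13 on the classpath,
        // the same versions the pom.xml diff above pulls in.
        RhinoSandbox sandbox = RhinoSandboxes.create();

        // Mimics preProcessScript: the context variable reference is replaced by its
        // literal value before evaluation ("context.port" is an illustrative name).
        String code = "context.port + 10".replace("context.port", "8080");

        // eval(sourceName, script), as called in the diff above; sourceName may be null.
        Object result = sandbox.eval(null, code);
        System.out.println(result); // 8090 (Rhino may report it as a floating-point Number)
    }
}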

View File

@@ -27,9 +27,6 @@ import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.components.ComponentCategory;
import org.talend.core.model.components.IComponent;
@@ -418,6 +415,52 @@ public class NodeUtil {
return conns;
}
/**
* DOC
* <p>
* The method searches for the incoming node connections of type
* on a processing path and returns the first ones only
* </p>
*
* @param node
* @param type - node type to look for
* @return
*/
public static List<? extends IConnection> getFirstIncomingLineConnectionsOfType(INode node, String type) {
if (type == null)
return new ArrayList<IConnection>();
Set<String> uniqueNamesDone = new HashSet<String>();
List<? extends IConnection> allIncomingConnections = getFirstIncomingLineConnectionsOfType(node, uniqueNamesDone, type);
return allIncomingConnections;
}
private static List<? extends IConnection> getFirstIncomingLineConnectionsOfType(INode node, Set<String> uniqueNamesDone, String type) {
List<IConnection> conns = new ArrayList<IConnection>();
List<? extends IConnection> incomingConnections = node.getIncomingConnections();
if (incomingConnections != null) {
for (int i = 0; i < incomingConnections.size(); i++) {
IConnection connection = incomingConnections.get(i);
INode nextNode = connection.getSource();
if (!uniqueNamesDone.contains(nextNode.getUniqueName())) {
uniqueNamesDone.add(nextNode.getUniqueName());
if (type.equals((String)nextNode.getElementParameter("COMPONENT_NAME").getValue())) {
conns.add(connection);
} else {
conns.addAll(getFirstIncomingLineConnectionsOfType(nextNode, uniqueNamesDone, type)); // follow this way
}
}
}
}
return conns;
}
public static INode getFirstMergeNode(INode node) {
INode mergeNode = null;
for (IConnection connection : node.getOutgoingConnections()) {
@@ -1040,14 +1083,7 @@ public class NodeUtil {
}
private static boolean isValidLiteralValue(String value) {
ScriptEngine se = ContextParameterUtils.getScriptEngine();
if(se==null) return true;
try {
se.eval(value);
return true;
} catch (ScriptException e) {
return false;
}
return ContextParameterUtils.isValidLiteralValue(value);
}
private static String checkStringQuotationMarks(String str) {

View File

@@ -126,6 +126,9 @@ public abstract class AbstractPropertyValueEvaluator implements PropertyValueEva
}
if (GenericTypeUtils.isStringType(property)) {
if (property.isFlag(Property.Flags.ENCRYPT)) {
return TalendQuoteUtils.removeQuotes(stringValue);
}
return TalendQuoteUtils.removeQuotes(StringEscapeUtils.unescapeJava(stringValue));
}
return rawValue;

View File

@@ -48,6 +48,8 @@ public interface MavenConstants {
static final String EXCLUDE_DELETED_ITEMS = "EXCLUDE_DELETED_ITEMS";
static final String SKIP_LOOP_DEPENDENCY_CHECK = "SKIP_LOOP_DEPENDENCY_CHECK";
static final String SKIP_FOLDERS = "SKIP_FOLDERS";
/*

View File

@@ -15,6 +15,7 @@ package org.talend.core.runtime.projectsetting;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ProjectScope;
import org.eclipse.core.runtime.Assert;
@@ -27,6 +28,7 @@ import org.talend.commons.exception.PersistenceException;
import org.talend.commons.utils.workbench.resources.ResourceUtils;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.model.general.Project;
import org.talend.core.model.repository.IRepositoryPrefConstants;
import org.talend.designer.runprocess.IRunProcessService;
import org.talend.repository.ProjectManager;
import org.talend.repository.documentation.ERepositoryActionName;
@@ -195,7 +197,19 @@ public class ProjectPreferenceManager {
// return qulifierPreference.getBoolean(key, false);
return getPreferenceStore().getBoolean(key);
}
public boolean isAllowSpecificCharacters() {
String value = getValue(IRepositoryPrefConstants.ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS);
if (StringUtils.isNotEmpty(value)) {
return Boolean.valueOf(value);
}
return false; // Default value
}
public void setAllowSpecificCharacters(boolean isAllow) {
setValue(IRepositoryPrefConstants.ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS, isAllow);
}
/**
* Save the configurations.
*/

View File

@@ -150,6 +150,8 @@ public interface IGenericWizardService extends IService {
*/
public ITreeContextualAction getDefaultAction(RepositoryNode node);
public ITreeContextualAction getGenericAction(String typeName, String location);
public void loadAdditionalJDBC();
public List<String> getAllAdditionalJDBCTypes();

View File

@@ -258,6 +258,8 @@ public interface IRunProcessService extends IService {
public boolean isExcludeDeletedItems(Property property);
public boolean getMavenPrefOptionStatus(String prefName);
public static IRunProcessService get() {
if (GlobalServiceRegister.getDefault().isServiceRegistered(IRunProcessService.class)) {
return GlobalServiceRegister.getDefault().getService(IRunProcessService.class);

View File

@@ -42,6 +42,10 @@
id="org.talend.core.ui.token.AdditionalPackageTokenCollector"
name="addtional package">
</provider>
<provider
collector="org.talend.core.ui.token.AMCUsageTokenCollector"
id="AMCUsageTokenCollector">
</provider>
</extension>
<extension

View File

@@ -12,17 +12,17 @@
// ============================================================================
package org.talend.core.ui.preference;
import org.eclipse.jface.preference.BooleanFieldEditor;
import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPreferencePage;
import org.talend.core.model.repository.IRepositoryPrefConstants;
import org.talend.core.ui.CoreUIPlugin;
public class SpecificSettingPreferencePage extends FieldEditorPreferencePage implements IWorkbenchPreferencePage {
public SpecificSettingPreferencePage() {
setPreferenceStore(CoreUIPlugin.getDefault().getPreferenceStore());
setDescription("Specific settings");
noDefaultAndApplyButton();
}
/*
@@ -32,8 +32,6 @@ public class SpecificSettingPreferencePage extends FieldEditorPreferencePage imp
*/
@Override
protected void createFieldEditors() {
addField(new BooleanFieldEditor(IRepositoryPrefConstants.ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS,
"Allow specific characters (UTF8,...) for columns of schemas", getFieldEditorParent()));
}

View File

@@ -14,7 +14,6 @@ package org.talend.core.ui.preference.metadata;
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
import org.eclipse.jface.preference.IPreferenceStore;
import org.talend.core.model.repository.IRepositoryPrefConstants;
import org.talend.core.prefs.ITalendCorePrefConstants;
import org.talend.core.ui.CoreUIPlugin;
@@ -27,8 +26,6 @@ public class MetadataPreferenceInitializer extends AbstractPreferenceInitializer
public void initializeDefaultPreferences() {
IPreferenceStore store = CoreUIPlugin.getDefault().getPreferenceStore();
store.setDefault(ITalendCorePrefConstants.MAXIMUM_AMOUNT_OF_COLUMNS_FOR_XML, 500);
store.setDefault(IRepositoryPrefConstants.ALLOW_SPECIFIC_CHARACTERS_FOR_SCHEMA_COLUMNS, false);
}
}

View File

@@ -16,7 +16,9 @@ import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jface.action.Action;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbenchWindow;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.IService;
import org.talend.core.model.properties.Item;
@@ -28,4 +30,22 @@ public interface IOpenJobScriptActionService extends IService {
public Action getOpenJobScriptAction(IWorkbenchWindow window);
public IFile createWorkspaceLink(IProject fsProject, Item item) throws CoreException;
/**
 * If the given editor is a jobscript editor, set its read-only state.
 *
 * @param editorPart
 * @param readonly
 * @return true if the read-only state was set successfully
 */
public boolean setEditorReadonly(IEditorPart editorPart, boolean readonly);
public static IOpenJobScriptActionService get() {
GlobalServiceRegister gsr = GlobalServiceRegister.getDefault();
if (gsr.isServiceRegistered(IOpenJobScriptActionService.class)) {
return gsr.getService(IOpenJobScriptActionService.class);
}
return null;
}
}

View File

@@ -0,0 +1,58 @@
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.ui.token;
import java.util.Properties;
import org.eclipse.core.runtime.preferences.ConfigurationScope;
import org.eclipse.core.runtime.preferences.DefaultScope;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import org.eclipse.core.runtime.preferences.IScopeContext;
import org.eclipse.core.runtime.preferences.InstanceScope;
import org.talend.commons.runtime.service.ICollectDataService;
import us.monoid.json.JSONObject;
public class AMCUsageTokenCollector extends AbstractTokenCollector {
@Override
public JSONObject collect() throws Exception {
Properties props = new Properties();
ICollectDataService instance = ICollectDataService.getInstance("amc");
if (instance != null) {
props = instance.getCollectedData();
} else {
IScopeContext[] contexts = new IScopeContext[] { InstanceScope.INSTANCE, ConfigurationScope.INSTANCE,
DefaultScope.INSTANCE };
String plugin = "org.talend.amc";
for (IScopeContext context : contexts) {
IEclipsePreferences amc = context.getNode(plugin);
if (amc != null) {
if (amc.getBoolean(ICollectDataService.AMC_FILE_TYPE_USED, false)) {
props.setProperty(ICollectDataService.AMC_PREVIEW_KEY, ICollectDataService.AMC_PREVIEW_FILEVALUE);
} else if (amc.getBoolean(ICollectDataService.AMC_DATABASE_TYPE_USED, false)) {
props.setProperty(ICollectDataService.AMC_PREVIEW_KEY, ICollectDataService.AMC_PREVIEW_DATABASEVALUE);
}
break;
}
}
}
JSONObject finalToken = new JSONObject();
finalToken.put(ICollectDataService.AMC_PREVIEW_KEY, "<Empty>");
for (Object key : props.keySet()) {
finalToken.put((String) key, props.get(key));
}
return finalToken;
}
}

View File
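
The collector above falls back through the Eclipse preference scopes (instance, then configuration, then default) when no ICollectDataService is available. A minimal standalone sketch of that scope-ordered lookup, using a hypothetical plugin id and key rather than the Talend constants:

import org.eclipse.core.runtime.preferences.ConfigurationScope;
import org.eclipse.core.runtime.preferences.DefaultScope;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import org.eclipse.core.runtime.preferences.IScopeContext;
import org.eclipse.core.runtime.preferences.InstanceScope;

public class ScopeFallbackSketch {

    // Reads a boolean flag from the first scope that exposes the plugin's preference node.
    public static boolean readFlag(String pluginId, String key) {
        IScopeContext[] contexts = { InstanceScope.INSTANCE, ConfigurationScope.INSTANCE, DefaultScope.INSTANCE };
        for (IScopeContext context : contexts) {
            IEclipsePreferences node = context.getNode(pluginId);
            if (node != null) {
                return node.getBoolean(key, false);
            }
        }
        return false;
    }
}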

@@ -97,6 +97,7 @@ import org.talend.core.model.routines.RoutinesUtil;
import org.talend.core.model.utils.JavaResourcesHelper;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.core.runtime.CoreRuntimePlugin;
import org.talend.core.runtime.maven.MavenConstants;
import org.talend.core.runtime.process.ITalendProcessJavaProject;
import org.talend.core.runtime.process.LastGenerationInfo;
import org.talend.core.runtime.process.TalendProcessArgumentConstant;
@@ -456,9 +457,14 @@ public class ProcessorUtilities {
continue;
}
ProcessItem processItem = ItemCacheManager.getProcessItem(jobId, subNodeversion);
IDesignerCoreService service = CorePlugin.getDefault().getDesignerCoreService();
IProcess subProcess = service.getProcessFromProcessItem(processItem);
hasLoop = checkProcessLoopDependencies(subProcess, jobId, subNodeversion, pathlink, idToLatestVersion);
if (processItem != null) {
IDesignerCoreService service = CorePlugin.getDefault().getDesignerCoreService();
IProcess subProcess = service.getProcessFromProcessItem(processItem);
if (subProcess != null) {
hasLoop = checkProcessLoopDependencies(subProcess, jobId, subNodeversion, pathlink,
idToLatestVersion);
}
}
if (hasLoop) {
break;
}
@@ -592,10 +598,12 @@ public class ProcessorUtilities {
jobInfo.setProcessor(processor);
if (isMainJob && selectedProcessItem != null) {
Property property = selectedProcessItem.getProperty();
String jobId = ProjectManager.getInstance().getCurrentProject().getTechnicalLabel() + ":" + property.getId();
hasLoopDependency = checkProcessLoopDependencies(currentProcess, jobId, property.getVersion(),
new LinkedList<String>(), new HashMap<String, String>());
if (!IRunProcessService.get().getMavenPrefOptionStatus(MavenConstants.SKIP_LOOP_DEPENDENCY_CHECK)) {
Property property = selectedProcessItem.getProperty();
String jobId = ProjectManager.getInstance().getCurrentProject().getTechnicalLabel() + ":" + property.getId();
hasLoopDependency = checkProcessLoopDependencies(currentProcess, jobId, property.getVersion(),
new LinkedList<String>(), new HashMap<String, String>());
}
// clean the previous code in case it has deleted subjob
cleanSourceFolder(progressMonitor, currentProcess, processor);
}
@@ -1046,9 +1054,11 @@ public class ProcessorUtilities {
}
if (isMainJob && selectedProcessItem != null) {
Property property = selectedProcessItem.getProperty();
hasLoopDependency = checkProcessLoopDependencies(currentProcess, property.getId(), property.getVersion(),
new LinkedList<String>(), new HashMap<String, String>());
if (!IRunProcessService.get().getMavenPrefOptionStatus(MavenConstants.SKIP_LOOP_DEPENDENCY_CHECK)) {
Property property = selectedProcessItem.getProperty();
hasLoopDependency = checkProcessLoopDependencies(currentProcess, property.getId(), property.getVersion(),
new LinkedList<String>(), new HashMap<String, String>());
}
// clean the previous code in case it has deleted subjob
cleanSourceFolder(progressMonitor, currentProcess, processor);
}

View File
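
Both guarded blocks above run checkProcessLoopDependencies only when the new SKIP_LOOP_DEPENDENCY_CHECK preference is off. A rough, self-contained sketch of that kind of path-based loop detection over a job-to-subjobs graph; the types and method below are illustrative, not the Talend implementation:

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class LoopDependencySketch {

    // Returns true if jobId can reach itself again through the subjob graph.
    public static boolean hasLoop(String jobId, Map<String, List<String>> subJobs, LinkedList<String> path) {
        if (path.contains(jobId)) {
            return true; // already on the current path: recursive dependency
        }
        path.addLast(jobId);
        for (String child : subJobs.getOrDefault(jobId, Collections.<String> emptyList())) {
            if (hasLoop(child, subJobs, path)) {
                return true;
            }
        }
        path.removeLast();
        return false;
    }
}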

@@ -26,7 +26,6 @@
<classpathentry exported="true" kind="lib" path="lib/maven-model-builder-3.2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/maven-repository-metadata-3.2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/plexus-interpolation-1.19.jar"/>
<classpathentry exported="true" kind="lib" path="lib/plexus-utils-3.0.17.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src/main/java"/>

View File

@@ -16,7 +16,6 @@ Bundle-ClassPath: .,
lib/maven-model-builder-3.2.1.jar,
lib/maven-repository-metadata-3.2.1.jar,
lib/plexus-interpolation-1.19.jar,
lib/plexus-utils-3.0.17.jar,
lib/commons-codec.jar,
lib/httpclient.jar,
lib/httpcore.jar,

View File

@@ -99,6 +99,22 @@
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.13</version>
<exclusions>
<exclusion>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.15</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version>
</dependency>
</dependencies>
<build>

View File

@@ -62,6 +62,11 @@
<artifactId>maven-shared-utils</artifactId>
<version>3.3.3</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>

View File

@@ -15,6 +15,11 @@
<artifactId>tycho-compiler-jdt</artifactId>
<version>1.6.0</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version>
</dependency>
</dependencies>
<build>
<plugins>

View File

@@ -44,6 +44,12 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>3.0.2</version>
<exclusions>
<exclusion>
<groupId>org.apache.maven.shared</groupId>
<artifactId>maven-shared-utils</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
@@ -65,6 +71,10 @@
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.maven.shared</groupId>
<artifactId>maven-shared-utils</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>

View File

@@ -85,6 +85,12 @@
<groupId>org.talend.components</groupId>
<artifactId>components-salesforce-runtime</artifactId>
<version>${components.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.talend.components</groupId>

View File

@@ -10,7 +10,7 @@
<artifactId>studio-tacokit-dependencies</artifactId>
<packaging>pom</packaging>
<properties>
<tacokit.components.version>1.21.0-SNAPSHOT</tacokit.components.version>
<tacokit.components.version>1.23.0-SNAPSHOT</tacokit.components.version>
</properties>
<repositories>
<repository>
@@ -57,6 +57,12 @@
<groupId>org.talend.components</groupId>
<artifactId>cosmosDB</artifactId>
<version>${tacokit.components.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>

View File

@@ -11,8 +11,8 @@
<packaging>pom</packaging>
<properties>
<tcomp.version>1.32.0</tcomp.version>
<slf4j.version>1.7.25</slf4j.version>
<tcomp.version>1.33.1</tcomp.version>
<slf4j.version>1.7.28</slf4j.version>
</properties>
<repositories>

View File

@@ -15,6 +15,19 @@
<url>https://artifacts-oss.talend.com/nexus/content/repositories/TalendOpenSourceRelease/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.talend.daikon</groupId>
<artifactId>crypto-utils</artifactId>
<version>${org.talend.daikon.crypto-utils.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson</groupId>
<artifactId>jackson-bom</artifactId>
<version>2.10.1</version>
<type>pom</type>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
@@ -42,6 +55,20 @@
</artifactItems>
</configuration>
</execution>
<execution>
<id>copy-dependencies</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<addParentPoms>true</addParentPoms>
<copyPom>true</copyPom>
<includeScope>compile</includeScope>
<outputDirectory>${basedir}/../tmp/repository</outputDirectory>
<useRepositoryLayout>true</useRepositoryLayout>
</configuration>
</execution>
</executions>
</plugin>
<plugin>

View File

@@ -15,4 +15,5 @@ MavenProjectSettingPage.filterExampleMessage=Filter examples:\nlabel=myJob
MavenProjectSettingPage.refModuleText=Set reference project modules in profile
MavenProjectSettingPage.excludeDeletedItems=Exclude deleted items
MavenProjectSettingPage.syncAllPomsWarning=Click the Force full re-synchronize poms button to apply the new settings.
MavenProjectSettingPage.skipFolders=Skip folders
BuildProjectSettingPage.allowRecursiveJobs=Allow recursive jobs (Not supported - for compatibility only)

View File

@@ -12,15 +12,50 @@
// ============================================================================
package org.talend.designer.maven.ui.setting.project.page;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.talend.core.runtime.maven.MavenConstants;
import org.talend.core.runtime.projectsetting.EmptyProjectSettingPage;
import org.talend.designer.maven.DesignerMavenPlugin;
import org.talend.designer.maven.ui.i18n.Messages;
/**
* DOC ggu class global comment. Detailled comment
*/
public class BuildProjectSettingPage extends EmptyProjectSettingPage {
private IPreferenceStore preferenceStore;
private Button allowRecursiveJobsCheckbox;
public BuildProjectSettingPage() {
super();
}
@Override
protected String getPreferenceName() {
return DesignerMavenPlugin.PLUGIN_ID;
}
@Override
protected void createFieldEditors() {
Composite parent = getFieldEditorParent();
parent.setLayout(new GridLayout());
preferenceStore = getPreferenceStore();
allowRecursiveJobsCheckbox = new Button(parent, SWT.CHECK);
allowRecursiveJobsCheckbox.setText(Messages.getString("BuildProjectSettingPage.allowRecursiveJobs")); //$NON-NLS-1$
allowRecursiveJobsCheckbox.setSelection(!preferenceStore.getBoolean(MavenConstants.SKIP_LOOP_DEPENDENCY_CHECK));
}
@Override
public boolean performOk() {
boolean performOk = super.performOk();
if (preferenceStore != null) {
preferenceStore.setValue(MavenConstants.SKIP_LOOP_DEPENDENCY_CHECK, !allowRecursiveJobsCheckbox.getSelection());
}
return performOk;
}
}

View File
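
The new checkbox stores the inverse of its selection: ticking "Allow recursive jobs" clears SKIP_LOOP_DEPENDENCY_CHECK, and unticking it sets the flag. A tiny illustrative round-trip against an in-memory JFace preference store; the string constant below matches the MavenConstants value introduced earlier, the rest is only a sketch:

import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.PreferenceStore;

public class AllowRecursiveJobsSketch {

    private static final String SKIP_LOOP_DEPENDENCY_CHECK = "SKIP_LOOP_DEPENDENCY_CHECK";

    public static void main(String[] args) {
        IPreferenceStore store = new PreferenceStore();
        boolean allowRecursiveJobs = true; // checkbox selection in the settings page
        store.setValue(SKIP_LOOP_DEPENDENCY_CHECK, !allowRecursiveJobs);
        // The loop dependency check runs only when recursive jobs are not allowed.
        System.out.println("skip check: " + store.getBoolean(SKIP_LOOP_DEPENDENCY_CHECK)); // skip check: false
    }
}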

@@ -46,6 +46,11 @@
<artifactId>plexus-utils</artifactId>
<version>3.0.24</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version>
</dependency>
</dependencies>
</plugin>
<plugin>
@@ -63,6 +68,11 @@
<artifactId>maven-shared-utils</artifactId>
<version>3.3.3</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version>
</dependency>
</dependencies>
</plugin>
<plugin>
@@ -117,6 +127,11 @@
<artifactId>commons-compress</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.8.0</version>
</dependency>
</dependencies>
</plugin>
<plugin>

View File

@@ -30,6 +30,7 @@ import org.talend.core.model.properties.Property;
import org.talend.core.runtime.projectsetting.IProjectSettingTemplateConstants;
import org.talend.designer.maven.template.MavenTemplateManager;
import org.talend.designer.maven.utils.PomUtil;
import org.talend.designer.runprocess.IRunProcessService;
/**
* DOC ggu class global comment. Detailled comment
@@ -116,7 +117,13 @@ public abstract class AbstractMavenCodesTemplatePom extends AbstractMavenGeneral
} else {
isDeployed = true;
}
if (ignoreModuleInstallationStatus() || isDeployed) {
boolean isCIMode = false;
if (GlobalServiceRegister.getDefault().isServiceRegistered(IRunProcessService.class)) {
IRunProcessService runProcessService = GlobalServiceRegister.getDefault()
.getService(IRunProcessService.class);
isCIMode = runProcessService.isCIMode();
}
if (isCIMode || ignoreModuleInstallationStatus() || isDeployed) {
dependency = PomUtil.createModuleDependency(module.getMavenUri());
if (module.isExcluded())
dependency.setScope("provided");

View File

@@ -315,6 +315,12 @@ public class PomIdsHelper {
return manager.getBoolean(MavenConstants.EXCLUDE_DELETED_ITEMS);
}
public static boolean getMavenPrefOptionStatus(String prefName) {
String projectTechName = ProjectManager.getInstance().getCurrentProject().getTechnicalLabel();
ProjectPreferenceManager manager = getPreferenceManager(projectTechName);
return manager.getBoolean(prefName);
}
private static String getGroupId(String projectTechName, String baseName, Property property) {
if (projectTechName == null) {
projectTechName = ProjectManager.getInstance().getCurrentProject().getTechnicalLabel();
@@ -393,6 +399,7 @@ public class PomIdsHelper {
if (PluginChecker.isTIS()) {
preferenceStore.setValue(MavenConstants.EXCLUDE_DELETED_ITEMS, true);
}
preferenceStore.setValue(MavenConstants.SKIP_LOOP_DEPENDENCY_CHECK, true);
}
preferenceManager.save();
preferenceManagers.put(projectTechName, preferenceManager);

View File

@@ -1,5 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="lib/axis2-metadata-1.7.9.jar"/>
<classpathentry exported="true" kind="lib" path="lib/axis2-java2wsdl-1.7.9.jar"/>
<classpathentry exported="true" kind="lib" path="lib/axis2-codegen-1.7.9.jar"/>
<classpathentry exported="true" kind="lib" path="lib/woden-api-1.0M9.jar"/>
<classpathentry exported="true" kind="lib" path="lib/axiom-api-1.2.13.jar"/>
<classpathentry exported="true" kind="lib" path="lib/axiom-impl-1.2.13.jar"/>
@@ -11,7 +14,7 @@
<classpathentry exported="true" kind="lib" path="lib/httpcore-4.0.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/neethi-3.0.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/wstx-asl-3.2.9.jar"/>
<classpathentry exported="true" kind="lib" path="lib/xmlschema-core-2.0.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/xmlschema-core-2.2.1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="class"/>

View File

@@ -22,8 +22,11 @@ Bundle-ClassPath: lib/activation-1.1.jar,
lib/mail-1.4.jar,
lib/neethi-3.0.1.jar,
lib/wstx-asl-3.2.9.jar,
lib/xmlschema-core-2.0.1.jar,
lib/xmlschema-core-2.2.1.jar,
lib/woden-api-1.0M9.jar,
lib/axis2-codegen-1.7.9.jar,
lib/axis2-java2wsdl-1.7.9.jar,
lib/axis2-metadata-1.7.9.jar,
.
Export-Package: com.ctc.wstx.api,
com.ctc.wstx.cfg,
@@ -129,6 +132,7 @@ Export-Package: com.ctc.wstx.api,
org.apache.axis2.i18n,
org.apache.axis2.java.security,
org.apache.axis2.jaxrs,
org.apache.axis2.jaxws.description,
org.apache.axis2.jsr181,
org.apache.axis2.modules,
org.apache.axis2.namespace,
@@ -170,6 +174,8 @@ Export-Package: com.ctc.wstx.api,
org.apache.ws.commons.schema.internal,
org.apache.ws.commons.schema.resolver,
org.apache.ws.commons.schema.utils,
org.apache.ws.java2wsdl,
org.apache.ws.java2wsdl.utils,
org.codehaus.stax2,
org.codehaus.stax2.evt,
org.codehaus.stax2.io,

View File

@@ -46,6 +46,31 @@
<artifactId>axis2-transport-local</artifactId>
<version>1.7.9</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2-codegen</artifactId>
<version>1.7.9</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2-metadata</artifactId>
<version>1.7.9</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2-java2wsdl</artifactId>
<version>1.7.9</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2-adb-codegen</artifactId>
<version>1.7.9</version>
</artifactItem>
<artifactItem>
<groupId>org.apache.ws.xmlschema</groupId>
<artifactId>xmlschema-core</artifactId>
<version>2.2.1</version>
</artifactItem>
</artifactItems>
</configuration>
</execution>

View File

@@ -2,28 +2,28 @@
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.activation-1.2.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-wsdl-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-wsdl-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.activation-api-1.2.2.jar"/>
<classpathentry exported="true" kind="lib" path="lib/woodstox-core-6.2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.annotation-api-1.3.5.jar"/>
<classpathentry exported="true" kind="lib" path="lib/stax2-api-4.2.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/neethi-3.1.1.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.ws.rs-api-2.1.6.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-core-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-xml-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxrs-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-rs-client-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-transports-http-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-soap-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-databinding-jaxb-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-features-clustering-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxws-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-simple-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-saml-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-addr-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-policy-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-security-3.3.7.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-core-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-xml-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxrs-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-rs-client-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-transports-http-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-bindings-soap-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-databinding-jaxb-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-features-clustering-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-jaxws-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-frontend-simple-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-security-saml-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-addr-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-policy-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/cxf-rt-ws-security-3.3.10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/javax.ws.rs-api-2.0-m10.jar"/>
<classpathentry exported="true" kind="lib" path="lib/jakarta.xml.bind-api-2.3.3.jar"/>
<classpathentry exported="true" kind="lib" path="lib/xmlschema-core-2.2.5.jar"/>

View File

@@ -8,22 +8,22 @@ Bundle-ClassPath: .,
lib/neethi-3.1.1.jar,
lib/stax2-api-4.2.1.jar,
lib/javax.activation-1.2.0.jar,
lib/cxf-core-3.3.7.jar,
lib/cxf-rt-bindings-xml-3.3.7.jar,
lib/cxf-rt-frontend-jaxrs-3.3.7.jar,
lib/cxf-rt-rs-client-3.3.7.jar,
lib/cxf-rt-transports-http-3.3.7.jar,
lib/cxf-rt-wsdl-3.3.7.jar,
lib/cxf-rt-bindings-soap-3.3.7.jar,
lib/cxf-rt-databinding-jaxb-3.3.7.jar,
lib/cxf-rt-features-clustering-3.3.7.jar,
lib/cxf-rt-frontend-jaxws-3.3.7.jar,
lib/cxf-rt-frontend-simple-3.3.7.jar,
lib/cxf-rt-security-3.3.7.jar,
lib/cxf-rt-security-saml-3.3.7.jar,
lib/cxf-rt-ws-addr-3.3.7.jar,
lib/cxf-rt-ws-policy-3.3.7.jar,
lib/cxf-rt-ws-security-3.3.7.jar,
lib/cxf-core-3.3.10.jar,
lib/cxf-rt-bindings-xml-3.3.10.jar,
lib/cxf-rt-frontend-jaxrs-3.3.10.jar,
lib/cxf-rt-rs-client-3.3.10.jar,
lib/cxf-rt-transports-http-3.3.10.jar,
lib/cxf-rt-wsdl-3.3.10.jar,
lib/cxf-rt-bindings-soap-3.3.10.jar,
lib/cxf-rt-databinding-jaxb-3.3.10.jar,
lib/cxf-rt-features-clustering-3.3.10.jar,
lib/cxf-rt-frontend-jaxws-3.3.10.jar,
lib/cxf-rt-frontend-simple-3.3.10.jar,
lib/cxf-rt-security-3.3.10.jar,
lib/cxf-rt-security-saml-3.3.10.jar,
lib/cxf-rt-ws-addr-3.3.10.jar,
lib/cxf-rt-ws-policy-3.3.10.jar,
lib/cxf-rt-ws-security-3.3.10.jar,
lib/jakarta.ws.rs-api-2.1.6.jar,
lib/jakarta.annotation-api-1.3.5.jar,
lib/woodstox-core-6.2.1.jar,

View File

@@ -23,82 +23,82 @@
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-core</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-bindings-xml</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxrs</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-rs-client</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-transports-http</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-wsdl</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-bindings-soap</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-databinding-jaxb</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-features-clustering</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxws</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-simple</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-security</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-security-saml</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-ws-addr</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-ws-policy</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-ws-security</artifactId>
<version>3.3.7</version>
<version>3.3.10</version>
</dependency>
<dependency>
<groupId>com.sun.activation</groupId>

View File

@@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-analyzers-common-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-core-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-expressions-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-grouping-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-highlighter-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-join-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-memory-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-misc-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-queries-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-queryparser-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-sandbox-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-spatial-4.10.4.jar"/>
<classpathentry exported="true" kind="lib" path="lib/lucene-suggest-4.10.4.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>org.talend.libraries.apache.lucene4</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@@ -1,171 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Apache Lucene4 Library
Bundle-SymbolicName: org.talend.libraries.apache.lucene4
Bundle-Version: 7.4.1.qualifier
Export-Package: org.apache.lucene;version="4.10.4",
org.apache.lucene.analysis;version="4.10.4",
org.apache.lucene.analysis.ar;version="4.10.4",
org.apache.lucene.analysis.bg;version="4.10.4",
org.apache.lucene.analysis.br;version="4.10.4",
org.apache.lucene.analysis.ca;version="4.10.4",
org.apache.lucene.analysis.charfilter;version="4.10.4",
org.apache.lucene.analysis.cjk;version="4.10.4",
org.apache.lucene.analysis.ckb;version="4.10.4",
org.apache.lucene.analysis.cn;version="4.10.4",
org.apache.lucene.analysis.commongrams;version="4.10.4",
org.apache.lucene.analysis.compound;version="4.10.4",
org.apache.lucene.analysis.compound.hyphenation;version="4.10.4",
org.apache.lucene.analysis.core;version="4.10.4",
org.apache.lucene.analysis.cz;version="4.10.4",
org.apache.lucene.analysis.da;version="4.10.4",
org.apache.lucene.analysis.de;version="4.10.4",
org.apache.lucene.analysis.el;version="4.10.4",
org.apache.lucene.analysis.en;version="4.10.4",
org.apache.lucene.analysis.es;version="4.10.4",
org.apache.lucene.analysis.eu;version="4.10.4",
org.apache.lucene.analysis.fa;version="4.10.4",
org.apache.lucene.analysis.fi;version="4.10.4",
org.apache.lucene.analysis.fr;version="4.10.4",
org.apache.lucene.analysis.ga;version="4.10.4",
org.apache.lucene.analysis.gl;version="4.10.4",
org.apache.lucene.analysis.hi;version="4.10.4",
org.apache.lucene.analysis.hu;version="4.10.4",
org.apache.lucene.analysis.hunspell;version="4.10.4",
org.apache.lucene.analysis.hy;version="4.10.4",
org.apache.lucene.analysis.id;version="4.10.4",
org.apache.lucene.analysis.in;version="4.10.4",
org.apache.lucene.analysis.it;version="4.10.4",
org.apache.lucene.analysis.lv;version="4.10.4",
org.apache.lucene.analysis.miscellaneous;version="4.10.4",
org.apache.lucene.analysis.ngram;version="4.10.4",
org.apache.lucene.analysis.nl;version="4.10.4",
org.apache.lucene.analysis.no;version="4.10.4",
org.apache.lucene.analysis.path;version="4.10.4",
org.apache.lucene.analysis.pattern;version="4.10.4",
org.apache.lucene.analysis.payloads;version="4.10.4",
org.apache.lucene.analysis.position;version="4.10.4",
org.apache.lucene.analysis.pt;version="4.10.4",
org.apache.lucene.analysis.query;version="4.10.4",
org.apache.lucene.analysis.reverse;version="4.10.4",
org.apache.lucene.analysis.ro;version="4.10.4",
org.apache.lucene.analysis.ru;version="4.10.4",
org.apache.lucene.analysis.shingle;version="4.10.4",
org.apache.lucene.analysis.sinks;version="4.10.4",
org.apache.lucene.analysis.snowball;version="4.10.4",
org.apache.lucene.analysis.standard;version="4.10.4",
org.apache.lucene.analysis.standard.std31;version="4.10.4",
org.apache.lucene.analysis.standard.std34;version="4.10.4",
org.apache.lucene.analysis.standard.std36;version="4.10.4",
org.apache.lucene.analysis.standard.std40;version="4.10.4",
org.apache.lucene.analysis.sv;version="4.10.4",
org.apache.lucene.analysis.synonym;version="4.10.4",
org.apache.lucene.analysis.th;version="4.10.4",
org.apache.lucene.analysis.tokenattributes;version="4.10.4",
org.apache.lucene.analysis.tr;version="4.10.4",
org.apache.lucene.analysis.util;version="4.10.4",
org.apache.lucene.analysis.wikipedia;version="4.10.4",
org.apache.lucene.codecs;version="4.10.4",
org.apache.lucene.codecs.blocktree;version="4.10.4",
org.apache.lucene.codecs.compressing;version="4.10.4",
org.apache.lucene.codecs.idversion;version="4.10.4",
org.apache.lucene.codecs.lucene3x;version="4.10.4",
org.apache.lucene.codecs.lucene40;version="4.10.4",
org.apache.lucene.codecs.lucene41;version="4.10.4",
org.apache.lucene.codecs.lucene410;version="4.10.4",
org.apache.lucene.codecs.lucene42;version="4.10.4",
org.apache.lucene.codecs.lucene45;version="4.10.4",
org.apache.lucene.codecs.lucene46;version="4.10.4",
org.apache.lucene.codecs.lucene49;version="4.10.4",
org.apache.lucene.codecs.perfield;version="4.10.4",
org.apache.lucene.collation;version="4.10.4",
org.apache.lucene.collation.tokenattributes;version="4.10.4",
org.apache.lucene.document;version="4.10.4",
org.apache.lucene.expressions;version="4.10.4",
org.apache.lucene.expressions.js;version="4.10.4",
org.apache.lucene.index;version="4.10.4",
org.apache.lucene.index.memory;version="4.10.4",
org.apache.lucene.index.sorter;version="4.10.4",
org.apache.lucene.misc;version="4.10.4",
org.apache.lucene.queries;version="4.10.4",
org.apache.lucene.queries.function;version="4.10.4",
org.apache.lucene.queries.function.docvalues;version="4.10.4",
org.apache.lucene.queries.function.valuesource;version="4.10.4",
org.apache.lucene.queries.mlt;version="4.10.4",
org.apache.lucene.queryparser.analyzing;version="4.10.4",
org.apache.lucene.queryparser.classic;version="4.10.4",
org.apache.lucene.queryparser.complexPhrase;version="4.10.4",
org.apache.lucene.queryparser.ext;version="4.10.4",
org.apache.lucene.queryparser.flexible.core;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.builders;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.config;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.messages;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.nodes;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.parser;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.processors;version="4.10.4",
org.apache.lucene.queryparser.flexible.core.util;version="4.10.4",
org.apache.lucene.queryparser.flexible.messages;version="4.10.4",
org.apache.lucene.queryparser.flexible.precedence;version="4.10.4",
org.apache.lucene.queryparser.flexible.precedence.processors;version="4.10.4",
org.apache.lucene.queryparser.flexible.standard;version="4.10.4",
org.apache.lucene.queryparser.flexible.standard.builders;version="4.10.4",
org.apache.lucene.queryparser.flexible.standard.config;version="4.10.4",
org.apache.lucene.queryparser.flexible.standard.nodes;version="4.10.4",
org.apache.lucene.queryparser.flexible.standard.parser;version="4.10.4",
org.apache.lucene.queryparser.flexible.standard.processors;version="4.10.4",
org.apache.lucene.queryparser.simple;version="4.10.4",
org.apache.lucene.queryparser.surround.parser;version="4.10.4",
org.apache.lucene.queryparser.surround.query;version="4.10.4",
org.apache.lucene.queryparser.xml;version="4.10.4",
org.apache.lucene.queryparser.xml.builders;version="4.10.4",
org.apache.lucene.sandbox.queries;version="4.10.4",
org.apache.lucene.sandbox.queries.regex;version="4.10.4",
org.apache.lucene.search;version="4.10.4",
org.apache.lucene.search.grouping;version="4.10.4",
org.apache.lucene.search.grouping.function;version="4.10.4",
org.apache.lucene.search.grouping.term;version="4.10.4",
org.apache.lucene.search.highlight;version="4.10.4",
org.apache.lucene.search.join;version="4.10.4",
org.apache.lucene.search.payloads;version="4.10.4",
org.apache.lucene.search.postingshighlight;version="4.10.4",
org.apache.lucene.search.similarities;version="4.10.4",
org.apache.lucene.search.spans;version="4.10.4",
org.apache.lucene.search.spell;version="4.10.4",
org.apache.lucene.search.suggest;version="4.10.4",
org.apache.lucene.search.suggest.analyzing;version="4.10.4",
org.apache.lucene.search.suggest.fst;version="4.10.4",
org.apache.lucene.search.suggest.jaspell;version="4.10.4",
org.apache.lucene.search.suggest.tst;version="4.10.4",
org.apache.lucene.search.vectorhighlight;version="4.10.4",
org.apache.lucene.spatial;version="4.10.4",
org.apache.lucene.spatial.bbox;version="4.10.4",
org.apache.lucene.spatial.prefix;version="4.10.4",
org.apache.lucene.spatial.prefix.tree;version="4.10.4",
org.apache.lucene.spatial.query;version="4.10.4",
org.apache.lucene.spatial.serialized;version="4.10.4",
org.apache.lucene.spatial.util;version="4.10.4",
org.apache.lucene.spatial.vector;version="4.10.4",
org.apache.lucene.store;version="4.10.4",
org.apache.lucene.util;version="4.10.4",
org.apache.lucene.util.automaton;version="4.10.4",
org.apache.lucene.util.fst;version="4.10.4",
org.apache.lucene.util.mutable;version="4.10.4",
org.apache.lucene.util.packed;version="4.10.4",
org.tartarus.snowball;version="4.10.4",
org.tartarus.snowball.ext;version="4.10.4"
Bundle-ClassPath: .,
lib/lucene-analyzers-common-4.10.4.jar,
lib/lucene-core-4.10.4.jar,
lib/lucene-expressions-4.10.4.jar,
lib/lucene-grouping-4.10.4.jar,
lib/lucene-highlighter-4.10.4.jar,
lib/lucene-join-4.10.4.jar,
lib/lucene-memory-4.10.4.jar,
lib/lucene-misc-4.10.4.jar,
lib/lucene-queries-4.10.4.jar,
lib/lucene-queryparser-4.10.4.jar,
lib/lucene-sandbox-4.10.4.jar,
lib/lucene-spatial-4.10.4.jar,
lib/lucene-suggest-4.10.4.jar
Bundle-Vendor: .Talend SA.
Bundle-ActivationPolicy: lazy

View File

@@ -1 +0,0 @@
jarprocessor.exclude.children=true

View File

@@ -1,15 +0,0 @@
bin.includes = META-INF/,\
.,\
lib/lucene-analyzers-common-4.10.4.jar,\
lib/lucene-core-4.10.4.jar,\
lib/lucene-expressions-4.10.4.jar,\
lib/lucene-grouping-4.10.4.jar,\
lib/lucene-highlighter-4.10.4.jar,\
lib/lucene-join-4.10.4.jar,\
lib/lucene-memory-4.10.4.jar,\
lib/lucene-misc-4.10.4.jar,\
lib/lucene-queries-4.10.4.jar,\
lib/lucene-queryparser-4.10.4.jar,\
lib/lucene-sandbox-4.10.4.jar,\
lib/lucene-spatial-4.10.4.jar,\
lib/lucene-suggest-4.10.4.jar

View File

@@ -1,240 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Some code in src/java/org/apache/lucene/util/UnicodeUtil.java was
derived from unicode conversion examples available at
http://www.unicode.org/Public/PROGRAMS/CVTUTF. Here is the copyright
from those sources:
/*
* Copyright 2001-2004 Unicode, Inc.
*
* Disclaimer
*
* This source code is provided as is by Unicode, Inc. No claims are
* made as to fitness for any particular purpose. No warranties of any
* kind are expressed or implied. The recipient agrees to determine
* applicability of information provided. If this file has been
* purchased on magnetic or optical media from Unicode, Inc., the
* sole remedy for any claim will be exchange of defective media
* within 90 days of receipt.
*
* Limitations on Rights to Redistribute This Code
*
* Unicode, Inc. hereby grants the right to freely use the information
* supplied in this file in the creation of products supporting the
* Unicode Standard, and to make copies of this file in any form
* for internal or external distribution as long as this notice
* remains attached.
*/
Some code in src/java/org/apache/lucene/util/ArrayUtil.java was
derived from Python 2.4.2 sources available at
http://www.python.org. Full license is here:
http://www.python.org/download/releases/2.4.2/license/

View File

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.talend.studio</groupId>
<artifactId>tcommon-studio-se</artifactId>
<version>7.4.1-SNAPSHOT</version>
<relativePath>../../../</relativePath>
</parent>
<artifactId>org.talend.libraries.apache.lucene4</artifactId>
<packaging>eclipse-plugin</packaging>
</project>

View File

@@ -541,9 +541,9 @@ public class ModulesNeededProvider {
Property property = findRoutinesPropery(infor.getId(), infor.getName(), routines, type);
if (property != null) {
if (((RoutineItem) property.getItem()).isBuiltIn()) {
systemRoutines.add(infor.getId());
systemRoutines.add(property.getId());
} else {
userRoutines.add(infor.getId());
userRoutines.add(property.getId());
}
}
}

View File

@@ -25,6 +25,7 @@ import org.eclipse.core.runtime.Path;
import org.eclipse.emf.common.util.EMap;
import org.talend.commons.ui.utils.PathUtils;
import org.talend.core.database.EDatabaseTypeName;
import org.talend.core.database.EImpalaDriver;
import org.talend.core.database.conn.ConnParameterKeys;
import org.talend.core.database.conn.DatabaseConnStrUtil;
import org.talend.core.database.conn.template.DbConnStrForHive;
@@ -1418,8 +1419,12 @@ public final class DBConnectionContextUtils {
// Added 20130311 TDQ-7000, when it is context mode and not general jdbc, reset the url.
String newURL = null;
if (EDatabaseTypeName.IMPALA.equals(EDatabaseTypeName.getTypeFromDbType(dbConn.getDatabaseType()))) {
String template = DbConnStrForHive.URL_HIVE_2_TEMPLATE;
if (EImpalaDriver.IMPALA.getName().equals(cloneConn.getParameters().get(ConnParameterKeys.IMPALA_DRIVER))) {
template = DbConnStrForHive.URL_IMPALA_TEMPLATE;
}
newURL = DatabaseConnStrUtil.getImpalaString(cloneConn, cloneConn.getServerName(), cloneConn.getPort(),
cloneConn.getSID(), DbConnStrForHive.URL_HIVE_2_TEMPLATE);
cloneConn.getSID(), template);
} else {
newURL = DatabaseConnStrUtil.getURLString(cloneConn.getDatabaseType(), dbConn.getDbVersionString(), server, username,
password, port, sidOrDatabase, filePath.toLowerCase(), datasource, dbRootPath, additionParam);

View File

@@ -139,6 +139,7 @@ public final class OtherConnectionContextUtils {
DbSchema,
DbUsername,
DbPassword,
DbParameters,
}
/*
@@ -611,6 +612,10 @@ public final class OtherConnectionContextUtils {
conn.getValue(TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_PASSWORD, conn), false),
JavaTypesManager.PASSWORD);
break;
case DbParameters:
ConnectionContextHelper.createParameters(varList, paramName,
TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES, conn));
break;
default:
}
}
@@ -722,6 +727,10 @@ public final class OtherConnectionContextUtils {
TaggedValueHelper.setTaggedValue(sapConn, ISAPConstant.PROP_DB_PASSWORD,
ContextParameterUtils.getNewScriptCode(sapBasicVarName, LANGUAGE));
break;
case DbParameters:
TaggedValueHelper.setTaggedValue(sapConn, ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES,
ContextParameterUtils.getNewScriptCode(sapBasicVarName, LANGUAGE));
break;
default:
}
}
@@ -768,11 +777,14 @@ public final class OtherConnectionContextUtils {
TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_USERNAME, conn)));
String dbPassword = TalendQuoteUtils.removeQuotes(ConnectionContextHelper.getOriginalValue(contextType,
conn.getValue(TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_PASSWORD, conn), false)));
String dbParameters = TalendQuoteUtils.removeQuotes(ConnectionContextHelper.getOriginalValue(contextType,
TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES, conn)));
TaggedValueHelper.setTaggedValue(conn, ISAPConstant.PROP_DB_HOST, dbHost);
TaggedValueHelper.setTaggedValue(conn, ISAPConstant.PROP_DB_PORT, dbPort);
TaggedValueHelper.setTaggedValue(conn, ISAPConstant.PROP_DB_SCHEMA, dbSchema);
TaggedValueHelper.setTaggedValue(conn, ISAPConstant.PROP_DB_USERNAME, dbUsername);
TaggedValueHelper.setTaggedValue(conn, ISAPConstant.PROP_DB_PASSWORD, conn.getValue(dbPassword, true));
TaggedValueHelper.setTaggedValue(conn, ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES, dbParameters);
}
public static SAPConnection cloneOriginalValueSAPConnection(SAPConnection fileConn, ContextType contextType) {
@@ -813,11 +825,14 @@ public final class OtherConnectionContextUtils {
TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_USERNAME, fileConn));
String dbPassword = ConnectionContextHelper.getOriginalValue(contextType,
fileConn.getValue(TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_PASSWORD, fileConn), false));
String dbParameters = ConnectionContextHelper.getOriginalValue(contextType,
TaggedValueHelper.getValueString(ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES, fileConn));
TaggedValueHelper.setTaggedValue(cloneConn, ISAPConstant.PROP_DB_HOST, dbHost);
TaggedValueHelper.setTaggedValue(cloneConn, ISAPConstant.PROP_DB_PORT, dbPort);
TaggedValueHelper.setTaggedValue(cloneConn, ISAPConstant.PROP_DB_SCHEMA, dbSchema);
TaggedValueHelper.setTaggedValue(cloneConn, ISAPConstant.PROP_DB_USERNAME, dbUsername);
TaggedValueHelper.setTaggedValue(cloneConn, ISAPConstant.PROP_DB_PASSWORD, dbPassword);
TaggedValueHelper.setTaggedValue(cloneConn, ISAPConstant.PROP_DB_ADDITIONAL_PROPERTIES, dbParameters);
ConnectionContextHelper.cloneConnectionProperties(fileConn, cloneConn);

View File

@@ -7,7 +7,6 @@ Require-Bundle: org.eclipse.ui,
org.apache.commons.logging,
org.apache.commons.collections,
org.apache.commons.lang,
org.apache.axis,
org.talend.libraries.mdm;resolution:=optional,
org.talend.common.ui.runtime,
org.talend.core.runtime,

View File

@@ -14,6 +14,7 @@ package org.talend.core.model.metadata.builder.database;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
@@ -387,12 +388,12 @@ public class ExtractMetaDataFromDataBase {
private static boolean checkSybaseDB(Connection connection, String database) {
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
if (extractMeta != null) {
Statement stmt = null;
PreparedStatement stmt = null;
ResultSet resultSet = null;
try {
stmt = connection.createStatement();
stmt = connection.prepareStatement("sp_helpdb " + database);
extractMeta.setQueryStatementTimeout(stmt);
resultSet = stmt.executeQuery("sp_helpdb " + database);
resultSet = stmt.executeQuery();
return true;
} catch (SQLException e) {
ExceptionHandler.process(e);
@@ -705,25 +706,23 @@ public class ExtractMetaDataFromDataBase {
tableComment = tablesSet.getString(GetTable.REMARKS.name());
}
if (StringUtils.isBlank(tableComment)) {
String selectRemarkOnTable = getSelectRemarkOnTable(tableName);
if (selectRemarkOnTable != null && connection != null) {
tableComment = executeGetCommentStatement(selectRemarkOnTable, connection);
if (connection != null) {
tableComment = executeGetCommentStatement(connection, tableName);
}
}
return tableComment;
}
private static String getSelectRemarkOnTable(String tableName) {
return "SELECT TABLE_COMMENT FROM information_schema.TABLES WHERE TABLE_NAME='" + tableName + "'"; //$NON-NLS-1$ //$NON-NLS-2$
}
private static String executeGetCommentStatement(Connection connection, String tableName) {
String sql = "SELECT TABLE_COMMENT FROM information_schema.TABLES WHERE TABLE_NAME=?";
private static String executeGetCommentStatement(String queryStmt, Connection connection) {
String comment = null;
Statement statement = null;
PreparedStatement statement = null;
ResultSet resultSet = null;
try {
statement = connection.createStatement();
statement.execute(queryStmt);
statement = connection.prepareStatement(sql);
statement.setString(1, tableName);
statement.execute();
// get the results
resultSet = statement.getResultSet();
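The two hunks above show the pattern that repeats through most of the remaining files in this diff: a query built by string concatenation and executed through a plain Statement is replaced by a PreparedStatement with the variable part bound as a parameter. A minimal sketch of the idea, not taken from the commit, using the table-comment lookup from this hunk; the real code keeps explicit close() calls and a Talend-specific setQueryStatementTimeout() that are left out here:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class TableCommentLookup {

        // Binds the table name as a parameter instead of concatenating it into the SQL text,
        // which avoids SQL injection and quoting problems with unusual table names.
        static String readTableComment(Connection connection, String tableName) throws SQLException {
            String sql = "SELECT TABLE_COMMENT FROM information_schema.TABLES WHERE TABLE_NAME = ?";
            try (PreparedStatement statement = connection.prepareStatement(sql)) {
                statement.setString(1, tableName);
                try (ResultSet resultSet = statement.executeQuery()) {
                    return resultSet.next() ? resultSet.getString("TABLE_COMMENT") : null;
                }
            }
        }
    }

The Sybase hunk is slightly different: "sp_helpdb " + database is still concatenated, because the whole procedure call is prepared as-is rather than parameterized.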


@@ -15,6 +15,7 @@ package org.talend.core.model.metadata.builder.database.manager;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
@@ -1067,9 +1068,9 @@ public class ExtractManager {
try {
if (!tableInfoParameters.isUsedName()) {
if (tableInfoParameters.getSqlFiter() != null && !"".equals(tableInfoParameters.getSqlFiter())) { //$NON-NLS-1$
Statement stmt = extractMeta.getConn().createStatement();
PreparedStatement stmt = extractMeta.getConn().prepareStatement(tableInfoParameters.getSqlFiter());
extractMeta.setQueryStatementTimeout(stmt);
ResultSet rsTables = stmt.executeQuery(tableInfoParameters.getSqlFiter());
ResultSet rsTables = stmt.executeQuery();
itemTablesName = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables, extractMeta.getConn());
rsTables.close();
stmt.close();


@@ -14,9 +14,9 @@ package org.talend.core.model.metadata.builder.database.manager.dbs;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -68,15 +68,17 @@ public class IBMDB2ExtractManager extends ExtractManager {
@Override
public String getTableNameBySynonyms(Connection conn, String tableName) {
Statement sta = null;
PreparedStatement sta = null;
ResultSet resultSet = null;
try {
if (conn != null && conn.getMetaData().getDatabaseProductName().startsWith(DATABASE_PRODUCT_NAME)) {
String sql = "SELECT NAME,BASE_NAME FROM SYSIBM.SYSTABLES where TYPE='A' and name ='" + tableName + "'";
sta = conn.createStatement();
String sql = "SELECT NAME,BASE_NAME FROM SYSIBM.SYSTABLES where TYPE='A' and name =?";
sta = conn.prepareStatement(sql);
sta.setString(1, tableName);
ExtractMetaDataUtils.getInstance().setQueryStatementTimeout(sta);
resultSet = sta.executeQuery(sql);
resultSet = sta.executeQuery();
while (resultSet.next()) {
String baseName = resultSet.getString("base_name").trim();
return baseName;
@@ -113,15 +115,16 @@ public class IBMDB2ExtractManager extends ExtractManager {
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
// need to retrieve columns of synonym by useing sql rather than get them from jdbc metadata
String synSQL = "SELECT a.*\n" + "FROM SYSCAT.COLUMNS a\n" + "LEFT OUTER JOIN SYSIBM.SYSTABLES b\n"
+ "ON a.TABNAME = b.NAME\n" + "AND a.TABSCHEMA = b.CREATOR\n" + "where a.TABNAME =" + "\'" + tableName
+ "\'\n";
+ "ON a.TABNAME = b.NAME\n" + "AND a.TABSCHEMA = b.CREATOR\n" + "where a.TABNAME =?\n";
if (!("").equals(metadataConnection.getSchema())) {
synSQL += "AND b.CREATOR =\'" + metadataConnection.getSchema() + "\'";
}
synSQL += "ORDER BY a.COLNO";
Statement sta = extractMeta.getConn().createStatement();
PreparedStatement sta = extractMeta.getConn().prepareStatement(synSQL);
sta.setString(1, tableName);
extractMeta.setQueryStatementTimeout(sta);
ResultSet columns = sta.executeQuery(synSQL);
ResultSet columns = sta.executeQuery();
String typeName = null;
int index = 0;
List<String> columnLabels = new ArrayList<String>();


@@ -14,9 +14,9 @@ package org.talend.core.model.metadata.builder.database.manager.dbs;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
@@ -93,8 +93,8 @@ public class ImpalaExtractManager extends ExtractManager {
DatabaseMetaData metaData = conn.getMetaData();
if (!tableInfoParameters.isUsedName()) {
if (tableInfoParameters.getSqlFiter() != null && !"".equals(tableInfoParameters.getSqlFiter())) { //$NON-NLS-1$
Statement stmt = conn.createStatement();
ResultSet rsTables = stmt.executeQuery(tableInfoParameters.getSqlFiter());
PreparedStatement stmt = conn.prepareStatement(tableInfoParameters.getSqlFiter());
ResultSet rsTables = stmt.executeQuery();
itemTablesName = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables, conn);
rsTables.close();
stmt.close();


@@ -17,7 +17,6 @@ import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -53,16 +52,16 @@ public class MSSQLExtractManager extends ExtractManager {
@Override
public String getTableNameBySynonyms(Connection conn, String tableName) {
Statement sta = null;
PreparedStatement sta = null;
ResultSet resultSet = null;
try {
if (conn != null && conn.getMetaData().getDatabaseProductName().equals(EDatabaseTypeName.MSSQL.getDisplayName())) {
String sql = "SELECT object_id ,parent_object_id as parentid, name AS object_name , base_object_name as base_name from sys.synonyms where name ='"
+ tableName + "'";
sta = conn.createStatement();
String sql = "SELECT object_id ,parent_object_id as parentid, name AS object_name , base_object_name as base_name from sys.synonyms where name =?";
sta = conn.prepareStatement(sql);
sta.setString(1, tableName);
ExtractMetaDataUtils.getInstance().setQueryStatementTimeout(sta);
resultSet = sta.executeQuery(sql);
resultSet = sta.executeQuery();
while (resultSet.next()) {
String baseName = resultSet.getString("base_name").trim();
if (baseName.contains(".") && baseName.length() > 2) {
@@ -124,16 +123,25 @@ public class MSSQLExtractManager extends ExtractManager {
}
}
// need to retrieve columns of synonym by useing sql rather than get them from jdbc metadata
String synSQL = "select * from INFORMATION_SCHEMA.COLUMNS where TABLE_NAME =\'" + TABLE_NAME + "\'";
String synSQL = "select * from INFORMATION_SCHEMA.COLUMNS where TABLE_NAME =?";
if (null != TABLE_SCHEMA) {
synSQL += "\nand TABLE_SCHEMA =\'" + TABLE_SCHEMA + "\'";
synSQL += "\nand TABLE_SCHEMA =?";
}
if (!("").equals(metadataConnection.getDatabase())) {
synSQL += "\nand TABLE_CATALOG =\'" + metadataConnection.getDatabase() + "\'";
synSQL += "\nand TABLE_CATALOG =?";
}
PreparedStatement sta = extractMeta.getConn().prepareStatement(synSQL);
sta.setString(1, TABLE_NAME);
int idx = 2;
if (null != TABLE_SCHEMA) {
sta.setString(idx, TABLE_SCHEMA);
idx++;
}
if (!("").equals(metadataConnection.getDatabase())) {
sta.setString(idx, metadataConnection.getDatabase());
}
Statement sta = extractMeta.getConn().createStatement();
extractMeta.setQueryStatementTimeout(sta);
ResultSet columns = sta.executeQuery(synSQL);
ResultSet columns = sta.executeQuery();
String typeName = null;
int index = 0;
List<String> columnLabels = new ArrayList<String>();
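When a predicate is optional the number of '?' placeholders varies, which is why the hunk above tracks a running index (idx) and binds the schema and catalog only when their clauses were appended. A compact sketch of that bookkeeping, not from the commit, with hypothetical names and without the query-timeout call the real code makes:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    final class SynonymColumnsQuery {

        // Appends optional predicates first, then binds values in the same order
        // in which the '?' placeholders were added to the SQL text.
        static PreparedStatement prepare(Connection conn, String table, String schema, String catalog)
                throws SQLException {
            StringBuilder sql = new StringBuilder("select * from INFORMATION_SCHEMA.COLUMNS where TABLE_NAME = ?");
            if (schema != null) {
                sql.append(" and TABLE_SCHEMA = ?");
            }
            if (catalog != null && !catalog.isEmpty()) {
                sql.append(" and TABLE_CATALOG = ?");
            }
            PreparedStatement statement = conn.prepareStatement(sql.toString());
            int index = 1;
            statement.setString(index++, table);
            if (schema != null) {
                statement.setString(index++, schema);
            }
            if (catalog != null && !catalog.isEmpty()) {
                statement.setString(index++, catalog);
            }
            return statement;
        }
    }

The caller closes the returned statement; keeping the append conditions and the bind conditions identical is what keeps the parameter indexes aligned.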


@@ -17,7 +17,6 @@ import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@@ -72,20 +71,22 @@ public class OracleExtractManager extends ExtractManager {
protected List<String> getTablesToFilter(IMetadataConnection metadataConnection) {
List<String> tablesToFilter = new ArrayList<String>();
Statement stmt;
PreparedStatement stmt = null;
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
try {
stmt = extractMeta.getConn().createStatement();
extractMeta.setQueryStatementTimeout(stmt);
if (EDatabaseTypeName.ORACLEFORSID.getDisplayName().equals(metadataConnection.getDbType())
|| EDatabaseTypeName.ORACLESN.getDisplayName().equals(metadataConnection.getDbType())
|| EDatabaseTypeName.ORACLE_CUSTOM.getDisplayName().equals(metadataConnection.getDbType())
|| EDatabaseTypeName.ORACLE_OCI.getDisplayName().equals(metadataConnection.getDbType())) {
ResultSet rsTables = stmt.executeQuery(ORACLE_10G_RECBIN_SQL);
stmt = extractMeta.getConn().prepareStatement(ORACLE_10G_RECBIN_SQL);
extractMeta.setQueryStatementTimeout(stmt);
ResultSet rsTables = stmt.executeQuery();
tablesToFilter = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables, extractMeta.getConn());
rsTables.close();
}
stmt.close();
if (stmt != null) {
stmt.close();
}
} catch (SQLException e) {
ExceptionHandler.process(e);
}
@@ -104,11 +105,12 @@ public class OracleExtractManager extends ExtractManager {
List<String> tablesToFilter = getTablesToFilter(metadataConnection);
try {
Statement stmt = extractMeta.getConn().createStatement();
PreparedStatement stmt = extractMeta.getConn().prepareStatement(GET_ALL_SYNONYMS);
extractMeta.setQueryStatementTimeout(stmt);
ResultSet rsTables = stmt.executeQuery(GET_ALL_SYNONYMS);
ResultSet rsTables = stmt.executeQuery();
getMetadataTables(medataTables, rsTables, dbMetaData.supportsSchemasInTableDefinitions(), tablesToFilter, limit);
rsTables.close();
stmt.close();
} catch (SQLException e) {
ExceptionHandler.process(e);
log.error(e.toString());
@@ -127,18 +129,20 @@ public class OracleExtractManager extends ExtractManager {
@Override
public String getTableNameBySynonyms(Connection conn, String tableName) {
// bug TDI-19382
Statement sta = null;
PreparedStatement sta = null;
ResultSet resultSet = null;
try {
if (conn != null && conn.getMetaData().getDatabaseProductName().equals(DATABASE_PRODUCT_NAME)) {
String sql = "select TABLE_NAME from ALL_SYNONYMS where SYNONYM_NAME = '" + tableName + "'"; //$NON-NLS-1$ //$NON-NLS-2$
String sql = "select TABLE_NAME from ALL_SYNONYMS where SYNONYM_NAME = ?"; //$NON-NLS-1$ //$NON-NLS-2$
// String sql = "select * from all_tab_columns where upper(table_name)='" + name +
// "' order by column_id";
// Statement sta;
sta = conn.createStatement();
sta = conn.prepareStatement(sql);
sta.setString(1, tableName);
ExtractMetaDataUtils.getInstance().setQueryStatementTimeout(sta);
resultSet = sta.executeQuery(sql);
resultSet = sta.executeQuery();
while (resultSet.next()) {
return resultSet.getString("TABLE_NAME"); //$NON-NLS-1$
}
@@ -173,19 +177,25 @@ public class OracleExtractManager extends ExtractManager {
// need to retrieve columns of synonym by useing sql rather than get them from jdbc metadata
String synSQL = "SELECT all_tab_columns.*\n" + "FROM all_tab_columns\n" + "LEFT OUTER JOIN all_synonyms\n"
+ "ON all_tab_columns.TABLE_NAME = all_synonyms.TABLE_NAME\n"
+ "AND ALL_SYNONYMS.TABLE_OWNER = all_tab_columns.OWNER\n" + "WHERE all_synonyms.SYNONYM_NAME =" + "\'"
+ synonymName + "\'\n";
+ "AND ALL_SYNONYMS.TABLE_OWNER = all_tab_columns.OWNER\n" + "WHERE all_synonyms.SYNONYM_NAME =?\n";
// bug TDI-19382
if (!("").equals(metadataConnection.getSchema())) {
synSQL += "and all_synonyms.OWNER =\'" + metadataConnection.getSchema() + "\'";
synSQL += "and all_synonyms.OWNER =?";
} else if (table.eContainer() instanceof Schema) {
Schema schema = (Schema) table.eContainer();
synSQL += "and all_synonyms.OWNER =\'" + schema.getName() + "\'";
synSQL += "and all_synonyms.OWNER =?";
}
synSQL += " ORDER BY all_tab_columns.COLUMN_NAME"; //$NON-NLS-1$
Statement sta = extractMeta.getConn().createStatement();
PreparedStatement sta = extractMeta.getConn().prepareStatement(synSQL);
sta.setString(1, synonymName);
int idx = 2;
if (!("").equals(metadataConnection.getSchema())) {
sta.setString(idx, metadataConnection.getSchema());
} else if (table.eContainer() instanceof Schema) {
Schema schema = (Schema) table.eContainer();
sta.setString(idx, schema.getName());
}
extractMeta.setQueryStatementTimeout(sta);
ResultSet columns = sta.executeQuery(synSQL);
ResultSet columns = sta.executeQuery();
String typeName = null;
int index = 0;
List<String> columnLabels = new ArrayList<String>();
@@ -247,10 +257,11 @@ public class OracleExtractManager extends ExtractManager {
throws SQLException {
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
if (extractMeta.isUseAllSynonyms()) {
String sql = "select * from all_tab_columns where table_name='" + tableName + "' ORDER BY all_tab_columns.COLUMN_NAME"; //$NON-NLS-1$ //$NON-NLS-2$
Statement stmt = extractMeta.getConn().createStatement();
String sql = "select * from all_tab_columns where table_name=? ORDER BY all_tab_columns.COLUMN_NAME"; //$NON-NLS-1$ //$NON-NLS-2$
PreparedStatement stmt = extractMeta.getConn().prepareStatement(sql);
stmt.setString(1, tableName);
extractMeta.setQueryStatementTimeout(stmt);
return stmt.executeQuery(sql);
return stmt.executeQuery();
} else {
return super.getColumnsResultSet(dbMetaData, catalogName, schemaName, tableName);
}
@@ -260,10 +271,10 @@ public class OracleExtractManager extends ExtractManager {
public void synchroViewStructure(String catalogName, String schemaName, String tableName) throws SQLException {
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
ResultSet results = null;
Statement stmt = null;
PreparedStatement stmt = null;
String sql = null;
if (extractMeta.isUseAllSynonyms()) {
sql = "select * from all_tab_columns where table_name='" + tableName + "'"; //$NON-NLS-1$
sql = "select * from all_tab_columns where table_name=?"; //$NON-NLS-1$
} else {
StringBuffer sqlBuffer = new StringBuffer();
sqlBuffer.append("SELECT * FROM ");
@@ -274,9 +285,12 @@ public class OracleExtractManager extends ExtractManager {
sql = sqlBuffer.toString();
}
try {
stmt = extractMeta.getConn().createStatement();
stmt = extractMeta.getConn().prepareStatement(sql);
if (extractMeta.isUseAllSynonyms()) {
stmt.setString(1, tableName);
}
extractMeta.setQueryStatementTimeout(stmt);
results = stmt.executeQuery(sql);
results = stmt.executeQuery();
} finally {
if (results != null) {
results.close();
@@ -302,8 +316,8 @@ public class OracleExtractManager extends ExtractManager {
PreparedStatement statement = null;
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
try {
statement = extractMeta.getConn().prepareStatement("SELECT COMMENTS FROM USER_COL_COMMENTS WHERE TABLE_NAME='" //$NON-NLS-1$
+ tableName + "'"); //$NON-NLS-1$
statement = extractMeta.getConn().prepareStatement("SELECT COMMENTS FROM USER_COL_COMMENTS WHERE TABLE_NAME=?"); //$NON-NLS-1$
statement.setString(1, tableName);
extractMeta.setQueryStatementTimeout(statement);
if (statement.execute()) {
keys = statement.getResultSet();
@@ -337,9 +351,9 @@ public class OracleExtractManager extends ExtractManager {
&& !metadataConnection.getDbVersionString().equals(EDatabaseVersion4Drivers.ORACLE_8.getVersionValue())) {
ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
try {
Statement stmt = extractMeta.getConn().createStatement();
PreparedStatement stmt = extractMeta.getConn().prepareStatement(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
extractMeta.setQueryStatementTimeout(stmt);
ResultSet rsTables = stmt.executeQuery(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
ResultSet rsTables = stmt.executeQuery();
itemTablesName.removeAll(ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables, extractMeta.getConn()));
rsTables.close();
stmt.close();
@@ -359,27 +373,27 @@ public class OracleExtractManager extends ExtractManager {
if (con != null && con.toString().contains("oracle.jdbc.driver") //$NON-NLS-1$
&& extractMeta.isUseAllSynonyms()) {
Set<String> nameFiters = tableInfoParameters.getNameFilters();
Statement stmt = con.createStatement();
extractMeta.setQueryStatementTimeout(stmt);
StringBuffer filters = new StringBuffer();
if (!nameFiters.isEmpty()) {
filters.append(" and ("); //$NON-NLS-1$
final String tStr = " all_synonyms.synonym_name like '"; //$NON-NLS-1$
int i = 0;
for (String s : nameFiters) {
final String tStr = " all_synonyms.synonym_name like ?"; //$NON-NLS-1$
for (int i = 0; i < nameFiters.size(); i++) {
if (i != 0) {
filters.append(" or "); //$NON-NLS-1$
}
filters.append(tStr);
filters.append(s);
filters.append('\'');
i++;
}
filters.append(')');
}
ResultSet rsTables = stmt.executeQuery(GET_ALL_SYNONYMS + filters.toString());
PreparedStatement stmt = con.prepareStatement(GET_ALL_SYNONYMS + filters.toString());
int i = 1;
for (String s : nameFiters) {
stmt.setString(i, s);
i++;
}
extractMeta.setQueryStatementTimeout(stmt);
ResultSet rsTables = stmt.executeQuery();
itemTablesName = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables, extractMeta.getConn());
rsTables.close();
stmt.close();
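The last Oracle hunk builds a variable-length filter: each user-entered synonym name pattern contributes one "like ?" to the WHERE clause, and the patterns are bound afterwards in a second pass. Roughly, as a sketch rather than the actual method, with baseQuery standing in for the class's GET_ALL_SYNONYMS constant:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.Set;

    final class SynonymFilterQuery {

        // One " like ?" per filter, joined with "or", then the patterns bound in iteration order.
        static PreparedStatement prepare(Connection conn, String baseQuery, Set<String> nameFilters)
                throws SQLException {
            StringBuilder sql = new StringBuilder(baseQuery);
            if (!nameFilters.isEmpty()) {
                sql.append(" and (");
                for (int i = 0; i < nameFilters.size(); i++) {
                    if (i > 0) {
                        sql.append(" or ");
                    }
                    sql.append(" all_synonyms.synonym_name like ?");
                }
                sql.append(')');
            }
            PreparedStatement statement = conn.prepareStatement(sql.toString());
            int index = 1;
            for (String pattern : nameFilters) {
                statement.setString(index++, pattern);
            }
            return statement;
        }
    }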


@@ -15,8 +15,8 @@ package org.talend.metadata.managment.connection.manager;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@@ -25,8 +25,6 @@ import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import metadata.managment.i18n.Messages;
import org.apache.commons.lang.StringUtils;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
@@ -56,6 +54,8 @@ import org.talend.metadata.managment.hive.handler.HDP200YarnHandler;
import org.talend.metadata.managment.hive.handler.HiveConnectionHandler;
import org.talend.metadata.managment.hive.handler.Mapr212Handler;
import metadata.managment.i18n.Messages;
/**
* Created by Marvin Wang on Mar 13, 2013.
*/
@@ -492,26 +492,18 @@ public class HiveConnectionManager extends DataBaseConnectionManager {
}
String jdbcPropertiesStr = String.valueOf(jdbcPropertiesObj);
List<Map<String, Object>> jdbcProperties = HadoopRepositoryUtil.getHadoopPropertiesList(jdbcPropertiesStr);
Statement statement = null;
try {
statement = dbConn.createStatement();
for (Map<String, Object> propMap : jdbcProperties) {
String key = TalendQuoteUtils.removeQuotesIfExist(String.valueOf(propMap.get("PROPERTY"))); //$NON-NLS-1$
String value = TalendQuoteUtils.removeQuotesIfExist(String.valueOf(propMap.get("VALUE"))); //$NON-NLS-1$
if (StringUtils.isNotEmpty(key) && value != null) {
statement.execute("SET " + key + "=" + value); //$NON-NLS-1$ //$NON-NLS-2$
PreparedStatement ps = dbConn.prepareStatement("SET " + key + "=" + value);
ps.execute(); // $NON-NLS-1$ //$NON-NLS-2$
ps.close();
}
}
} catch (SQLException e) {
ExceptionHandler.process(e);
} finally {
if (statement != null) {
try {
statement.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
}
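The Hive hunk above swaps one long-lived Statement for a short-lived PreparedStatement per "SET key=value" command. The key and value are still concatenated into the statement text, since Hive's SET syntax does not accept bind parameters, so the change is mainly about uniform statement handling and closing each statement right away. A sketch of the loop, not from the commit, using a plain String map instead of the PROPERTY/VALUE maps in the real code:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.Map;

    final class HiveSessionProperties {

        // Applies each JDBC property as its own Hive "SET key=value" command.
        static void apply(Connection connection, Map<String, String> properties) throws SQLException {
            for (Map.Entry<String, String> property : properties.entrySet()) {
                String key = property.getKey();
                String value = property.getValue();
                if (key == null || key.isEmpty() || value == null) {
                    continue;
                }
                try (PreparedStatement statement = connection.prepareStatement("SET " + key + "=" + value)) {
                    statement.execute();
                }
            }
        }
    }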


@@ -131,11 +131,8 @@ public class ImpalaConnectionManager extends DataBaseConnectionManager {
if (EImpalaDriver.HIVE2.getDisplayName().equalsIgnoreCase(driverType)) {
driverClass = EImpalaDriver.HIVE2.getDriver();
}
if (EImpalaDriver.IMPALA40.getDisplayName().equalsIgnoreCase(driverType)) {
driverClass = EImpalaDriver.IMPALA40.getDriver();
}
if (EImpalaDriver.IMPALA41.getDisplayName().equalsIgnoreCase(driverType)) {
driverClass = EImpalaDriver.IMPALA41.getDriver();
if (EImpalaDriver.IMPALA.getDisplayName().equalsIgnoreCase(driverType)) {
driverClass = EImpalaDriver.IMPALA.getDriver();
}
} else {
throw new IllegalArgumentException("impala can not work with Hive1");
@@ -165,14 +162,18 @@ public class ImpalaConnectionManager extends DataBaseConnectionManager {
newThread.start();
Connection conn = null;
String connectionInfo = new StringBuilder().append("JDBC Uri: ").append(metadataConn.getUrl()).append(" ").toString();
try {
conn = futureTask.get(getDBConnectionTimeout(), TimeUnit.SECONDS);
if (conn == null) {
throw new SQLException(connectionInfo);
}
} catch (TimeoutException e) {
threadGroup.interrupt();
addBackgroundJob(futureTask, newThread);
throw new SQLException(Messages.getString("ImpalaConnectionManager.getConnection.timeout"), e); //$NON-NLS-1$
throw new SQLException(connectionInfo + Messages.getString("ImpalaConnectionManager.getConnection.timeout"), e); //$NON-NLS-1$
} catch (Throwable e1) {
throw new SQLException(e1);
throw new SQLException(connectionInfo, e1);
}
return conn;
}
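The second Impala hunk prefixes the failure messages with the JDBC URI so a timeout or connection error names the endpoint that was being contacted. The connection itself comes from a FutureTask that is given a bounded wait; the overall shape is roughly the following sketch, not the actual manager code, which additionally interrupts the worker thread group, registers the task as a background job, and reads the timeout and message text from configuration:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.FutureTask;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    final class TimedConnection {

        // Opens the JDBC connection on a worker thread and gives up after the timeout,
        // naming the URL in every failure path.
        static Connection open(String url, String user, String password, long timeoutSeconds) throws SQLException {
            FutureTask<Connection> task = new FutureTask<>(() -> DriverManager.getConnection(url, user, password));
            new Thread(task, "jdbc-connect").start();
            String connectionInfo = "JDBC Uri: " + url + " ";
            try {
                Connection connection = task.get(timeoutSeconds, TimeUnit.SECONDS);
                if (connection == null) {
                    throw new SQLException(connectionInfo);
                }
                return connection;
            } catch (TimeoutException e) {
                throw new SQLException(connectionInfo + "connection timed out", e);
            } catch (InterruptedException | ExecutionException e) {
                throw new SQLException(connectionInfo, e);
            }
        }
    }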


@@ -16,9 +16,9 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -148,7 +148,7 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
}
}
java.sql.Connection sqlConnection = null;
Statement stmt = null;
PreparedStatement stmt = null;
ResultSet rs = null;
try {
// MetadataConnectionUtils.setMetadataCon(metadataBean);
@@ -176,8 +176,8 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
String databaseType = dbconn.getDatabaseType();
// TDQ-16331 Azure Mysql must use 'select version()' to get correct version
if (EDatabaseTypeName.MYSQL.getDisplayName().equals(databaseType)) {
stmt = sqlConnection.createStatement();
rs = stmt.executeQuery("select version()"); //$NON-NLS-1$
stmt = sqlConnection.prepareStatement("select version()");
rs = stmt.executeQuery(); // $NON-NLS-1$
while (rs.next()) {
productVersion = rs.getString(1);
}
@@ -1021,12 +1021,12 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
isOracle = MetadataConnectionUtils.isOracle(c);
isOracleJdbc = MetadataConnectionUtils.isOracleJDBC(c);
if ((isOracleJdbc || isOracle) && !isOracle8i) {// oracle and not oracle8
Statement stmt;
PreparedStatement stmt;
try {
// MOD qiongli TDQ-4732 use the common method to create statement both DI and DQ,avoid Exception
// for top.
stmt = dbJDBCMetadata.getConnection().createStatement();
ResultSet rsTables = stmt.executeQuery(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
stmt = dbJDBCMetadata.getConnection().prepareStatement(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
ResultSet rsTables = stmt.executeQuery();
tablesToFilter = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables,
dbJDBCMetadata.getConnection());
rsTables.close();
@@ -1124,14 +1124,15 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
&& dbJDBCMetadata.getDatabaseProductName().equals("Microsoft SQL Server")) { //$NON-NLS-1$
for (String element : tableType) {
if (element.equals("SYNONYM")) { //$NON-NLS-1$
Statement stmt = extractMeta.getConn().createStatement();
extractMeta.setQueryStatementTimeout(stmt);
String schemaname = schemaPattern + ".sysobjects"; //$NON-NLS-1$
String sql = "select name from " + schemaname + " where xtype='SN'"; //$NON-NLS-1$//$NON-NLS-2$
if ("dbo".equalsIgnoreCase(schemaPattern)) { //$NON-NLS-1$
PreparedStatement stmt = extractMeta.getConn().prepareStatement(sql);
extractMeta.setQueryStatementTimeout(stmt);
// SELECT name AS object_name ,SCHEMA_NAME(schema_id) AS schema_name FROM sys.objects where
// type='SN'
ResultSet rsTables = stmt.executeQuery(sql);
ResultSet rsTables = stmt.executeQuery();
while (rsTables.next()) {
String nameKey = rsTables.getString("name").trim(); //$NON-NLS-1$
@@ -1143,6 +1144,8 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
metadatatable.setLabel(metadatatable.getName());
list.add(metadatatable);
}
rsTables.close();
stmt.close();
}
}
}
@@ -1150,10 +1153,12 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
&& dbJDBCMetadata.getDatabaseProductName().startsWith("DB2/")) { //$NON-NLS-1$
for (String element : tableType) {
if (element.equals("SYNONYM")) { //$NON-NLS-1$
Statement stmt = extractMeta.getConn().createStatement();
String sql = "SELECT NAME FROM SYSIBM.SYSTABLES where TYPE='A' and BASE_SCHEMA = ?"; //$NON-NLS-1$ //$NON-NLS-2$
PreparedStatement stmt = extractMeta.getConn().prepareStatement(sql);
stmt.setString(1, schemaPattern);
extractMeta.setQueryStatementTimeout(stmt);
String sql = "SELECT NAME FROM SYSIBM.SYSTABLES where TYPE='A' and BASE_SCHEMA = '" + schemaPattern + "'"; //$NON-NLS-1$ //$NON-NLS-2$
ResultSet rsTables = stmt.executeQuery(sql);
ResultSet rsTables = stmt.executeQuery();
while (rsTables.next()) {
String nameKey = rsTables.getString("NAME").trim(); //$NON-NLS-1$
@@ -1165,6 +1170,8 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
metadatatable.setLabel(metadatatable.getName());
list.add(metadatatable);
}
rsTables.close();
stmt.close();
}
}
}
@@ -1277,12 +1284,12 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
boolean isOracleJdbc = MetadataConnectionUtils.isOracleJDBC(c);
// MetadataConnectionUtils.isOracle8i(connection)
if ((isOracle || isOracleJdbc) && !flag) {// oracle and not oracle8
Statement stmt;
try {
// MOD qiongli TDQ-4732 use the common method to create statement both DI and DQ,avoid Exception
// for top.
stmt = dbJDBCMetadata.getConnection().createStatement();
ResultSet rsTables = stmt.executeQuery(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
PreparedStatement stmt = dbJDBCMetadata.getConnection()
.prepareStatement(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
ResultSet rsTables = stmt.executeQuery();
tablesToFilter = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables,
dbJDBCMetadata.getConnection());
rsTables.close();
@@ -1882,11 +1889,11 @@ public class DBConnectionFillerImpl extends MetadataFillerImpl<DatabaseConnectio
*/
private String executeGetCommentStatement(String queryStmt, java.sql.Connection connection) {
String comment = null;
Statement statement = null;
PreparedStatement statement = null;
ResultSet resultSet = null;
try {
statement = connection.createStatement();
statement.execute(queryStmt);
statement = connection.prepareStatement(queryStmt);
statement.execute();
// get the results
resultSet = statement.getResultSet();
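Several branches in this class also gain the rsTables.close() and stmt.close() calls that the synonym loops were missing. Where the surrounding code does not have to stay on a very old Java source level, try-with-resources gives the same cleanup with less bookkeeping; a sketch only, not how the commit is written, reusing the DB2 synonym query from the hunk above:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    final class Db2SynonymNames {

        // Statement and result set are both closed even if reading the results throws.
        static List<String> list(Connection connection, String schemaPattern) throws SQLException {
            String sql = "SELECT NAME FROM SYSIBM.SYSTABLES where TYPE='A' and BASE_SCHEMA = ?";
            List<String> names = new ArrayList<String>();
            try (PreparedStatement statement = connection.prepareStatement(sql)) {
                statement.setString(1, schemaPattern);
                try (ResultSet resultSet = statement.executeQuery()) {
                    while (resultSet.next()) {
                        names.add(resultSet.getString("NAME").trim());
                    }
                }
            }
            return names;
        }
    }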


@@ -15,10 +15,10 @@ package org.talend.metadata.managment.utils;
import java.io.UnsupportedEncodingException;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
@@ -1340,10 +1340,10 @@ public class MetadataConnectionUtils {
*/
public static String getColumnTypeName(java.sql.Connection connection, String tableName, int colIndex) {
String columnTypeName = null;
Statement statement = null;
PreparedStatement statement = null;
try {
statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery("SELECT FIRST 1 * FROM " + tableName + ";"); //$NON-NLS-1$ //$NON-NLS-2$
statement = connection.prepareStatement("SELECT FIRST 1 * FROM " + tableName + ";");
ResultSet resultSet = statement.executeQuery(); // $NON-NLS-1$ //$NON-NLS-2$
ResultSetMetaData rsMetaData = resultSet.getMetaData();
columnTypeName = rsMetaData.getColumnTypeName(colIndex);
} catch (Exception e) {
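One concatenation survives in this hunk: the probe query "SELECT FIRST 1 * FROM " + tableName. A table name is an identifier, not a value, so it cannot be passed through a '?' placeholder; when such a name can come from outside, a common mitigation is to escape and quote it with the driver's identifier quote string. A sketch only, not part of the commit, and note that FIRST 1 is itself dialect-specific syntax:

    import java.sql.Connection;
    import java.sql.SQLException;

    final class IdentifierQuoting {

        // Identifiers cannot be bound as parameters; double any embedded quote characters
        // and wrap the name in the driver's identifier quote string instead.
        static String firstRowQuery(Connection connection, String tableName) throws SQLException {
            String quote = connection.getMetaData().getIdentifierQuoteString();
            String quoted = quote + tableName.replace(quote, quote + quote) + quote;
            return "SELECT FIRST 1 * FROM " + quoted;
        }
    }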


@@ -16,6 +16,7 @@ Application.workspaceInvalidMessage=Selected workspace is not valid; choose a di
Application.workspaceNotExiste=Workspace not exist, cannot start instances in this path.
Application.doNotSupportJavaVersionYetPoweredbyTalend=The Studio does not support Java 8. Java 7 is the recommended JVM version to be used. Refer to the following KB article on Talend Help Center for more information (requires a MyTalend account registration):
Application.doNotSupportJavaVersionYetNoPoweredbyTalend=The Studio does not support Java 8. Java 7 is the recommended JVM version to be used.
Application.InstallingPatchesTaskName=Installing patches...
ApplicationActionBarAdvisor.menuFileLabel=&File
ApplicationActionBarAdvisor.menuEditLabel=&Edit
ApplicationActionBarAdvisor.navigateLabel=&Navigate


@@ -14,6 +14,7 @@ package org.talend.rcp.intro;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
@@ -21,6 +22,7 @@ import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.adaptor.EclipseStarter;
@@ -28,6 +30,8 @@ import org.eclipse.core.runtime.preferences.ConfigurationScope;
import org.eclipse.equinox.app.IApplication;
import org.eclipse.equinox.app.IApplicationContext;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.osgi.service.datalocation.Location;
@@ -288,6 +292,7 @@ public class Application implements IApplication {
}
private boolean installed = false;
private boolean installLocalPatches() {
try {
final boolean forceCheck = Boolean.getBoolean("talend.studio.localpatch.forcecheck");
@@ -319,7 +324,27 @@ public class Application implements IApplication {
boolean needRelaunch = false;
final PatchComponent patchComponent = PatchComponentHelper.getPatchComponent();
if (patchComponent != null) {
final boolean installed = patchComponent.install();
Shell shell = Display.getDefault().getActiveShell();
if (shell == null) {
shell = new Shell();
}
ProgressMonitorDialog dialog = new ProgressMonitorDialog(shell);
IRunnableWithProgress runnable = new IRunnableWithProgress() {
@Override
public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
monitor.beginTask(Messages.getString("Application.InstallingPatchesTaskName"), IProgressMonitor.UNKNOWN); //$NON-NLS-1$
installed = patchComponent.install(monitor);
}
};
try {
dialog.run(true, false, runnable);
} catch (InvocationTargetException | InterruptedException e) {
log.log(Level.ERROR, e.getMessage());
}
if (installed) {
final String installedMessages = patchComponent.getInstalledMessages();
if (installedMessages != null) {
@@ -335,6 +360,7 @@ public class Application implements IApplication {
if (StringUtils.isNotEmpty(patchComponent.getFailureMessage())) {
log.log(Level.ERROR, patchComponent.getFailureMessage());
}
installed = false;
}
final ComponentsInstallComponent installComponent = LocalComponentInstallHelper.getComponent();
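The Application change wraps local patch installation in a JFace ProgressMonitorDialog so the studio shows an indeterminate progress bar (the new Application.InstallingPatchesTaskName message) while patchComponent.install(monitor) runs on a worker thread. The shape of that pattern, as a sketch rather than the actual method (installPatches here is a hypothetical stand-in, and the real code also finds or creates a Shell, logs failures, and records the result in a field):

    import java.lang.reflect.InvocationTargetException;
    import org.eclipse.core.runtime.IProgressMonitor;
    import org.eclipse.jface.dialogs.ProgressMonitorDialog;
    import org.eclipse.jface.operation.IRunnableWithProgress;
    import org.eclipse.swt.widgets.Shell;

    final class PatchInstallProgress {

        // fork=true runs the task off the UI thread; cancelable=false hides the cancel button;
        // IProgressMonitor.UNKNOWN gives an indeterminate progress bar.
        static void run(Shell shell, Runnable installPatches) throws InvocationTargetException, InterruptedException {
            IRunnableWithProgress runnable = new IRunnableWithProgress() {
                @Override
                public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                    monitor.beginTask("Installing patches...", IProgressMonitor.UNKNOWN);
                    try {
                        installPatches.run(); // hypothetical stand-in for patchComponent.install(monitor)
                    } finally {
                        monitor.done();
                    }
                }
            };
            new ProgressMonitorDialog(shell).run(true, false, runnable);
        }
    }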

Some files were not shown because too many files have changed in this diff.