Compare commits

..

6 Commits

Author | SHA1 | Message | Date
Christophe Le Saec | 451f25526a | TDI-42230 remove SNAPSHOT | 2019-07-04 09:58:46 +02:00
Christophe Le Saec | b4c77184f3 | fix(TDI-42230): custom lib on copy with version | 2019-07-03 10:28:44 +02:00
Christophe Le Saec | 4efe837b40 | fix(TDI-42230): put filecopy library directly in studio | 2019-07-02 10:17:28 +02:00
Christophe Le Saec | 2164c692d2 | fix(TDI-42230): use filecopy from nexus in studio | 2019-06-28 16:56:38 +02:00
Christophe Le Saec | c1de5f5fc0 | fix(TDI-42230): filecopy.jar removed from git | 2019-06-28 09:53:57 +02:00
Christophe Le Saec | bd31878bcd | fix(TDI-42230): use the standard java.nio.Files class | 2019-06-27 17:17:23 +02:00
852 changed files with 10125 additions and 19883 deletions

View File

@@ -1,57 +0,0 @@
---
version: 6.3.1
module: https://talend.poolparty.biz/coretaxonomy/42
product:
- https://talend.poolparty.biz/coretaxonomy/23
---
# TPS-3694
| Info | Value |
| ---------------- | ---------------- |
| Patch Name | Patch\_20200107\_TPS\-3694\_v1\-6.3.1 |
| Release Date | 2020-01-07 |
| Target Version | Talend-Studio-20161216_1026-V6.3.1 |
| Product affected | Talend Studio |
## Introduction
This is a self-contained patch.
**NOTE**: For information on how to obtain this patch, reach out to your Support contact at Talend.
## Fixed issues
This patch contains the following fixes:
- TPS-3694 [6.3.1] Getting Compilation error while enabling store on Disk in tmap with dynamic datatype columns (TDI-43446)
## Prerequisites
Consider the following requirements for your system:
- Talend Studio 6.3.1 must be installed.
## Installation
### Installing the patch using Software update
1) Log on to TAC and go to Configuration->Software Update, then enter the correct values and save, referring to the documentation: https://help.talend.com/reader/f7Em9WV_cPm2RRywucSN0Q/j9x5iXV~vyxMlUafnDejaQ
2) Go to the Software Update page, where the new patch is listed. From here, the patch can be downloaded into the Nexus repository.
3) On the Studio side: log on to Studio in remote mode. The Update button is displayed on the logon page; click it to install the patch.
### Installing the patch using Talend Studio
1) Create a folder named "patches" under your Studio installation directory and copy the patch .zip file into this folder.
2) Restart your Studio: when a window pops up, click OK to install the patch; alternatively, restart the CommandLine and the patch is installed automatically.
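For example (a minimal sketch only; the installation path below is hypothetical, and the .zip file name is assumed to match the patch name above):
cd C:\Talend\Talend-Studio-20161216_1026-V6.3.1
mkdir patches
copy Patch_20200107_TPS-3694_v1-6.3.1.zip patches\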
### Installing the patch using Commandline
Execute the following commands:
1. Talend-Studio-win-x86_64.exe -nosplash -application org.talend.commandline.CommandLine -consoleLog -data commandline-workspace startServer -p 8002 --talendDebug
2. initRemote {tac_url} -ul {TAC login username} -up {TAC login password}
3. checkAndUpdate -tu {TAC login username} -tup {TAC login password}
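For illustration, the three steps above might look like the following session; the TAC URL and credentials are placeholders, not values shipped with this patch:
Talend-Studio-win-x86_64.exe -nosplash -application org.talend.commandline.CommandLine -consoleLog -data commandline-workspace startServer -p 8002 --talendDebug
initRemote http://tac.example.com:8080/org.talend.administrator -ul admin@example.com -up admin_pass
checkAndUpdate -tu admin@example.com -tup admin_pass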

View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>org.talend.help.tos.components.feature</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.pde.FeatureBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.FeatureNature</nature>
</natures>
</projectDescription>

View File

@@ -1 +0,0 @@
bin.includes = feature.xml

View File

@@ -1,94 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<feature
id="org.talend.help.tos.components.feature"
label="Talend COMPONENTS help feature"
version="7.3.1.qualifier"
provider-name=".Talend SA.">
<description url="http://www.example.com/description">
[Enter Feature Description here.]
</description>
<copyright url="http://www.example.com/copyright">
[Enter Copyright Description here.]
</copyright>
<license url="http://www.example.com/license">
[Enter License Description here.]
</license>
<url>
<update label="CORE Talend update site for milestones" url="http://talendforge.org/core/updatesite/testing/"/>
</url>
<plugin
id="org.talend.help.azurestorage"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.filedelimited"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.filterrow"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.googledrive"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.jdbc"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.jira"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.marketo"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.salesforce"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.snowflake"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="org.talend.help.splunk"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
</feature>

View File

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.talend.studio</groupId>
<artifactId>tdi-studio-se</artifactId>
<version>7.3.1-SNAPSHOT</version>
<relativePath>../../../</relativePath>
</parent>
<artifactId>org.talend.help.tos.components.feature</artifactId>
<packaging>eclipse-feature</packaging>
</project>

View File

@@ -9,9 +9,9 @@
</url>
<requires>
<import feature="org.talend.tos.feature" version="0.0.0" match="greaterOrEqual"/>
<import feature="org.talend.help.di.feature" version="0.0.0" match="greaterOrEqual"/>
</requires>
<plugin id="org.talend.designer.maven.tos" download-size="0" install-size="0" version="0.0.0" fragment="true"/>
<plugin id="org.talend.libraries.mdm.webservice.ce" download-size="0" install-size="0" version="0.0.0" fragment="true"/>
<plugin id="org.talend.presentation.onboarding" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.presentation.onboarding.nl" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.presentation.onboarding.resource" download-size="0" install-size="0" version="0.0.0" unpack="false"/>

View File

@@ -55,6 +55,5 @@
<plugin id="org.talend.testutils" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.updates.runtime.test" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.utils.test" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.sdk.component.studio-integration.test" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="test.all.test.suite" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
</feature>

View File

@@ -53,6 +53,16 @@
<plugin id="org.talend.designer.xmlmap.nl" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.expressionbuilder" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.expressionbuilder.nl" download-size="0" install-size="0" version="0.0.0" fragment="true" unpack="false"/>
<plugin id="org.talend.help.azurestorage" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.filedelimited" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.filterrow" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.googledrive" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.jdbc" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.jira" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.marketo" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.salesforce" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.snowflake" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.help.splunk" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.repository.generic" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
<plugin id="org.talend.sdk.component.studio-integration" download-size="0" install-size="0" version="0.0.0" unpack="false"/>
</feature>

View File

@@ -22,7 +22,6 @@ Require-Bundle: org.talend.core.repository,
org.apache.log4j,
org.talend.components.api,
org.talend.daikon,
org.talend.daikon.crypto.utils,
org.talend.designer.core.generic
Eclipse-LazyStart: true
Bundle-ClassPath: .

View File

@@ -59,76 +59,88 @@ if((codePart.equals(ECodePart.END))&&(stat || logstashCurrent)){
List<String> needToEndConnNames = new ArrayList<String>();
INode nextNode = node.getOutgoingConnections(EConnectionType.ITERATE).get(0).getTarget();
NodeUtil.fillConnectionsForStat(needToEndConnNames, nextNode);
if(!needToEndConnNames.isEmpty()) {
if(stat && logstashCurrent) {
%>
runStat.updateStatAndLog(execStat,enableLogStash,iterateId,2,0<%for(String connName : needToEndConnNames){%>,"<%=connName%>"<%}%>);
%>
<%if(stat) {%>
if(execStat){
<%
for(String connName : needToEndConnNames){
%>
runStat.updateStatOnConnection("<%=connName%>"+iterateId,2, 0);
<%
} else {
if(stat) {%>
if(execStat){
runStat.updateStatOnConnection(iterateId,2,0<%for(String connName : needToEndConnNames){%>,"<%=connName%>"<%}%>);
}
<%}%>
<%if(logstashCurrent) {//now only finish the log, not send, TODO%>
if(enableLogStash){
runStat.log(iterateId,2,0<%for(String connName : needToEndConnNames){%>,"<%=connName%>"<%}%>);
}
<%
}
}
%>
}
}
if(connSet.size()>0) {
if(stat && logstashCurrent && (connSet.size()==1)) {//the most common case, write this ugly logic for 65535 issue
for(IConnection con:connSet){
INode source = con.getSource();
String sourceNodeId = source.getUniqueName();
String sourceNodeComponent = source.getComponent().getName();
for (INode jobStructureCatcher : jobCatcherNodes) {
%>
if(runStat.updateStatAndLog(execStat,enableLogStash,resourceMap,iterateId,"<%=con.getUniqueName()%>",2,0,
<%=jobStructureCatcher.getUniqueName()%>,"<%=sourceNodeId%>","<%=sourceNodeComponent%>","<%=node.getUniqueName()%>","<%=node.getComponent().getName()%>","<%="REJECT".equals(con.getConnectorName()) ? "reject" : "output"%>")) {
<%=jobStructureCatcher.getDesignSubjobStartNode().getUniqueName() %>Process(globalMap);
}
<%
break;
}
}
} else {
if(stat){
%>
if(execStat){
runStat.updateStat(resourceMap,iterateId,2,0<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
}
<%
<%}%>
<%if(logstashCurrent) {//now only finish the log, not send, TODO%>
if(enableLogStash){
<%
for(String connName : needToEndConnNames){
%>
runStat.logStatOnConnection("<%=connName%>"+iterateId,2, 0);
<%
}
%>
}
<%
}
%>
<%
}
if(connSet.size()>0){
for(IConnection con:connSet){
INode source = con.getSource();
String sourceNodeId = source.getUniqueName();
String sourceNodeComponent = source.getComponent().getName();
%>
<%if(stat) {%>
if(execStat){
if(resourceMap.get("inIterateVComp") == null || !((Boolean)resourceMap.get("inIterateVComp"))){
runStat.updateStatOnConnection("<%=con.getUniqueName()%>"+iterateId,2, 0);
}
}
<%}%>
if(logstashCurrent){
%>
if(enableLogStash) {
<%
for(IConnection con:connSet){
INode source = con.getSource();
String sourceNodeId = source.getUniqueName();
String sourceNodeComponent = source.getComponent().getName();
<%if(logstashCurrent) {%>
if(enableLogStash){
if(resourceMap.get("inIterateVComp") == null || !((Boolean)resourceMap.get("inIterateVComp"))){
<%
for (INode jobStructureCatcher : jobCatcherNodes) {
%>
if(runStat.log(resourceMap,iterateId,"<%=con.getUniqueName()%>",2,0,
<%=jobStructureCatcher.getUniqueName()%>,"<%=sourceNodeId%>","<%=sourceNodeComponent%>","<%=node.getUniqueName()%>","<%=node.getComponent().getName()%>","<%="REJECT".equals(con.getConnectorName()) ? "reject" : "output"%>")) {
<%=jobStructureCatcher.getDesignSubjobStartNode().getUniqueName() %>Process(globalMap);
}
RunStat.StatBean talend_statebean = runStat.logStatOnConnection("<%=con.getUniqueName()%>"+iterateId,2, 0);
<%=jobStructureCatcher.getUniqueName() %>.addConnectionMessage(
"<%=sourceNodeId%>",
"<%=sourceNodeComponent%>",
false,
"<%="REJECT".equals(con.getConnectorName()) ? "reject" : "output"%>",
"<%=con.getUniqueName()%>",
talend_statebean.getNbLine(),
talend_statebean.getStartTime(),
talend_statebean.getEndTime()
);
<%=jobStructureCatcher.getUniqueName() %>.addConnectionMessage(
"<%=node.getUniqueName()%>",
"<%=node.getComponent().getName()%>",
true,
"input",
"<%=con.getUniqueName()%>",
talend_statebean.getNbLine(),
talend_statebean.getStartTime(),
talend_statebean.getEndTime()
);
<%=jobStructureCatcher.getDesignSubjobStartNode().getUniqueName() %>Process(globalMap);
<%
break;
}
}
%>
}
<%
%>
}
}
}
<%}%>
<%
}
}
}
%>
@@ -329,26 +341,14 @@ end_Hash.put("<%=node.getUniqueName() %>", System.currentTimeMillis());
}
// Else, that means the component is a ON_COMPONENT_OK virtual component typed.
}
if(generateMethodCall) {
/*check if parralel iterate call not finished from this component */
boolean parallelIterate = false;
for (IConnection iterateConn : iterateConnSet) {
parallelIterate = "true".equals(ElementParameterParser.getValue(iterateConn, "__ENABLE_PARALLEL__"));
if (parallelIterate) {
if (codePart.equals(ECodePart.END)) {
String iterateTargetNodeName = iterateConn.getTarget().getUniqueName();
%>
mtp_<%=iterateTargetNodeName %>.waitForEndOfQueue();
<%
}
}
}
%>
<%=outgoingConn.getTarget().getUniqueName() %>Process(globalMap);
<%
}
}
}
if (outgoingConn.getLineStyle().equals(EConnectionType.RUN_IF)) {
%>
@@ -356,27 +356,12 @@ end_Hash.put("<%=node.getUniqueName() %>", System.currentTimeMillis());
<%//send the true status to socket
if(stat){
%>
if(execStat){
if(execStat){
runStat.updateStatOnConnection("<%=outgoingConn.getUniqueName() %>", 0, "true");
}
<%
}
/*check if parralel iterate call not finished from this component */
boolean parallelIterate = false;
for (IConnection iterateConn : iterateConnSet) {
parallelIterate = "true".equals(ElementParameterParser.getValue(iterateConn, "__ENABLE_PARALLEL__"));
if (parallelIterate) {
if (codePart.equals(ECodePart.END)) {
String iterateTargetNodeName = iterateConn.getTarget().getUniqueName();
%>
mtp_<%=iterateTargetNodeName %>.waitForEndOfQueue();
<%
}
}
}
%>
<%=outgoingConn.getTarget().getUniqueName() %>Process(globalMap);
}
<%}%>
<%=outgoingConn.getTarget().getUniqueName() %>Process(globalMap);
}
<%
//send the false status to socket
@@ -403,7 +388,7 @@ end_Hash.put("<%=node.getUniqueName() %>", System.currentTimeMillis());
parallelIterate = "true".equals(ElementParameterParser.getValue(iterateConn, "__ENABLE_PARALLEL__"));
if (parallelIterate) {
if (codePart.equals(ECodePart.END)) {
String iterateTargetNodeName = iterateConn.getTarget().getUniqueName();
String iterateTargetNodeName = iterateConn.getTarget().getUniqueName();
%>
mtp_<%=iterateTargetNodeName %>.waitForEndOfQueue();

View File

@@ -146,82 +146,83 @@
boolean logstashCurrent = !cid.startsWith("tJobStructureCatcher") && !cid.startsWith("talend") && enableLogStash;
if ((codePart.equals(ECodePart.BEGIN))&&(stat || logstashCurrent)&&connSet.size()>0) {
if(containsTPartitioner) {
%>
if(<%if(stat){%>execStat<%}%><%if(stat && logstashCurrent){%> || <%}%><%if(logstashCurrent){%>enableLogStash<%}%>) {
runStat.updateStatOnConnectionAndLog(resourceMap,globalMap,iterateLoop,iterateId,<%if(stat){%>execStat<%} else {%>false<%}%>,enableLogStash,0<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
}
<%
} else {
if(stat && logstashCurrent) {
%>
runStat.updateStatAndLog(execStat,enableLogStash,resourceMap,iterateId,0,0<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
<%
} else {
if(stat) {
%>
if(execStat) {
runStat.updateStatOnConnection(resourceMap,iterateId,0,0<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
}
<%
for(IConnection con:connSet){
%>
if (<%if(stat){%>execStat<%}%><%if(stat && logstashCurrent){%> || <%}%><%if(logstashCurrent){%>enableLogStash<%}%>) {
if(resourceMap.get("inIterateVComp") == null){
<%if(containsTPartitioner){%>
java.util.concurrent.ConcurrentHashMap<Object, Object> concurrentHashMap_<%=con.getUniqueName() %> = (java.util.concurrent.ConcurrentHashMap) globalMap.get("concurrentHashMap");
concurrentHashMap_<%=con.getUniqueName() %>.putIfAbsent("<%=con.getUniqueName() %>" + iterateLoop,new java.util.concurrent.atomic.AtomicInteger(0));
java.util.concurrent.atomic.AtomicInteger stats_<%=con.getUniqueName() %> = (java.util.concurrent.atomic.AtomicInteger) concurrentHashMap_<%=con.getUniqueName() %>.get("<%=con.getUniqueName() %>" + iterateLoop);
int step_<%=con.getUniqueName() %> = stats_<%=con.getUniqueName() %>.incrementAndGet()<=1?0:1;
<%if(stat) {%>
if(execStat) {
runStat.updateStatOnConnection("<%=con.getUniqueName() %>"+iterateId, step_<%=con.getUniqueName()%>, 0);
}
<%}%>
<%if(logstashCurrent) {%>
if(enableLogStash) {
runStat.logStatOnConnection("<%=con.getUniqueName()%>"+iterateId, step_<%=con.getUniqueName()%>, 0);
}
<%}%>
<%}else{%>
<%if(stat) {%>
if(execStat) {
runStat.updateStatOnConnection("<%=con.getUniqueName() %>"+iterateId, 0, 0);
}
<%}%>
<%if(logstashCurrent) {%>
if(enableLogStash) {
runStat.logStatOnConnection("<%=con.getUniqueName()%>"+iterateId, 0, 0);
}
<%}%>
<%}%>
}
if(logstashCurrent) {
%>
if(enableLogStash) {
runStat.log(resourceMap,iterateId,0,0<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
}
<%
}
}
}
<%
}
}
if((codePart.equals(ECodePart.MAIN))&&(stat || logstashCurrent)&&connSet.size()>0){
if(!node.getComponent().useMerge()) {
if(stat && logstashCurrent) {
%>
runStat.updateStatAndLog(execStat,enableLogStash,iterateId,1,1<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
<%
} else {
if(stat) {
%>
if(execStat){
runStat.updateStatOnConnection(iterateId,1,1<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
}
<%
for(IConnection con:connSet){
%>
//<%=con.getUniqueName()%>
//<%=(String)codeGenArgument.getIncomingName()%>
<%if (!node.getComponent().useMerge()) {%>
<%if(stat) {%>
if(execStat){
runStat.updateStatOnConnection("<%=con.getUniqueName() %>"+iterateId,1, 1);
}
<%}%>
if(logstashCurrent) {
%>
if(enableLogStash) {
runStat.log(iterateId,1,1<%for(IConnection con : connSet){%>,"<%=con.getUniqueName()%>"<%}%>);
}
<%
<%if(logstashCurrent) {%>
if(enableLogStash) {
runStat.logStatOnConnection("<%=con.getUniqueName() %>"+iterateId,1, 1);
}
}
} else {
for(IConnection connection:connSet){
if(connection.getUniqueName().equals((String)codeGenArgument.getIncomingName())){
if(stat && logstashCurrent) {
%>
runStat.updateStatAndLog(execStat,enableLogStash,iterateId,1,1<%for(IConnection con : connSet){if(con.getUniqueName().equals((String)codeGenArgument.getIncomingName())){%>,"<%=con.getUniqueName()%>"<%}}%>);
<%
} else {
if(stat) {%>
if(execStat){
runStat.updateStatOnConnection(iterateId,1,1<%for(IConnection con : connSet){if(con.getUniqueName().equals((String)codeGenArgument.getIncomingName())){%>,"<%=con.getUniqueName()%>"<%}}%>);
}
<%}
if(logstashCurrent) {%>
if(enableLogStash) {
runStat.log(iterateId,1,1<%for(IConnection con : connSet){if(con.getUniqueName().equals((String)codeGenArgument.getIncomingName())){%>,"<%=con.getUniqueName()%>"<%}}%>);
}
<%}
}
<%}%>
<%
} else if(con.getUniqueName().equals((String)codeGenArgument.getIncomingName())){
%>
<%if(stat) {%>
if(execStat){
runStat.updateStatOnConnection("<%=con.getUniqueName() %>"+iterateId,1, 1);
}
}
<%}%>
<%if(logstashCurrent) {%>
if(enableLogStash) {
runStat.logStatOnConnection("<%=con.getUniqueName() %>"+iterateId,1, 1);
}
<%}%>
<%}%>
<%
}
}
@@ -250,7 +251,7 @@
for (INode jobStructureCatcher : jobCatcherNodes) {
%>
if(enableLogStash) {
<%=jobStructureCatcher.getUniqueName() %>.addCM("<%=node.getUniqueName()%>", "<%=node.getComponent().getName()%>");
<%=jobStructureCatcher.getUniqueName() %>.addComponentMessage("<%=node.getUniqueName()%>", "<%=node.getComponent().getName()%>");
<%=jobStructureCatcher.getDesignSubjobStartNode().getUniqueName() %>Process(globalMap);
}
<%
@@ -265,37 +266,46 @@
List<String> needToStartConnNames = new ArrayList<String>();
INode nextNode = node.getOutgoingConnections(EConnectionType.ITERATE).get(0).getTarget();
NodeUtil.fillConnectionsForStat(needToStartConnNames, nextNode);
if(needToStartConnNames.isEmpty()) {
//do nothing
} else if(containsTPartitioner){
%>
if(<%if(stat){%>execStat<%}%><%if(stat && logstashCurrent){%> || <%}%><%if(logstashCurrent){%>enableLogStash<%}%>){
runStat.updateStatOnConnectionAndLog(globalMap,iterateLoop,iterateId,<%if(stat){%>execStat<%} else {%>false<%}%>,enableLogStash,0<%for(String connName : needToStartConnNames){%>,"<%=connName%>"<%}%>);
}
<%
} else {
if(stat && logstashCurrent) {
%>
runStat.updateStatAndLog(execStat,enableLogStash,iterateId,0,0<%for(String connName : needToStartConnNames){%>,"<%=connName%>"<%}%>);
<%
} else {
if(stat){
%>
if(execStat){
runStat.updateStatOnConnection(iterateId,0,0<%for(String connName : needToStartConnNames){%>,"<%=connName%>"<%}%>);
}
<%
}
%>
<%if(logstashCurrent) {%>
if(enableLogStash){
runStat.log(iterateId,0,0<%for(String connName : needToStartConnNames){%>,"<%=connName%>"<%}%>);
}
<%
}
for(String connName : needToStartConnNames){
%>
<%if(containsTPartitioner){%>
java.util.concurrent.ConcurrentHashMap<Object, Object> concurrentHashMap_<%=connName%> = (java.util.concurrent.ConcurrentHashMap) globalMap.get("concurrentHashMap");
concurrentHashMap_<%=connName%>.putIfAbsent("<%=connName%>" + iterateLoop,new java.util.concurrent.atomic.AtomicInteger(0));
java.util.concurrent.atomic.AtomicInteger stats_<%=connName%> = (java.util.concurrent.atomic.AtomicInteger) concurrentHashMap_<%=connName%>.get("<%=connName%>" + iterateLoop);
int step_<%=connName %> = stats_<%=connName%>.incrementAndGet()<=1?0:1;
<%if(stat) {%>
if(execStat) {
runStat.updateStatOnConnection("<%=connName%>"+iterateId, step_<%=connName%>, 0);
}
<%}%>
<%if(logstashCurrent) {%>
if(enableLogStash) {
runStat.logStatOnConnection("<%=connName%>"+iterateId, step_<%=connName%>, 0);
}
<%}%>
<%}else{%>
<%if(stat) {%>
if(execStat) {
runStat.updateStatOnConnection("<%=connName%>"+iterateId, 0, 0);
}
<%}%>
<%if(logstashCurrent) {%>
if(enableLogStash) {
runStat.logStatOnConnection("<%=connName%>"+iterateId, 0, 0);
}
<%}%>
<%}%>
<%
}
%>
}
<%
}else if(codePart.equals(ECodePart.MAIN)){
%>
resourceMap.put("inIterateVComp", true);

View File

@@ -54,9 +54,7 @@
jobFolderName = jobFolderName + JavaResourcesHelper.getJobFolderName(process.getName(), process.getVersion());
String jobClassPackageFolder = codeGenArgument.getCurrentProjectName().toLowerCase() + '/' + jobFolderName;
boolean isLog4jEnabled = ("true").equals(ElementParameterParser.getValue(process, "__LOG4J_ACTIVATE__"));//log4j enable
boolean isLog4j2Enabled = ("true").equals(ElementParameterParser.getValue(process, "__LOG4J2_ACTIVATE__"));//log4j2 enable
boolean isLog4j1Enabled = !isLog4j2Enabled;//log4j1 enable
boolean isLog4jEnabled = ("true").equals(ElementParameterParser.getValue(process, "__LOG4J_ACTIVATE__"));
boolean exist_tParallelize = false;
List<? extends INode> tParallelizeList = process.getNodesOfType("tParallelize");
@@ -447,11 +445,9 @@
lastStr = "";
}
}
enableLogStash = "true".equalsIgnoreCase(System.getProperty("monitoring"));
<%if(isLog4jEnabled){%>
if(!"".equals(log4jLevel)){
<%if(isLog4j1Enabled){%>
if("trace".equalsIgnoreCase(log4jLevel)){
log.setLevel(org.apache.log4j.Level.TRACE);
}else if("debug".equalsIgnoreCase(log4jLevel)){
@@ -468,26 +464,6 @@
log.setLevel(org.apache.log4j.Level.OFF);
}
org.apache.log4j.Logger.getRootLogger().setLevel(log.getLevel());
<%}%>
<%if(isLog4j2Enabled){%>
if("trace".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.TRACE);
}else if("debug".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.DEBUG);
}else if("info".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.INFO);
}else if("warn".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.WARN);
}else if("error".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.ERROR);
}else if("fatal".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.FATAL);
}else if ("off".equalsIgnoreCase(log4jLevel)){
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), org.apache.logging.log4j.Level.OFF);
}
org.apache.logging.log4j.core.config.Configurator.setLevel(org.apache.logging.log4j.LogManager.getRootLogger().getName(), log.getLevel());
<%}%>
}
log.info("TalendJob: '<%=codeGenArgument.getJobName()%>' - Start.");
<%}%>
@@ -517,18 +493,7 @@
properties_<%=jobCatcherNode.getUniqueName()%>.setProperty("appender.file.maxsize", "52428800");
properties_<%=jobCatcherNode.getUniqueName()%>.setProperty("appender.file.maxbackup", "20");
properties_<%=jobCatcherNode.getUniqueName()%>.setProperty("host", "false");
System.getProperties().stringPropertyNames().stream()
.filter(it -> it.startsWith("monitoring.audit.logger.properties."))
.forEach(key -> properties_<%=jobCatcherNode.getUniqueName()%>.setProperty(key.substring("monitoring.audit.logger.properties.".length()), System.getProperty(key)));
<%if(isLog4j1Enabled) {%>
org.apache.log4j.Logger.getLogger("audit").setLevel(org.apache.log4j.Level.DEBUG);
<%}%>
<%if(isLog4j2Enabled) {%>
org.apache.logging.log4j.core.config.Configurator.setLevel("audit", org.apache.logging.log4j.Level.DEBUG);
<%}%>
auditLogger_<%=jobCatcherNode.getUniqueName()%> = org.talend.job.audit.JobEventAuditLoggerFactory.createJobAuditLogger(properties_<%=jobCatcherNode.getUniqueName()%>);
}
<%
@@ -803,25 +768,6 @@ if(execStat) {
startTime = System.currentTimeMillis();
<%
boolean isCustomProxySettingsUsed = false;
List<? extends INode> proxyNodes = process.getNodesOfType("tSetProxy");
for (int i = 0; i < proxyNodes.size(); i++) {
INode proxyNode = proxyNodes.get(i);
String useCustomProxyCheckboxValue = ElementParameterParser.getValue(proxyNode, "__PROXY_GLOBAL__");
if (useCustomProxyCheckboxValue != null && !"true".equals(useCustomProxyCheckboxValue)) {
isCustomProxySettingsUsed = true;
break;
}
}
if (isCustomProxySettingsUsed) {
%>
java.net.ProxySelector proxySelectorOnStartJob = java.net.ProxySelector.getDefault();
<%
}
//1. send the begin msg
for (INode statCatcherNode : process.getNodesOfType("tStatCatcher")) {
@@ -1044,13 +990,6 @@ this.globalResumeTicket = true;//to run tPostJob
<%= ElementParameterParser.getValue(process, "__FOOTER_CODE__")%>
<%
if (isCustomProxySettingsUsed) {
%>
java.net.ProxySelector.setDefault(proxySelectorOnStartJob);
<%
}
%>
end = System.currentTimeMillis();
if (watch) {
@@ -1570,10 +1509,12 @@ if (execStat) {
}
} else if (arg.startsWith("--log4jLevel=")) {
log4jLevel = arg.substring(13);
} else if (arg.startsWith("--monitoring") && arg.contains("=")) {//for trunjob call
final int equal = arg.indexOf('=');
final String key = arg.substring("--".length(), equal);
System.setProperty(key, arg.substring(equal + 1));
} else if (arg.startsWith("--monitoring=")) {//for trunjob call
enableLogStash = "true".equalsIgnoreCase(arg.substring(13));
}
if(!enableLogStash) {
enableLogStash = "true".equalsIgnoreCase(System.getProperty("monitoring"));
}
}

View File

@@ -69,140 +69,145 @@ if(hasInput){
}
}
}
boolean hasValidInput = inputConn!=null;
if (hasValidInput) {
IMetadataTable metadata = null;
List<IMetadataTable> metadatas = node.getMetadataList();
boolean haveValidNodeMetadata = ((metadatas != null) && (metadatas.size() > 0) && (metadata = metadatas.get(0)) != null);
if (hasValidInput && haveValidNodeMetadata) {
List<IMetadataColumn> input_columnList = inputConn.getMetadataTable().getListColumns();
if(input_columnList == null) {
input_columnList = new ArrayList<IMetadataColumn>();
}
// add incoming (not present) columns to enforcer for this comps
if (cid.contains("tDataStewardship") || cid.contains("tMarkLogic")){
%>
boolean shouldCreateRuntimeSchemaForIncomingNode = false;
<%
for (int i = 0; i < input_columnList.size(); i++) {
if(!input_columnList.get(i).getTalendType().equals("id_Dynamic")) {
if (input_columnList!=null && !input_columnList.isEmpty()) {
// add incoming (not present) columns to enforcer for this comps
if (cid.contains("tDataStewardship") || cid.contains("tMarkLogic")){
%>
if (incomingEnforcer_<%=cid%>.getDesignSchema().getField("<%=input_columnList.get(i)%>") == null){
incomingEnforcer_<%=cid%>.addIncomingNodeField("<%=input_columnList.get(i)%>", ((Object) <%=inputConn.getName()%>.<%=input_columnList.get(i)%>).getClass().getCanonicalName());
shouldCreateRuntimeSchemaForIncomingNode = true;
boolean shouldCreateRuntimeSchemaForIncomingNode = false;
<%
for (int i = 0; i < input_columnList.size(); i++) {
if(!input_columnList.get(i).getTalendType().equals("id_Dynamic")) {
%>
if (incomingEnforcer_<%=cid%>.getDesignSchema().getField("<%=input_columnList.get(i)%>") == null){
incomingEnforcer_<%=cid%>.addIncomingNodeField("<%=input_columnList.get(i)%>", ((Object) <%=inputConn.getName()%>.<%=input_columnList.get(i)%>).getClass().getCanonicalName());
shouldCreateRuntimeSchemaForIncomingNode = true;
}
<%
}
<%
}
}
%>
if (shouldCreateRuntimeSchemaForIncomingNode){
incomingEnforcer_<%=cid%>.createRuntimeSchema();
}
<%
}
// If there are dynamic columns in the schema, they need to be
// initialized into the runtime schema of the actual IndexedRecord
// provided to the component.
int dynamicPos = -1;
for (int i = 0; i < input_columnList.size(); i++) {
if (input_columnList.get(i).getTalendType().equals("id_Dynamic")) {
dynamicPos = i;
break;
}
}
if (dynamicPos != -1) {
%>
if (!incomingEnforcer_<%=cid%>.areDynamicFieldsInitialized()) {
// Initialize the dynamic columns when they are first encountered.
for (routines.system.DynamicMetadata dm_<%=cid%> : <%=inputConn.getName()%>.<%=input_columnList.get(dynamicPos).getLabel()%>.metadatas) {
incomingEnforcer_<%=cid%>.addDynamicField(
dm_<%=cid%>.getName(),
dm_<%=cid%>.getType(),
dm_<%=cid%>.getLogicalType(),
dm_<%=cid%>.getFormat(),
dm_<%=cid%>.getDescription(),
dm_<%=cid%>.isNullable());
}
incomingEnforcer_<%=cid%>.createRuntimeSchema();
}
<%
}
%>
incomingEnforcer_<%=cid%>.createNewRecord();
<%
for (int i = 0; i < input_columnList.size(); i++) { // column
IMetadataColumn column = input_columnList.get(i);
if (dynamicPos != i) {
%>
//skip the put action if the input column doesn't appear in component runtime schema
if (incomingEnforcer_<%=cid%>.getRuntimeSchema().getField("<%=input_columnList.get(i)%>") != null){
incomingEnforcer_<%=cid%>.put("<%=column.getLabel()%>", <%=inputConn.getName()%>.<%=column.getLabel()%>);
}
<%
} else {
%>
for (int i = 0; i < <%=inputConn.getName()%>.<%=column.getLabel()%>.getColumnCount(); i++) {
incomingEnforcer_<%=cid%>.put(<%=inputConn.getName()%>.<%=column.getLabel()%>.getColumnMetadata(i).getName(),
<%=inputConn.getName()%>.<%=column.getLabel()%>.getColumnValue(i));
if (shouldCreateRuntimeSchemaForIncomingNode){
incomingEnforcer_<%=cid%>.createRuntimeSchema();
}
<%
}
} // column
// If there are dynamic columns in the schema, they need to be
// initialized into the runtime schema of the actual IndexedRecord
// provided to the component.
// If necesary, generate the code to handle outgoing connections.
// TODO: For now, this can only handle one outgoing record for
// each incoming record. To handle multiple outgoing records, code
// generation needs to occur in component_begin in order to open
// a for() loop.
int dynamicPos = -1;
for (int i = 0; i < input_columnList.size(); i++) {
if (input_columnList.get(i).getTalendType().equals("id_Dynamic")) {
dynamicPos = i;
break;
}
}
// There will be a ClassCastException if the output component does
// not implement WriterWithFeedback, but permits outgoing
// connections.
ComponentProperties componentProps = node.getComponentProperties();
ProcessPropertiesGenerator generator = new ProcessPropertiesGenerator(cid, component);
List<Component.CodegenPropInfo> propsToProcess = component.getCodegenPropInfos(componentProps);
for (Component.CodegenPropInfo propInfo : propsToProcess) { // propInfo
List<NamedThing> properties = propInfo.props.getProperties();
for (NamedThing prop : properties) { // property
if (prop instanceof Property) { // if, only deal with valued Properties
Property property = (Property)prop;
if (property.getFlags() != null && (property.getFlags().contains(Property.Flags.DESIGN_TIME_ONLY) || property.getFlags().contains(Property.Flags.HIDDEN)))
continue;
if(property.getTaggedValue(IGenericConstants.DYNAMIC_PROPERTY_VALUE)!=null && Boolean.valueOf(String.valueOf(property.getTaggedValue(IGenericConstants.DYNAMIC_PROPERTY_VALUE)))) {
generator.setPropertyValues(property, propInfo, null, false, false);
if (dynamicPos != -1) {
%>
if (!incomingEnforcer_<%=cid%>.areDynamicFieldsInitialized()) {
// Initialize the dynamic columns when they are first encountered.
for (routines.system.DynamicMetadata dm_<%=cid%> : <%=inputConn.getName()%>.<%=input_columnList.get(dynamicPos).getLabel()%>.metadatas) {
incomingEnforcer_<%=cid%>.addDynamicField(
dm_<%=cid%>.getName(),
dm_<%=cid%>.getType(),
dm_<%=cid%>.getLogicalType(),
dm_<%=cid%>.getFormat(),
dm_<%=cid%>.getDescription(),
dm_<%=cid%>.isNullable());
}
incomingEnforcer_<%=cid%>.createRuntimeSchema();
}
<%
}
%>
incomingEnforcer_<%=cid%>.createNewRecord();
<%
for (int i = 0; i < input_columnList.size(); i++) { // column
IMetadataColumn column = input_columnList.get(i);
if (dynamicPos != i) {
%>
//skip the put action if the input column doesn't appear in component runtime schema
if (incomingEnforcer_<%=cid%>.getRuntimeSchema().getField("<%=input_columnList.get(i)%>") != null){
incomingEnforcer_<%=cid%>.put("<%=column.getLabel()%>", <%=inputConn.getName()%>.<%=column.getLabel()%>);
}
<%
} else {
%>
for (int i = 0; i < <%=inputConn.getName()%>.<%=column.getLabel()%>.getColumnCount(); i++) {
incomingEnforcer_<%=cid%>.put(<%=inputConn.getName()%>.<%=column.getLabel()%>.getColumnMetadata(i).getName(),
<%=inputConn.getName()%>.<%=column.getLabel()%>.getColumnValue(i));
}
<%
}
} // column
// If necesary, generate the code to handle outgoing connections.
// TODO: For now, this can only handle one outgoing record for
// each incoming record. To handle multiple outgoing records, code
// generation needs to occur in component_begin in order to open
// a for() loop.
// There will be a ClassCastException if the output component does
// not implement WriterWithFeedback, but permits outgoing
// connections.
ComponentProperties componentProps = node.getComponentProperties();
ProcessPropertiesGenerator generator = new ProcessPropertiesGenerator(cid, component);
List<Component.CodegenPropInfo> propsToProcess = component.getCodegenPropInfos(componentProps);
for (Component.CodegenPropInfo propInfo : propsToProcess) { // propInfo
List<NamedThing> properties = propInfo.props.getProperties();
for (NamedThing prop : properties) { // property
if (prop instanceof Property) { // if, only deal with valued Properties
Property property = (Property)prop;
if (property.getFlags() != null && (property.getFlags().contains(Property.Flags.DESIGN_TIME_ONLY) || property.getFlags().contains(Property.Flags.HIDDEN)))
continue;
if(property.getTaggedValue(IGenericConstants.DYNAMIC_PROPERTY_VALUE)!=null && Boolean.valueOf(String.valueOf(property.getTaggedValue(IGenericConstants.DYNAMIC_PROPERTY_VALUE)))) {
generator.setPropertyValues(property, propInfo, null, false, false);
}
}
}
} // property
} // propInfo
%>
org.apache.avro.generic.IndexedRecord data_<%=cid%> = incomingEnforcer_<%=cid%>.getCurrentRecord();
<%
boolean isParallelize ="true".equalsIgnoreCase(ElementParameterParser.getValue(node, "__PARALLELIZE__"));
if (isParallelize) {
String sourceComponentId = inputConn.getSource().getUniqueName();
if(sourceComponentId!=null && sourceComponentId.contains("tAsyncIn")) {
%>
globalMap.put(buffersSizeKey_<%=cid%>, buffersSize_<%=sourceComponentId%>);
<%
}
}
%>
writer_<%=cid%>.write(data_<%=cid%>);
nb_line_<%=cid %>++;
<%if(hasMainOutput){
} // property
} // propInfo
%>
if(!(writer_<%=cid%> instanceof org.talend.components.api.component.runtime.WriterWithFeedback)) {
// For no feedback writer,just pass the input record to the output
if (data_<%=cid%>!=null) {
outgoingMainRecordsList_<%=cid%> = java.util.Arrays.asList(data_<%=cid%>);
}
}
<%
org.apache.avro.generic.IndexedRecord data_<%=cid%> = incomingEnforcer_<%=cid%>.getCurrentRecord();
<%
boolean isParallelize ="true".equalsIgnoreCase(ElementParameterParser.getValue(node, "__PARALLELIZE__"));
if (isParallelize) {
String sourceComponentId = inputConn.getSource().getUniqueName();
if(sourceComponentId!=null && sourceComponentId.contains("tAsyncIn")) {
%>
globalMap.put(buffersSizeKey_<%=cid%>, buffersSize_<%=sourceComponentId%>);
<%
}
}
%>
writer_<%=cid%>.write(data_<%=cid%>);
nb_line_<%=cid %>++;
<%if(hasMainOutput){
%>
if(!(writer_<%=cid%> instanceof org.talend.components.api.component.runtime.WriterWithFeedback)) {
// For no feedback writer,just pass the input record to the output
if (data_<%=cid%>!=null) {
outgoingMainRecordsList_<%=cid%> = java.util.Arrays.asList(data_<%=cid%>);
}
}
<%
}
}
}
} // canStart

View File

@@ -139,11 +139,9 @@ for (INode node : process.getNodesOfType("tRESTClient")) {
}
boolean talendEsbJobFactory = actAsProvider || !process.getNodesOfType("tRouteInput").isEmpty();
boolean talendEsbJob = talendEsbJobFactory || actAsConsumer || ProcessorUtilities.isEsbJob(process);
boolean talendEsbJob = talendEsbJobFactory || actAsConsumer || ProcessorUtilities.isEsbJob(process.getId(), process.getVersion());
boolean isLog4jEnabled = ("true").equals(ElementParameterParser.getValue(process, "__LOG4J_ACTIVATE__"));//log4j enable
boolean isLog4j2Enabled = ("true").equals(ElementParameterParser.getValue(process, "__LOG4J2_ACTIVATE__"));//log4j2 enable
boolean isLog4j1Enabled = !isLog4j2Enabled;//log4j1 enable
boolean isLog4jEnabled = ("true").equals(ElementParameterParser.getValue(process, "__LOG4J_ACTIVATE__"));
if (talendMdmJob) {
talendJobInterfaces += ", TalendMDMJob"; // Talend MDM job
@@ -158,14 +156,7 @@ if (talendEsbJobFactory) {
public class <%=className%> implements <%=talendJobInterfaces%> {
<%if(isLog4jEnabled){%>
static {System.setProperty("TalendJob.log", "<%=className%>.log");}
<%if(isLog4j1Enabled){%>
private static org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger(<%=className%>.class);
<%}%>
<%if(isLog4j2Enabled){%>
private static org.apache.logging.log4j.Logger log = org.apache.logging.log4j.LogManager.getLogger(<%=className%>.class);
<%}%>
<%}%>
protected static void logIgnoredError(String message, Throwable cause) {

View File

@@ -94,7 +94,7 @@ public class JavaRoutineSynchronizer extends AbstractRoutineSynchronizer {
private void syncRoutineItems(Collection<RoutineItem> routineObjects, boolean forceUpdate) throws SystemException {
for (RoutineItem routineItem : routineObjects) {
syncRoutine(routineItem, true, forceUpdate);
syncRoutine(routineItem, true, true, forceUpdate);
}
syncSystemRoutine(ProjectManager.getInstance().getCurrentProject());
}

View File

@@ -0,0 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="org.talend.designer.components.libs" default="process" basedir=".">
<property name="component.plugin.home" value="../../../org.talend.designer.components.bigdata/components" />
<!-- #################################################### -->
<!-- modification 1: config -->
<property name="jar.name" value="GoogleStorageUtils.jar" />
<property name="component.name" value="tGSPut" />
<property name="author.name" value="jzhao" />
<!-- modification 2: compile classpath -->
<path id="compile.classpath">
<pathelement location="${component.plugin.home}/tBigQueryBulkExec/jets3t-0.9.0.jar" />
</path>
<!-- #################################################### -->
<!-- sourcecode and final jar path -->
<property name="source.home" value="." />
<property name="jar.home" value="${component.plugin.home}/${component.name}/${jar.name}" />
<!-- temp dir for clasee files -->
<property name="build.dir" value="../../build" />
<!-- compile option -->
<property name="compile.debug" value="true" />
<property name="compile.deprecation" value="false" />
<property name="compile.optimize" value="true" />
<target name="process" description="prepare a temp dir">
<antcall target="prepare" />
<antcall target="compile" />
<antcall target="clean" />
</target>
<target name="prepare" description="prepare a temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
<mkdir dir="${build.dir}/classes" />
</target>
<target name="compile" description="Compile Java sources">
<!-- compile -->
<javac srcdir="${source.home}" destdir="${build.dir}/classes" debug="${compile.debug}" deprecation="${compile.deprecation}" optimize="${compile.optimize}">
<classpath refid="compile.classpath" />
</javac>
<!-- include source code -->
<copy todir="${build.dir}/classes">
<fileset dir="${source.home}">
<exclude name="build.xml" />
</fileset>
</copy>
<!-- make jar -->
<tstamp>
<format property="date" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<jar destfile="${build.dir}/${jar.name}" basedir="${build.dir}/classes">
<manifest>
<!-- who -->
<attribute name="Built-By" value="${author.name}" />
<!-- when -->
<attribute name="Built-Date" value="${date}"/>
<!-- JDK version -->
<attribute name="Created-By" value="${java.version} (${java.vendor})" />
<!-- Information about the program itself -->
<attribute name="Implementation-Vendor" value="Talend SA" />
<attribute name="Implementation-Title" value="${jar.name}" />
<attribute name="Implementation-Version" value="1.0" />
</manifest>
</jar>
<!-- move jar -->
<move file="${build.dir}/${jar.name}" tofile="${jar.home}" />
</target>
<target name="clean" description="clean the temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
</target>
</project>

View File

@@ -0,0 +1,127 @@
package org.talend.gs.util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import org.jets3t.service.model.GSObject;
import org.jets3t.service.utils.Mimetypes;
public class GSObjectUtil {
public List<GSObject> genGSObjectList(List<GSObject> objects, File file,
String keyParent, boolean isGenFileObject, boolean isGenFolderObject)
throws NoSuchAlgorithmException, IOException {
if (file.isDirectory()) {
if (keyParent != null && !"".equals(keyParent)) {
if (keyParent.trim().lastIndexOf("/") != keyParent.trim()
.length() - 1) {
keyParent = keyParent + "/";
}
if(isGenFolderObject){
objects.add(new GSObject(keyParent));
}
}
File[] files = file.listFiles();
for (File f : files) {
if (f.isDirectory()) {
objects = genGSObjectList(objects, f,
keyParent + f.getName() + "/", isGenFileObject,
isGenFolderObject);
} else {
objects = genGSObjectList(objects, f,
keyParent + f.getName(), isGenFileObject,
isGenFolderObject);
}
}
} else {
if (isGenFileObject) {
GSObject obj = new GSObject(file);
obj.setKey(keyParent);
objects.add(obj);
}
}
return objects;
}
public java.util.Map<String,String> genFileFilterList(java.util.List<java.util.Map<String,String>> list,String localdir,String remotedir){
if (remotedir != null && !"".equals(remotedir)) {
if (remotedir.trim().lastIndexOf("/") != remotedir.trim()
.length() - 1) {
remotedir = remotedir + "/";
}
}
java.util.Map<String,String> fileMap=new HashMap<String,String>();
for (java.util.Map<String, String> map : list) {
java.util.Set<String> keySet = map.keySet();
for (String key : keySet){
String tempdir = localdir;
String filemask = key;
String dir = null;
String mask = filemask.replaceAll("\\\\", "/") ;
int i = mask.lastIndexOf('/');
if (i!=-1){
dir = mask.substring(0, i);
mask = mask.substring(i+1);
}
if (dir!=null && !"".equals(dir)) tempdir = tempdir + "/" + dir;
mask = mask.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*");
final String finalMask = mask;
java.io.File[] listings = null;
java.io.File file = new java.io.File(tempdir);
if (file.isDirectory()) {
listings = file.listFiles(new java.io.FileFilter() {
public boolean accept(java.io.File pathname) {
boolean result = false;
if (pathname != null && pathname.isFile()) {
result = java.util.regex.Pattern.compile(finalMask).matcher(pathname.getName()).find();
}
return result;
}
});
}
if(listings == null || listings.length <= 0){
System.err.println("No match file("+key+") exist!");
}else{
String localFilePath = "";
String newObjectKey = "";
for (int m = 0; m < listings.length; m++){
if (listings[m].getName().matches(mask)){
localFilePath = listings[m].getAbsolutePath();
if(map.get(key)!=null && map.get(key).length()>0){
newObjectKey = remotedir+map.get(key);
}else{
newObjectKey = remotedir+listings[m].getName();
}
fileMap.put(localFilePath, newObjectKey);
}
}
}
}
}
return fileMap;
}
public List<GSObject> genObjectByFileMap(java.util.Map<String,String> fileMap) throws Exception, IOException{
Set<String> localFiles=fileMap.keySet();
List<GSObject> objects=new ArrayList<GSObject>();
for(String localFilePath:localFiles){
GSObject object=new GSObject(new File(localFilePath));
object.setKey(fileMap.get(localFilePath));
objects.add(object);
}
return objects;
}
public void initMimeTypes() throws IOException {
InputStream mimetypesFile = this.getClass().getResourceAsStream(
"/resource/mime.types");
Mimetypes.getInstance().loadAndReplaceMimetypes(mimetypesFile);
}
}

View File

@@ -4,7 +4,7 @@
<groupId>com.microsoft.azure</groupId>
<artifactId>adal4j</artifactId>
<version>1.1.1-20191012</version>
<version>1.1.1-patch</version>
<packaging>jar</packaging>
<name>adal4j</name>
<description>
@@ -105,7 +105,7 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
<version>1.7.5</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>

View File

@@ -33,15 +33,10 @@
<version>3.0.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
<version>1.7.5</version>
</dependency>
<!-- Spring 3 dependencies -->
<dependency>

View File

@@ -27,8 +27,7 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.text.WordUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@@ -42,7 +41,7 @@ import org.json.JSONObject;
*/
public class JSONHelper {
private static Logger logger = LoggerFactory.getLogger(JSONHelper.class);
private static Logger logger = Logger.getLogger(JSONHelper.class);
JSONHelper() {
// PropertyConfigurator.configure("log4j.properties");

View File

@@ -1,69 +1,69 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>checkArchive-1.1-20190917</artifactId>
<version>6.0.0</version>
<name>checkArchive</name>
<description>Dependence for tFileArchive and tFileUnAchive</description>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<java.source.version>1.7</java.source.version>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-compress -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.19</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${java.source.version}</source>
<target>${java.source.version}</target>
<showDeprecation>true</showDeprecation>
<showWarnings>true</showWarnings>
<compilerArgument>-XDignore.symbol.file</compilerArgument>
<fork>true</fork>
</configuration>
</plugin>
</plugins>
</build>
</project>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>checkArchive-1.1-20181130</artifactId>
<version>6.0.0</version>
<name>checkArchive</name>
<description>Dependence for tFileArchive and tFileUnAchive</description>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
<java.source.version>1.7</java.source.version>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<releases>
<enabled>true</enabled>
</releases>
</repository>
</distributionManagement>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-compress -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.10</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${java.source.version}</source>
<target>${java.source.version}</target>
<showDeprecation>true</showDeprecation>
<showWarnings>true</showWarnings>
<compilerArgument>-XDignore.symbol.file</compilerArgument>
<fork>true</fork>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -1,9 +1,9 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components.lib</groupId>
<groupId>org.talend.libraries</groupId>
<artifactId>commons-net-ftps-proxy</artifactId>
<version>3.6.1-talend-20190819</version>
<version>3.6.1-talend-20190128</version>
<name>commons-net-talend</name>

View File

@@ -3,7 +3,6 @@ package org.talend.ftp;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.Socket;
import java.net.InetAddress;
import java.util.Locale;
import javax.net.ssl.SSLContext;
@@ -30,13 +29,11 @@ public class SSLSessionReuseFTPSClient extends FTPSClient {
final Object cache = sessionHostPortCache.get(context);
final Method putMethod = cache.getClass().getDeclaredMethod("put", Object.class, Object.class);
putMethod.setAccessible(true);
InetAddress address = socket.getInetAddress();
int port = socket.getPort();
String key = String.format("%s:%s", address.getHostName(), String.valueOf(port)).toLowerCase(Locale.ROOT);
putMethod.invoke(cache, key, session);
key = String.format("%s:%s", address.getHostAddress(), String.valueOf(port)).toLowerCase(Locale.ROOT);
final Method getHostMethod = socket.getClass().getDeclaredMethod("getHost");
getHostMethod.setAccessible(true);
Object host = getHostMethod.invoke(socket);
final String key =
String.format("%s:%s", host, String.valueOf(socket.getPort())).toLowerCase(Locale.ROOT);
putMethod.invoke(cache, key, session);
} catch (Exception e) {
e.printStackTrace();

View File

@@ -1,88 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="org.talend.designer.components.libs" default="process" basedir=".">
<property name="component.plugin.home" value="../../../org.talend.designer.components.localprovider/components" />
<!-- #################################################### -->
<!-- modification 1: config -->
<property name="jar.name" value="external_sort.jar" />
<property name="component.name" value="tSortOut" />
<property name="author.name" value="wyang" />
<!-- modification 2: compile classpath -->
<path id="compile.classpath">
</path>
<!-- #################################################### -->
<!-- sourcecode and final jar path -->
<property name="source.home" value="." />
<property name="jar.home" value="${component.plugin.home}/${component.name}/${jar.name}" />
<!-- temp dir for clasee files -->
<property name="build.dir" value="../../build" />
<!-- compile option -->
<property name="compile.debug" value="true" />
<property name="compile.deprecation" value="false" />
<property name="compile.optimize" value="true" />
<target name="process" description="prepare a temp dir">
<antcall target="prepare" />
<antcall target="compile" />
<antcall target="clean" />
</target>
<target name="prepare" description="prepare a temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
<mkdir dir="${build.dir}/classes" />
</target>
<target name="compile" description="Compile Java sources">
<!-- compile -->
<javac srcdir="${source.home}" destdir="${build.dir}/classes" debug="${compile.debug}" deprecation="${compile.deprecation}" optimize="${compile.optimize}">
<classpath refid="compile.classpath" />
</javac>
<!-- include source code -->
<copy todir="${build.dir}/classes">
<fileset dir="${source.home}">
<exclude name="build.xml" />
</fileset>
</copy>
<!-- make jar -->
<tstamp>
<format property="date" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<jar destfile="${build.dir}/${jar.name}" basedir="${build.dir}/classes">
<manifest>
<!-- who -->
<attribute name="Built-By" value="${author.name}" />
<!-- when -->
<attribute name="Built-Date" value="${date}"/>
<!-- JDK version -->
<attribute name="Created-By" value="${java.version} (${java.vendor})" />
<!-- Information about the program itself -->
<attribute name="Implementation-Vendor" value="Talend SA" />
<attribute name="Implementation-Title" value="${jar.name}" />
<attribute name="Implementation-Version" value="1.0" />
</manifest>
</jar>
<!-- move jar -->
<move file="${build.dir}/${jar.name}" tofile="${jar.home}" />
</target>
<target name="clean" description="clean the temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
</target>
</project>

View File

@@ -1,88 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (C) 2006-2020 Talend Inc. - www.talend.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>google-storage-utils</artifactId>
<version>1.0.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>
<name>Google Storage Utils</name>
<description>Talend Helper library for Google Storage</description>
<url>
https://github.com/Talend/tdi-studio-se/
</url>
<licenses>
<license>
<name>Apache-2.0</name>
<url>
http://www.talendforge.org/modules/licenses/APACHE_v2.txt
</url>
</license>
</licenses>
<dependencies>
<dependency>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
<version>0.9.0</version>
</dependency>
</dependencies>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
</repository>
</distributionManagement>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.2</version>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
<resource>
<directory>src/main/resources</directory>
<targetPath>resource</targetPath>
</resource>
</resources>
</build>
</project>

View File

@@ -1,123 +0,0 @@
package org.talend.gs.util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import org.jets3t.service.model.GSObject;
import org.jets3t.service.utils.Mimetypes;
public class GSObjectUtil {
public List<GSObject> genGSObjectList(List<GSObject> objects, File file, String keyParent, boolean isGenFileObject,
boolean isGenFolderObject) throws NoSuchAlgorithmException, IOException {
if (file.isDirectory()) {
if (keyParent != null && !"".equals(keyParent)) {
if (keyParent.trim().lastIndexOf("/") != keyParent.trim().length() - 1) {
keyParent = keyParent + "/";
}
if (isGenFolderObject) {
objects.add(new GSObject(keyParent));
}
}
File[] files = file.listFiles();
for (File f : files) {
if (f.isDirectory()) {
objects = genGSObjectList(objects, f, keyParent + f.getName() + "/", isGenFileObject,
isGenFolderObject);
} else {
objects = genGSObjectList(objects, f, keyParent + f.getName(), isGenFileObject, isGenFolderObject);
}
}
} else {
if (isGenFileObject) {
GSObject obj = new GSObject(file);
obj.setKey(keyParent);
objects.add(obj);
}
}
return objects;
}
public java.util.Map<String, String> genFileFilterList(java.util.List<java.util.Map<String, String>> list,
String localdir, String remotedir) {
if (remotedir != null && !"".equals(remotedir)) {
if (remotedir.trim().lastIndexOf("/") != remotedir.trim().length() - 1) {
remotedir = remotedir + "/";
}
}
java.util.Map<String, String> fileMap = new HashMap<String, String>();
for (java.util.Map<String, String> map : list) {
java.util.Set<String> keySet = map.keySet();
for (String key : keySet) {
String tempdir = localdir;
String filemask = key;
String dir = null;
String mask = filemask.replaceAll("\\\\", "/");
int i = mask.lastIndexOf('/');
if (i != -1) {
dir = mask.substring(0, i);
mask = mask.substring(i + 1);
}
if (dir != null && !"".equals(dir))
tempdir = tempdir + "/" + dir;
mask = mask.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*");
final String finalMask = mask;
java.io.File[] listings = null;
java.io.File file = new java.io.File(tempdir);
if (file.isDirectory()) {
listings = file.listFiles(new java.io.FileFilter() {
public boolean accept(java.io.File pathname) {
boolean result = false;
if (pathname != null && pathname.isFile()) {
result = java.util.regex.Pattern.compile(finalMask).matcher(pathname.getName()).find();
}
return result;
}
});
}
if (listings == null || listings.length <= 0) {
System.err.println("No match file(" + key + ") exist!");
} else {
String localFilePath = "";
String newObjectKey = "";
for (int m = 0; m < listings.length; m++) {
if (listings[m].getName().matches(mask)) {
localFilePath = listings[m].getAbsolutePath();
if (map.get(key) != null && map.get(key).length() > 0) {
newObjectKey = remotedir + map.get(key);
} else {
newObjectKey = remotedir + listings[m].getName();
}
fileMap.put(localFilePath, newObjectKey);
}
}
}
}
}
return fileMap;
}
public List<GSObject> genObjectByFileMap(java.util.Map<String, String> fileMap) throws Exception, IOException {
Set<String> localFiles = fileMap.keySet();
List<GSObject> objects = new ArrayList<GSObject>();
for (String localFilePath : localFiles) {
GSObject object = new GSObject(new File(localFilePath));
object.setKey(fileMap.get(localFilePath));
objects.add(object);
}
return objects;
}
public void initMimeTypes() throws IOException {
InputStream mimetypesFile = this.getClass().getResourceAsStream("/resource/mime.types");
Mimetypes.getInstance().loadAndReplaceMimetypes(mimetypesFile);
}
}
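A minimal sketch of the wildcard handling in genFileFilterList above (file and directory names are illustrative): the filemask is split into a directory part and a name mask, and the mask becomes a regular expression by escaping '.' and expanding '*'.
// Illustrative values only; mirrors the mask-to-regex conversion shown above.
String filemask = "data/in*.csv";
String mask = filemask.replaceAll("\\\\", "/");                   // normalize backslashes
int i = mask.lastIndexOf('/');
String dir = (i != -1) ? mask.substring(0, i) : null;             // "data", appended to the local dir
mask = (i != -1) ? mask.substring(i + 1) : mask;                  // "in*.csv"
mask = mask.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*");   // "in.*\.csv"
boolean hit = java.util.regex.Pattern.compile(mask).matcher("in_2019.csv").find();   // true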

View File

@@ -7,7 +7,7 @@
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<packaging>jar</packaging>
<version>2.4.4-talend</version>
<version>2.4.1-talend</version>
<name>json-lib</name>
<properties>

View File

@@ -25,12 +25,12 @@ import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import net.sf.json.util.JSONUtils;
import net.sf.json.util.JsonEventListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Base class for JSONObject and JSONArray.
*
@@ -227,8 +227,7 @@ abstract class AbstractJSON implements JSON {
}
protected Object _processValue( Object value, JsonConfig jsonConfig ) {
if( JSONNull.getInstance().equals( value ) &&
((jsonConfig.getJsonStandard() != JsonStandard.WRAP_NULL_STRINGS) || !"null".equals(value))) {
if( JSONNull.getInstance().equals( value ) ) {
return JSONNull.getInstance();
} else if( Class.class.isAssignableFrom( value.getClass() ) || value instanceof Class ) {
return ((Class) value).getName();

View File

@@ -2391,7 +2391,7 @@ public final class JSONObject extends AbstractJSON implements JSON, Map, Compara
}
try{
Iterator keys = keys();
StringBuilder sb = new StringBuilder( "{" );
StringBuffer sb = new StringBuffer( "{" );
while( keys.hasNext() ){
if( sb.length() > 1 ){
@@ -2460,7 +2460,7 @@ public final class JSONObject extends AbstractJSON implements JSON, Map, Compara
return this.toString();
}
Iterator keys = keys();
StringBuilder sb = new StringBuilder( "{" );
StringBuffer sb = new StringBuffer( "{" );
int newindent = indent + indentFactor;
Object o;
if( n == 1 ){
@@ -2549,7 +2549,7 @@ public final class JSONObject extends AbstractJSON implements JSON, Map, Compara
if( o instanceof JSONArray ){
((JSONArray) o).element( value, jsonConfig );
}else{
setInternal( key, new JSONArray().element( o, jsonConfig )
setInternal( key, new JSONArray().element( o )
.element( value, jsonConfig ), jsonConfig );
}
}

View File

@@ -86,6 +86,7 @@ public class JsonConfig {
private boolean handleJettisonEmptyElement;
private boolean handleJettisonSingleElementArray;
private boolean ignoreDefaultExcludes;
//private boolean ignoreJPATransient;
private boolean ignoreTransientFields;
private boolean ignorePublicFields = true;
private boolean javascriptCompliant;
@@ -109,7 +110,6 @@ public class JsonConfig {
private Map typeMap = new HashMap();
private List ignoreFieldAnnotations = new ArrayList();
private boolean allowNonStringKeys = false;
private JsonStandard jsonStandard = JsonStandard.LEGACY;
public JsonConfig() {
}
@@ -1246,20 +1246,7 @@ public class JsonConfig {
this.newBeanInstanceStrategy = newBeanInstanceStrategy == null ? DEFAULT_NEW_BEAN_INSTANCE_STRATEGY
: newBeanInstanceStrategy;
}
/**
* Sets the config to wrap "null" strings as strings instead of JsonNull.
*
*/
public void setJsonStandard(JsonStandard wrapNullStringValues) {
this.jsonStandard = wrapNullStringValues;
}
public JsonStandard getJsonStandard() {
return jsonStandard;
}
/**
* Sets a PropertyExclusionClassMatcher to use.<br>
* Will set default value (PropertyExclusionClassMatcher.DEFAULT) if null.<br>

View File

@@ -1,14 +0,0 @@
package net.sf.json;
public enum JsonStandard {
/**
* Out of date standard used to be default before
*/
LEGACY,
/**
* Updated standard due to RFC 7159 to not unwrap "null" strings (keep quotations)
*/
WRAP_NULL_STRINGS
}
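For reference, a minimal sketch of how this Talend-specific standard (removed by this changeset, not part of stock json-lib) was used, based on JsonConfig.setJsonStandard and JSONUtils.jsonToStandardizedString elsewhere in this diff:
JsonConfig config = new JsonConfig();
config.setJsonStandard(JsonStandard.WRAP_NULL_STRINGS);
JSONObject json = new JSONObject();
json.element("key", "null", config);   // keeps the literal string "null" instead of converting it to JSONNull
// json.toString() would still render the value unquoted (JSONNull treats the string "null" as null);
// the standardized form keeps the quotes:
String wrapped = JSONUtils.jsonToStandardizedString(json, JsonStandard.WRAP_NULL_STRINGS);   // {"key":"null"}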

View File

@@ -15,7 +15,6 @@
*/
package net.sf.json.util;
import java.math.BigDecimal;
import net.sf.json.JSONArray;
import net.sf.json.JSONException;
import net.sf.json.JSONNull;
@@ -415,7 +414,7 @@ public class JSONTokener {
}
try{
return createNumber(s);
return NumberUtils.createNumber(s);
}catch( Exception e ){
return s;
}
@@ -436,26 +435,6 @@ public class JSONTokener {
return s;
}
/**
* This method has been added to fix https://jira.talendforge.org/browse/TDI-42689
*
* @param s The String representation of the number
* @return The Number instance
*/
private Number createNumber(String s){
boolean isDecimal = s.indexOf('.') != -1;
if(isDecimal){
Double d = Double.valueOf(s);
if(Double.POSITIVE_INFINITY == Math.abs(d)){
return new BigDecimal(s);
}
return d;
}
return NumberUtils.createNumber(s);
}
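The removed createNumber helper (added for TDI-42689) exists to avoid losing very large decimals; a minimal illustration of the Double overflow it works around:
double d = Double.parseDouble("1.7976931348623157E309");   // just beyond Double.MAX_VALUE
System.out.println(Double.isInfinite(d));                  // true - the magnitude is lost
java.math.BigDecimal bd = new java.math.BigDecimal("1.7976931348623157E309");
System.out.println(bd);                                     // exact value preserved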
/**
* Look at the next character in the source string.
*

View File

@@ -24,8 +24,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.beanutils.DynaBean;
import net.sf.ezmorph.MorphUtils;
import net.sf.ezmorph.MorpherRegistry;
import net.sf.ezmorph.bean.MorphDynaBean;
@@ -38,9 +36,10 @@ import net.sf.json.JSONNull;
import net.sf.json.JSONObject;
import net.sf.json.JSONString;
import net.sf.json.JsonConfig;
import net.sf.json.JsonStandard;
import net.sf.json.regexp.RegexpUtils;
import org.apache.commons.beanutils.DynaBean;
/**
* Provides useful methods on java objects and JSON values.
*
@@ -125,7 +124,7 @@ public final class JSONUtils {
public static String getFunctionBody( String function ) {
return RegexpUtils.getMatcher( FUNCTION_BODY_PATTERN, true ).getGroupIfMatches( function, 1 );
}
/**
* Returns the params of a function literal.
*/
@@ -185,12 +184,14 @@ public final class JSONUtils {
return Integer.class;
}else if( isLong( n ) ){
return Long.class;
}else if( isFloat( n ) ){
return Float.class;
}else if( isBigInteger( n ) ){
return BigInteger.class;
}else if( isDouble( n ) ){
return Double.class;
}else if( isBigDecimal( n ) ){
return BigDecimal.class;
}else if( isDouble( n ) ){
return Double.class;
}else{
throw new JSONException( "Unsupported type" );
}
@@ -606,7 +607,7 @@ public final class JSONUtils {
return input.startsWith( SINGLE_QUOTE ) && input.endsWith( SINGLE_QUOTE ) ||
input.startsWith( DOUBLE_QUOTE ) && input.endsWith( DOUBLE_QUOTE );
}
public static boolean isJsonKeyword( String input, JsonConfig jsonConfig ) {
if( input == null ){
return false;
@@ -616,7 +617,7 @@ public final class JSONUtils {
"false".equals( input ) ||
(jsonConfig.isJavascriptCompliant() && "undefined".equals( input ));
}
/**
* Throw an exception if the object is an NaN or infinite number.
*
@@ -755,77 +756,7 @@ public final class JSONUtils {
return quote( value.toString() );
}
public static String jsonToStandardizedString(JSON json, JsonStandard standard) {
switch (standard) {
case WRAP_NULL_STRINGS:
if (json.isArray()) {
JSONArray jsonArray = (JSONArray) json;
return jsonArrayToWrappedNullStrings(jsonArray);
} else if (!JSONNull.getInstance().equals(json)) {
return jsonToWrappedNullStrings((JSONObject) json);
}
default:
return json.toString();
}
}
/**
*
* @return plain String from JSONObject (@see JSONObject#toString()), but wrap null strings to quotation
*/
private static String jsonToWrappedNullStrings(JSONObject json) {
if (json.isNullObject()) {
return JSONNull.getInstance()
.toString();
}
try {
Iterator keys = json.keys();
StringBuilder sb = new StringBuilder("{");
while (keys.hasNext()) {
if (sb.length() > 1) {
sb.append(',');
}
Object o = keys.next();
sb.append(quote(o.toString()));
sb.append(':');
sb.append(valueToStringWrappedNullStrings(json.get(o)));
}
sb.append('}');
return sb.toString();
} catch (Exception e) {
return null;
}
}
private static String jsonArrayToWrappedNullStrings(JSONArray jsonArray) {
final String separator = ",";
StringBuilder sb = new StringBuilder("[");
for (int i = 0; i < jsonArray.size(); i++) {
if (i > 0) {
sb.append(separator);
}
sb.append(JSONUtils.valueToStringWrappedNullStrings(jsonArray.get(i)));
}
return sb.append("]").toString();
}
private static String valueToStringWrappedNullStrings(Object o) {
if ("null".equals(o)) {
return quote(o.toString());
} else if (o instanceof JSONArray) {
return jsonArrayToWrappedNullStrings((JSONArray) o);
} else if (o instanceof JSONObject) {
return jsonToWrappedNullStrings((JSONObject) o);
} else {
return valueToString(o);
}
}
/**
/**
* Finds out if n represents a BigInteger
*
* @return true if n is instanceOf BigInteger or the literal value can be

View File

@@ -22,8 +22,6 @@ import net.sf.json.JSONException;
import net.sf.json.JSONFunction;
import net.sf.json.JSONNull;
import net.sf.json.JSONObject;
import net.sf.json.JsonConfig;
import net.sf.json.JsonStandard;
import net.sf.json.util.JSONUtils;
import nu.xom.Attribute;
import nu.xom.Builder;
@@ -31,6 +29,7 @@ import nu.xom.Document;
import nu.xom.Element;
import nu.xom.Elements;
import nu.xom.Node;
import nu.xom.ProcessingInstruction;
import nu.xom.Serializer;
import nu.xom.Text;
import org.apache.commons.lang.ArrayUtils;
@@ -164,15 +163,6 @@ public class XMLSerializer {
*/
private boolean useLongDecimals;
/**
* The config parameter to wrap "null" strings as strings instead of JsonNull.
*/
private JsonStandard jsonStandard;
/**
* flag for if parse empty elements as empty strings
*/
private boolean useEmptyStrings;
/**
* Creates a new XMLSerializer with default options.<br>
* <ul>
@@ -377,19 +367,17 @@ public class XMLSerializer {
return JSONNull.getInstance();
}
String defaultType = getType( root, JSONTypes.STRING );
JsonConfig config = new JsonConfig();
config.setJsonStandard(jsonStandard);
if( isArray( root, true ) ){
json = processArrayElement( root, defaultType );
if( forceTopLevelObject ){
String key = removeNamespacePrefix( root.getQualifiedName() );
json = new JSONObject().element(key, json, config);
json = new JSONObject().element( key, json );
}
}else{
json = processObjectElement( root, defaultType );
if( forceTopLevelObject ){
String key = removeNamespacePrefix( root.getQualifiedName() );
json = new JSONObject().element(key, json, config);
json = new JSONObject().element( key, json );
}
}
}catch( JSONException jsone ){
@@ -667,10 +655,6 @@ public class XMLSerializer {
this.useLongDecimals = useLongDecimals;
}
public void setJsonStandard(JsonStandard jsonStandard) {
this.jsonStandard = jsonStandard;
}
/**
* Writes a JSON value into a XML string with UTF-8 encoding.<br>
*
@@ -819,8 +803,6 @@ public class XMLSerializer {
clazz = JSONTypes.OBJECT;
}else if( JSONTypes.ARRAY.compareToIgnoreCase( clazzText ) == 0 ){
clazz = JSONTypes.ARRAY;
} else if(JSONTypes.STRING.equalsIgnoreCase(clazzText)) {
clazz = JSONTypes.STRING;
}
}
return clazz;
@@ -1275,16 +1257,14 @@ public class XMLSerializer {
}
private void setOrAccumulate( JSONObject jsonObject, String key, Object value ) {
JsonConfig config = new JsonConfig();
config.setJsonStandard(jsonStandard);
if( jsonObject.has( key ) ){
jsonObject.accumulate(key, value, config);
jsonObject.accumulate( key, value );
Object val = jsonObject.get( key );
if( val instanceof JSONArray ){
((JSONArray) val).setExpandElements( true );
}
}else{
jsonObject.element( key, value, config);
jsonObject.element( key, value );
}
}
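setOrAccumulate relies on json-lib's accumulate semantics, where a value added under an existing key promotes that key to a JSONArray; a minimal sketch with plain json-lib (no XMLSerializer involved):
JSONObject obj = new JSONObject();
obj.element("tag", "a");      // {"tag":"a"}
obj.accumulate("tag", "b");   // {"tag":["a","b"]} - the second value turns the key into an array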
@@ -1292,10 +1272,9 @@ public class XMLSerializer {
String clazz = getClass( element );
String type = getType( element );
type = (type == null) ? defaultType : type;
JsonConfig config = new JsonConfig();
config.setJsonStandard(jsonStandard);
if( hasNamespaces( element ) && !skipNamespaces ){
jsonArray.element(simplifyValue(null, processElement(element, type)), config);
jsonArray.element( simplifyValue( null, processElement( element, type ) ) );
return;
}else if( element.getAttributeCount() > 0 ){
if( isFunction( element ) ){
@@ -1303,10 +1282,10 @@ public class XMLSerializer {
String[] params = null;
String text = element.getValue();
params = StringUtils.split( paramsAttribute.getValue(), "," );
jsonArray.element(new JSONFunction(params, text));
jsonArray.element( new JSONFunction( params, text ) );
return;
}else{
jsonArray.element(simplifyValue(null, processElement(element, type)), config);
jsonArray.element( simplifyValue( null, processElement( element, type ) ) );
return;
}
}
@@ -1314,10 +1293,10 @@ public class XMLSerializer {
boolean classProcessed = false;
if( clazz != null ){
if( clazz.compareToIgnoreCase( JSONTypes.ARRAY ) == 0 ){
jsonArray.element(processArrayElement(element, type), config);
jsonArray.element( processArrayElement( element, type ) );
classProcessed = true;
}else if( clazz.compareToIgnoreCase( JSONTypes.OBJECT ) == 0 ){
jsonArray.element(simplifyValue(null, processObjectElement( element, type)), config);
jsonArray.element( simplifyValue( null, processObjectElement( element, type ) ) );
classProcessed = true;
}
}
@@ -1353,12 +1332,12 @@ public class XMLSerializer {
jsonArray.element( new JSONFunction( params, text ) );
}else{
if( isArray( element, false ) ){
jsonArray.element(processArrayElement(element, defaultType), config);
jsonArray.element( processArrayElement( element, defaultType ) );
}else if( isObject( element, false ) ){
jsonArray.element(simplifyValue(null, processObjectElement(element,
defaultType)), config);
jsonArray.element( simplifyValue( null, processObjectElement( element,
defaultType ) ) );
}else{
jsonArray.element(trimSpaceFromValue(element.getValue()), config);
jsonArray.element( trimSpaceFromValue( element.getValue() ) );
}
}
}
@@ -1435,8 +1414,6 @@ public class XMLSerializer {
String text = element.getValue();
params = StringUtils.split( paramsAttribute.getValue(), "," );
setOrAccumulate( jsonObject, key, new JSONFunction( params, text ) );
} else if( useEmptyStrings && clazz != null && clazz.equalsIgnoreCase(JSONTypes.STRING) ) {
setTextValue(jsonObject, key, element);
}else{
if( isArray( element, false ) ){
setOrAccumulate( jsonObject, key, processArrayElement( element, defaultType ) );
@@ -1444,23 +1421,19 @@ public class XMLSerializer {
setOrAccumulate( jsonObject, key, simplifyValue( jsonObject,
processObjectElement( element, defaultType ) ) );
}else{
setTextValue(jsonObject, key, element);
String value;
if( isKeepCData && isCData( element ) ){
value = "<![CDATA[" + element.getValue() + "]]>";
}else{
value = element.getValue();
}
setOrAccumulate( jsonObject, key, trimSpaceFromValue( value ) );
}
}
}
}
}
private void setTextValue(final JSONObject jsonObject, final String key, final Element element) {
String value;
if( isKeepCData && isCData( element ) ){
value = "<![CDATA[" + element.getValue() + "]]>";
}else{
value = element.getValue();
}
setOrAccumulate( jsonObject, key, trimSpaceFromValue( value ) );
}
private boolean isCData( Element element ) {
if( element.getChildCount() == 1 ){
final Node child = element.getChild( 0 );
@@ -1520,14 +1493,6 @@ public class XMLSerializer {
return str;
}
public void setUseEmptyStrings(boolean useEmptyStrings) {
this.useEmptyStrings = useEmptyStrings;
}
public boolean isUseEmptyStrings() {
return this.useEmptyStrings;
}
private static class CustomElement extends Element {
private static String getName( String name ) {
int colon = name.indexOf( ':' );

View File

@@ -27,7 +27,6 @@ import net.sf.json.processors.PropertyNameProcessor;
import net.sf.json.sample.BeanA;
import net.sf.json.sample.BeanB;
import net.sf.json.sample.BeanC;
import net.sf.json.sample.BeanD;
import net.sf.json.sample.BeanFoo;
import net.sf.json.sample.BeanWithFunc;
import net.sf.json.sample.ChildBean;
@@ -1043,17 +1042,6 @@ public class TestJSONObject extends TestCase {
JSONArray.toArray( jsonObject.getJSONArray( "intarray" ) ) );
}
public void testToBean_BeanD() {
String json = "{bool:true,integer:1,string:\"json\",doublearray:[4.2424245783E7, 123456789.2424245783E7, 6.0]}";
JSONObject jsonObject = JSONObject.fromObject( json );
BeanD bean = (BeanD) JSONObject.toBean( jsonObject, BeanD.class );
assertEquals( jsonObject.get( "bool" ), Boolean.valueOf( bean.isBool() ) );
assertEquals( jsonObject.get( "integer" ), new Integer( bean.getInteger() ) );
assertEquals( jsonObject.get( "string" ), bean.getString() );
Assertions.assertEquals( bean.getDoublearray(),
JSONArray.toArray( jsonObject.getJSONArray( "doublearray" ) ) );
}
public void testToBean_ClassBean() {
JSONObject json = new JSONObject();
json.element( "klass", "java.lang.Object" );
@@ -1062,29 +1050,9 @@ public class TestJSONObject extends TestCase {
assertEquals( Object.class, bean.getKlass() );
}
public void testToBean_DynaBean__BigInteger_Double() {
BigInteger l = new BigDecimal( "1.7976931348623157E308" ).toBigInteger();
BigDecimal m = new BigDecimal( "1.7976931348623157E307" ).add( new BigDecimal( "0.0001" ) );
JSONObject json = new JSONObject().element( "i", BigInteger.ZERO )
.element( "d", MorphUtils.BIGDECIMAL_ONE )
.element( "bi", l )
.element( "bd", m );
Object bean = JSONObject.toBean( json );
Object i = ((MorphDynaBean) bean).get( "i" );
Object d = ((MorphDynaBean) bean).get( "d" );
assertTrue( i instanceof Integer );
assertTrue( d instanceof Integer );
Object bi = ((MorphDynaBean) bean).get( "bi" );
Object bd = ((MorphDynaBean) bean).get( "bd" );
assertTrue( bi instanceof BigInteger );
assertTrue( bd instanceof Double );
}
public void testToBean_DynaBean__BigInteger_BigDecimal() {
BigInteger l = new BigDecimal( "1.7976931348623157E308" ).toBigInteger();
BigDecimal m = new BigDecimal( "-1.7976931348623157E309" ).add( new BigDecimal( "0.0001" ) );
BigDecimal m = new BigDecimal( "1.7976931348623157E307" ).add( new BigDecimal( "0.0001" ) );
JSONObject json = new JSONObject().element( "i", BigInteger.ZERO )
.element( "d", MorphUtils.BIGDECIMAL_ONE )
.element( "bi", l )

View File

@@ -1,41 +0,0 @@
/*
* Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.json.sample;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import java.math.BigDecimal;
/**
* @author Andres Almiray <aalmiray@users.sourceforge.net>
*/
public class BeanD extends BeanA {
private Double[] doublearray = new Double[3];
public Double[] getDoublearray() {
return doublearray;
}
public void setDoublearray(Double[] doublearray) {
this.doublearray = doublearray;
}
public String toString() {
return ToStringBuilder.reflectionToString( this, ToStringStyle.MULTI_LINE_STYLE );
}
}

View File

@@ -17,7 +17,6 @@
package net.sf.json.util;
import java.io.StringWriter;
import java.math.BigDecimal;
import junit.framework.TestCase;
import net.sf.json.JSONFunction;
@@ -94,7 +93,7 @@ public class TestJSONBuilder extends TestCase {
.endObject();
JSONObject jsonObj = JSONObject.fromObject( w.toString() );
assertEquals( Boolean.TRUE, jsonObj.get( "bool" ) );
assertEquals( Double.valueOf( "1.1" ), jsonObj.get( "numDouble" ) );
assertEquals( new Double( 1.1d ), jsonObj.get( "numDouble" ) );
assertEquals( new Long( 2 ).longValue(), ((Number) jsonObj.get( "numInt" )).longValue() );
assertEquals( "text", jsonObj.get( "text" ) );
assertTrue( JSONUtils.isFunction( jsonObj.get( "func" ) ) );

View File

@@ -20,8 +20,6 @@ import junit.framework.TestCase;
import net.sf.json.JSONFunction;
import net.sf.json.JSONObject;
import java.math.BigDecimal;
/**
* @author Andres Almiray <aalmiray@users.sourceforge.net>
*/
@@ -87,7 +85,7 @@ public class TestJSONStringer extends TestCase {
.endObject();
JSONObject jsonObj = JSONObject.fromObject( b.toString() );
assertEquals( Boolean.TRUE, jsonObj.get( "bool" ) );
assertEquals( Double.valueOf( "1.1" ), jsonObj.get( "numDouble" ) );
assertEquals( new Double( 1.1d ), jsonObj.get( "numDouble" ) );
assertEquals( new Long( 2 ).longValue(), ((Number) jsonObj.get( "numInt" )).longValue() );
assertEquals( "text", jsonObj.get( "text" ) );
assertTrue( JSONUtils.isFunction( jsonObj.get( "func" ) ) );

View File

@@ -17,25 +17,16 @@
package net.sf.json.util;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import junit.framework.TestCase;
import net.sf.json.JSONArray;
import net.sf.json.JSONException;
import net.sf.json.JSONFunction;
import net.sf.json.JSONObject;
import net.sf.json.JsonConfig;
import net.sf.json.JsonStandard;
/**
* @author Andres Almiray <aalmiray@users.sourceforge.net>
*/
public class TestJSONUtils extends TestCase {
private static Map<String, String> valuesMap;
public static void main( String[] args ) {
junit.textui.TestRunner.run( TestJSONUtils.class );
}
@@ -44,14 +35,6 @@ public class TestJSONUtils extends TestCase {
super( name );
}
public void setUp() throws Exception {
super.setUp();
valuesMap = new LinkedHashMap<>();
valuesMap.put("key1", "null");
valuesMap.put("key2", "not_null");
}
public void testDoubleToString_infinite() {
assertEquals( "null", JSONUtils.doubleToString( Double.POSITIVE_INFINITY ) );
}
@@ -194,48 +177,4 @@ public class TestJSONUtils extends TestCase {
// ok
}
}
public void testNullStringsWrapped() {
JsonConfig config = new JsonConfig();
config.setJsonStandard(JsonStandard.WRAP_NULL_STRINGS);
JSONObject jsonObject = new JSONObject();
jsonObject.putAll(valuesMap, config);
String resultingString = JSONUtils.jsonToStandardizedString(jsonObject, JsonStandard.WRAP_NULL_STRINGS);
assertFalse("Wrapping null strings standard's broken", Objects.equals(jsonObject.toString(), resultingString));
assertTrue(resultingString.contains("\"null\""));
}
public void testNullStringsUnwrapped() {
JSONObject jsonObject = new JSONObject();
jsonObject.putAll(valuesMap);
String resultingString = JSONUtils.jsonToStandardizedString(jsonObject, JsonStandard.WRAP_NULL_STRINGS);
assertEquals(jsonObject.toString(), resultingString);
}
public void testNullStringsOnArrayWrapped() {
JsonConfig config = new JsonConfig();
config.setJsonStandard(JsonStandard.WRAP_NULL_STRINGS);
JSONArray jsonArray = new JSONArray();
jsonArray.add("abc");
jsonArray.add("null", config);
jsonArray.add(null);
String resultingString = JSONUtils.jsonToStandardizedString(jsonArray, JsonStandard.WRAP_NULL_STRINGS);
assertFalse("Wrapping null strings standard's broken", Objects.equals(jsonArray.toString(), resultingString));
assertTrue(resultingString.contains("\"null\""));
}
public void testNullStringsOnArrayUnwrapped() {
JSONArray jsonArray = new JSONArray();
jsonArray.add("abc");
jsonArray.add("null");
jsonArray.add(null);
String resultingString = JSONUtils.jsonToStandardizedString(jsonArray, JsonStandard.LEGACY);
assertEquals(jsonArray.toString(), resultingString);
}
}

View File

@@ -4,7 +4,7 @@
<groupId>org.neo4j.talend</groupId>
<artifactId>neo4j-talend-component</artifactId>
<version>1.3-20191012</version>
<version>1.3-20171206</version>
<packaging>jar</packaging>
<name>Neo4j Talend </name>
@@ -100,23 +100,12 @@
<artifactId>neo4j-import-tool</artifactId>
<version>${neo4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<!-- Talend log4j library -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<scope>test</scope>
<version>1.2.16</version>
<scope>provided</scope>
</dependency>
<!-- JUnit -->
<dependency>

View File

@@ -1,7 +1,6 @@
package org.neo4j.talend;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.neo4j.graphdb.DynamicLabel;
import org.neo4j.graphdb.index.IndexHits;
import org.neo4j.helpers.collection.MapUtil;
@@ -24,7 +23,7 @@ public class Neo4jBatchDatabase {
/**
* The logger
*/
private static Logger log = LoggerFactory.getLogger(Neo4jBatchDatabase.class);
private static Logger log = Logger.getLogger(Neo4jBatchDatabase.class);
/**
* Name of the field index for the importId

View File

@@ -1,8 +1,7 @@
package org.neo4j.talend;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.neo4j.graphdb.DynamicRelationshipType;
@@ -12,7 +11,7 @@ import java.util.Map;
public class Neo4jBatchInserterRelationship extends Neo4jBatchInserterAbstract {
private static Logger log = LoggerFactory.getLogger(Neo4jBatchInserterRelationship.class);
private static Logger log = Logger.getLogger(Neo4jBatchInserterRelationship.class);
private String relationshipTypeField;
private String direction;

View File

@@ -1,8 +1,7 @@
package org.neo4j.talend;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.neo4j.tooling.ImportTool;
import java.io.BufferedWriter;
@@ -19,7 +18,7 @@ public class Neo4jImportTool {
/**
* The logger
*/
private final static Logger log = LoggerFactory.getLogger(Neo4jImportTool.class);
private final static Logger log = Logger.getLogger(Neo4jImportTool.class);
protected final static String HEADERS_KEY = "HEADERS";
protected final static String FILE_KEY = "FILE";

View File

@@ -1,125 +1,95 @@
<!--
Copyright (C) 2006-2020 Talend Inc. - www.talend.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>components-servicenow</artifactId>
<name>servicenow</name>
<version>1.0.1</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>
<description>Talend Helper library for ServiceNow components</description>
<url>
https://github.com/Talend/tdi-studio-se/
</url>
<licenses>
<license>
<name>Apache-2.0</name>
<url>
http://www.talendforge.org/modules/licenses/APACHE_v2.txt
</url>
</license>
</licenses>
<dependencies>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.6</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.3</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient-cache</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20150729</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.0</version>
<type>jar</type>
<optional>true</optional>
</dependency>
</dependencies>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
</repository>
</distributionManagement>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.2</version>
</plugin>
</plugins>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components</groupId>
<artifactId>components-servicenow</artifactId>
<name>servicenow</name>
<version>1.0.0</version>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
</resources>
</build>
<dependencies>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.6</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.3</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient-cache</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.3.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20150729</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.0</version>
<type>jar</type>
<optional>true</optional>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/java</directory>
</resource>
</resources>
<!-- TODO
<testResources>
<testResource>
<directory>src/test/java</directory>
</testResource>
</testResources>
-->
</build>
</project>

View File

@@ -1,269 +1,274 @@
/**
* Copyright (C) 2006-2019 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.talend.servicenow;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.http.HttpResponse;
import org.apache.http.ParseException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
public class Util {
private HttpClient client;
private String baseurl;
public Util(HttpClient client, String baseurl) {
this.client = client;
this.baseurl = baseurl;
}
private Map<String, String> typeMapping = new HashMap<String, String>();
{
typeMapping.put("string", "id_String");
typeMapping.put("boolean", "id_Boolean");
typeMapping.put("integer", "id_Integer");
typeMapping.put("decimal", "id_BigDecimal");
typeMapping.put("float", "id_Float");
typeMapping.put("glide_date_time", "id_Date");
typeMapping.put("glide_date", "id_Date");
typeMapping.put("glide_time", "id_Date");
}
public static class ColumnMetadata {
private String name;
private String type;
private int maxlength;
private String pattern;
public ColumnMetadata(String name, String type, int maxlength, String pattern) {
this.name = name;
this.type = type;
this.maxlength = maxlength;
this.pattern = pattern;
}
public String getName() {
return name;
}
public String getType() {
return type;
}
public int getMaxLength() {
return maxlength;
}
public String getPattern() {
return pattern;
}
public String toString() {
return name + ":" + type + ":" + maxlength;
}
}
public Map<String, ColumnMetadata> getMetadata(String tablename) throws ClientProtocolException, IOException {
Map<String, ColumnMetadata> result = new HashMap<String, ColumnMetadata>();
List<String> relation = getRelationship(tablename);
StringBuilder sb = new StringBuilder();
for (String tname : relation) {
sb.setLength(0);
sb.append(this.baseurl);
sb.append("/api/now/table/");
sb.append("sys_dictionary");
sb.append("?sysparm_exclude_reference_link=true");
sb.append("&sysparm_query=name=");
sb.append(tname);
sb.append("&sysparm_fields=element,internal_type,max_length,active");
HttpGet httpget = new HttpGet(sb.toString());
httpget.setHeader("Accept", "application/json");
HttpResponse response = this.client.execute(httpget);
List<Map<String, String>> info = extractResponse4MultiRowFromArray(response);
for (Map<String, String> row : info) {
String element = row.get("element");
boolean active = Boolean.parseBoolean(row.get("active"));
if (element != null && !element.isEmpty() && active) {
String talend_type = null;
String pattern = null;
String servicenow_type = (String) row.get("internal_type");
if (servicenow_type != null && !servicenow_type.isEmpty()) {
talend_type = typeMapping.get(servicenow_type);
//need date pattern
if ("glide_date_time".equals(servicenow_type)) {
pattern = "yyyy-MM-dd HH:mm:ss";
} else if ("glide_date".equals(servicenow_type)) {
pattern = "yyyy-MM-dd";
} else if ("glide_time".equals(servicenow_type)) {
pattern = "HH:mm:ss";
}
}
if (talend_type == null) {
talend_type = "id_String";
}
int mlength = 64;
String maxlength = (String) row.get("max_length");
if (maxlength != null && !maxlength.isEmpty()) {
mlength = Integer.parseInt(maxlength);
}
ColumnMetadata column = new ColumnMetadata(element, talend_type, mlength, pattern);
result.put(element, column);
}
}
}
return result;
}
private List<String> getRelationship(String tablename) throws ClientProtocolException, IOException {
LinkedList<String> result = new LinkedList<String>();
StringBuilder sb = new StringBuilder();
sb.append(this.baseurl);
sb.append("/api/now/table/");
sb.append("sys_db_object");
sb.append("?sysparm_exclude_reference_link=true");
sb.append("&sysparm_query=name=");
sb.append(tablename);
sb.append("&sysparm_fields=name,super_class");
HttpGet httpget = new HttpGet(sb.toString());
httpget.setHeader("Accept", "application/json");
HttpResponse response = this.client.execute(httpget);
Map<String, String> info = extractResponse4OneRowFromArray(response);
result.add(info.get("name"));
String superclass = info.get("super_class");
while (superclass != null && !superclass.isEmpty()) {
sb.setLength(0);
sb.append(this.baseurl);
sb.append("/api/now/table/");
sb.append("sys_db_object/");
sb.append(superclass);
sb.append("?sysparm_exclude_reference_link=true");
sb.append("&sysparm_fields=name,super_class");
httpget = new HttpGet(sb.toString());
httpget.setHeader("Accept", "application/json");
response = this.client.execute(httpget);
info = extractResponse4OneRowFromObject(response);
result.add(info.get("name"));
superclass = info.get("super_class");
}
Collections.reverse(result);
return result;
}
private Map<String, String> extractResponse4OneRowFromArray(HttpResponse response)
throws ParseException, IOException {
validateResponse(response);
Map<String, String> result = new HashMap<String, String>();
org.json.JSONArray array = (org.json.JSONArray) getResult(response);
for (int i = 0; i < array.length(); i++) {
org.json.JSONObject row = (org.json.JSONObject) array.get(i);
for (String key : row.keySet()) {
result.put(key, (String) row.get(key));
}
}
return result;
}
private Map<String, String> extractResponse4OneRowFromObject(HttpResponse response)
throws ParseException, IOException {
validateResponse(response);
Map<String, String> result = new HashMap<String, String>();
org.json.JSONObject object = (org.json.JSONObject) getResult(response);
for (String key : object.keySet()) {
result.put(key, (String) object.get(key));
}
return result;
}
private List<Map<String, String>> extractResponse4MultiRowFromArray(HttpResponse response)
throws ParseException, IOException {
validateResponse(response);
List<Map<String, String>> result = new ArrayList<Map<String, String>>();
org.json.JSONArray array = (org.json.JSONArray) getResult(response);
for (int i = 0; i < array.length(); i++) {
Map<String, String> element = new HashMap<String, String>();
org.json.JSONObject row = (org.json.JSONObject) array.get(i);
for (String key : row.keySet()) {
element.put(key, (String) row.get(key));
}
result.add(element);
}
return result;
}
private void validateResponse(HttpResponse response) {
if (response.getStatusLine().getStatusCode() != 200) {
throw new RuntimeException(
"Fail to get the table metadata. The operation has returned the code : " + response.getStatusLine()
+ ".");
}
}
private Object getResult(HttpResponse response) throws ParseException, IOException {
String responseBody = org.apache.http.util.EntityUtils.toString(response.getEntity());
org.json.JSONObject json = new org.json.JSONObject(responseBody);
return json.get("result");
}
package org.talend.servicenow;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.http.HttpResponse;
import org.apache.http.ParseException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
public class Util {
private HttpClient client;
private String baseurl;
public Util(HttpClient client, String baseurl) {
this.client = client;
this.baseurl = baseurl;
}
@SuppressWarnings("rawtypes")
public static void main(String[] args) throws ClientProtocolException, IOException {
boolean exception = true;
org.apache.http.impl.client.CloseableHttpClient client = null;
while(exception) {
try {
//login
org.apache.http.client.CredentialsProvider credsProvider = new org.apache.http.impl.client.BasicCredentialsProvider();
credsProvider.setCredentials(
new org.apache.http.auth.AuthScope(new org.apache.http.HttpHost("sandbox.service-now.com")), new org.apache.http.auth.UsernamePasswordCredentials("talend", "talend")
);
client = org.apache.http.impl.client.HttpClients.custom().setDefaultCredentialsProvider(credsProvider).build();
//get metadata
Util util = new Util(client,"https://sandbox.service-now.com");
Object result = util.getMetadata("incident");
System.out.println(result);
System.out.println(((Map)result).size());
} catch(Exception e) {
System.out.println(e.getMessage());
continue;
} finally {
client.close();
}
exception = false;
}
}
private Map<String,String> typeMapping = new HashMap<String,String>();
{
typeMapping.put("string", "id_String");
typeMapping.put("boolean", "id_Boolean");
typeMapping.put("integer", "id_Integer");
typeMapping.put("decimal", "id_BigDecimal");
typeMapping.put("float", "id_Float");
typeMapping.put("glide_date_time", "id_Date");
typeMapping.put("glide_date", "id_Date");
typeMapping.put("glide_time", "id_Date");
}
public static class ColumnMetadata {
private String name;
private String type;
private int maxlength;
private String pattern;
public ColumnMetadata(String name, String type, int maxlength, String pattern) {
this.name = name;
this.type = type;
this.maxlength = maxlength;
this.pattern = pattern;
}
public String getName() {
return name;
}
public String getType() {
return type;
}
public int getMaxLength() {
return maxlength;
}
public String getPattern() {
return pattern;
}
public String toString() {
return name + ":" + type + ":" + maxlength;
}
}
public Map<String,ColumnMetadata> getMetadata(String tablename) throws ClientProtocolException, IOException {
Map<String,ColumnMetadata> result = new HashMap<String,ColumnMetadata>();
List<String> relation = getRelationship(tablename);
StringBuilder sb = new StringBuilder();
for(String tname : relation) {
sb.setLength(0);
sb.append(this.baseurl);
sb.append("/api/now/table/");
sb.append("sys_dictionary");
sb.append("?sysparm_exclude_reference_link=true");
sb.append("&sysparm_query=name=");
sb.append(tname);
sb.append("&sysparm_fields=element,internal_type,max_length,active");
HttpGet httpget = new HttpGet(sb.toString());
httpget.setHeader("Accept", "application/json");
HttpResponse response = this.client.execute(httpget);
List<Map<String, String>> info = extractResponse4MultiRowFromArray(response);
for(Map<String, String> row : info) {
String element = row.get("element");
boolean active = Boolean.parseBoolean(row.get("active"));
if(element!=null && !element.isEmpty() && active) {
String talend_type = null;
String pattern = null;
String servicenow_type = (String)row.get("internal_type");
if(servicenow_type!=null && !servicenow_type.isEmpty()) {
talend_type = typeMapping.get(servicenow_type);
//need date pattern
if("glide_date_time".equals(servicenow_type)) {
pattern = "yyyy-MM-dd HH:mm:ss";
} else if("glide_date".equals(servicenow_type)) {
pattern = "yyyy-MM-dd";
} else if("glide_time".equals(servicenow_type)) {
pattern = "HH:mm:ss";
}
}
if(talend_type == null) {
talend_type = "id_String";
}
int mlength = 64;
String maxlength = (String)row.get("max_length");
if(maxlength!=null && !maxlength.isEmpty()) {
mlength = Integer.parseInt(maxlength);
}
ColumnMetadata column = new ColumnMetadata(element,talend_type,mlength,pattern);
result.put(element, column);
}
}
}
return result;
}
private List<String> getRelationship(String tablename) throws ClientProtocolException, IOException {
LinkedList<String> result = new LinkedList<String>();
StringBuilder sb = new StringBuilder();
sb.append(this.baseurl);
sb.append("/api/now/table/");
sb.append("sys_db_object");
sb.append("?sysparm_exclude_reference_link=true");
sb.append("&sysparm_query=name=");
sb.append(tablename);
sb.append("&sysparm_fields=name,super_class");
HttpGet httpget = new HttpGet(sb.toString());
httpget.setHeader("Accept", "application/json");
HttpResponse response = this.client.execute(httpget);
Map<String,String> info = extractResponse4OneRowFromArray(response);
result.add(info.get("name"));
String superclass = info.get("super_class");
while(superclass!=null && !superclass.isEmpty()) {
sb.setLength(0);
sb.append(this.baseurl);
sb.append("/api/now/table/");
sb.append("sys_db_object/");
sb.append(superclass);
sb.append("?sysparm_exclude_reference_link=true");
sb.append("&sysparm_fields=name,super_class");
httpget = new HttpGet(sb.toString());
httpget.setHeader("Accept", "application/json");
response = this.client.execute(httpget);
info = extractResponse4OneRowFromObject(response);
result.add(info.get("name"));
superclass = info.get("super_class");
}
Collections.reverse(result);
return result;
}
private Map<String,String> extractResponse4OneRowFromArray(HttpResponse response) throws ParseException, IOException {
validateResponse(response);
Map<String,String> result = new HashMap<String,String>();
org.json.JSONArray array = (org.json.JSONArray)getResult(response);
for(int i=0;i<array.length();i++) {
org.json.JSONObject row = (org.json.JSONObject)array.get(i);
for(String key : row.keySet()) {
result.put(key, (String)row.get(key));
}
}
return result;
}
private Map<String,String> extractResponse4OneRowFromObject(HttpResponse response) throws ParseException, IOException {
validateResponse(response);
Map<String,String> result = new HashMap<String,String>();
org.json.JSONObject object = (org.json.JSONObject)getResult(response);
for(String key : object.keySet()) {
result.put(key, (String)object.get(key));
}
return result;
}
private List<Map<String, String>> extractResponse4MultiRowFromArray(HttpResponse response) throws ParseException, IOException {
validateResponse(response);
List<Map<String, String>> result = new ArrayList<Map<String,String>>();
org.json.JSONArray array = (org.json.JSONArray)getResult(response);
for(int i=0;i<array.length();i++) {
Map<String,String> element = new HashMap<String,String>();
org.json.JSONObject row = (org.json.JSONObject)array.get(i);
for(String key : row.keySet()) {
element.put(key, (String)row.get(key));
}
result.add(element);
}
return result;
}
private void validateResponse(HttpResponse response) {
if(response.getStatusLine().getStatusCode() != 200) {
throw new RuntimeException("Fail to get the table metadata. The operation has returned the code : " + response.getStatusLine() + ".");
}
}
private Object getResult(HttpResponse response) throws ParseException, IOException {
String responseBody = org.apache.http.util.EntityUtils.toString(response.getEntity());
org.json.JSONObject json = new org.json.JSONObject(responseBody);
return json.get("result");
}
}
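For a concrete picture of what getRelationship and getMetadata issue, these are the GET requests built for the table "incident" (the table used in the sample main method above); {baseurl} stands for the instance URL:
GET {baseurl}/api/now/table/sys_db_object?sysparm_exclude_reference_link=true&sysparm_query=name=incident&sysparm_fields=name,super_class
Accept: application/json
GET {baseurl}/api/now/table/sys_dictionary?sysparm_exclude_reference_link=true&sysparm_query=name=incident&sysparm_fields=element,internal_type,max_length,active
Accept: application/json
getRelationship follows super_class links until the chain ends, and getMetadata then repeats the sys_dictionary call for each table in that inheritance chain.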

View File

@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>simpleexcel-2.2-20190722</artifactId>
<artifactId>simpleexcel-2.1-20190507</artifactId>
<version>6.0.0</version>
<packaging>jar</packaging>

View File

@@ -12,21 +12,17 @@
// ============================================================================
package com.talend.excel.xssf.event;
import org.apache.poi.ooxml.util.PackageHelper;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.DataFormatter;
import org.apache.poi.xssf.eventusermodel.ReadOnlySharedStringsTable;
import org.apache.poi.xssf.eventusermodel.XSSFReader;
import org.apache.poi.xssf.model.StylesTable;
import org.apache.poi.ooxml.util.PackageHelper;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.File;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
@@ -36,6 +32,7 @@ import java.util.concurrent.FutureTask;
/**
* created by wwang on 2012-9-27 Detailed comment
*
*/
public class ExcelReader implements Callable {
@@ -49,8 +46,6 @@ public class ExcelReader implements Callable {
private String charset = "UTF-8";
private String password = null;
private java.io.InputStream is;
private List<String> sheetNames = new ArrayList<String>();
@@ -69,17 +64,15 @@ public class ExcelReader implements Callable {
task = new Thread(futureTask);
}
public void parse(String fileURL, String charset, String password) {
public void parse(String fileURL, String charset) {
this.fileURL = fileURL;
this.charset = charset;
this.password = password;
task.start();
}
public void parse(java.io.InputStream is, String charset, String password) {
public void parse(java.io.InputStream is, String charset) {
this.is = is;
this.charset = charset;
this.password = password;
task.start();
}
@@ -127,25 +120,11 @@ public class ExcelReader implements Callable {
public Object call() throws Exception {
OPCPackage pkg = null;
POIFSFileSystem fs = null;
try {
if (password != null) {
if (fileURL != null) {
fs = new POIFSFileSystem(new File(fileURL));
} else {
fs = new POIFSFileSystem(is);
}
Decryptor d = Decryptor.getInstance(new EncryptionInfo(fs));
if (!d.verifyPassword(password)) {
throw new RuntimeException("Error: Cannot decrypt Excel file. Invalid password.");
}
pkg = OPCPackage.open(d.getDataStream(fs));
if (fileURL != null) {
pkg = OPCPackage.open(fileURL);
} else {
if (fileURL != null) {
pkg = OPCPackage.open(fileURL);
} else {
pkg = PackageHelper.open(is);
}
pkg = PackageHelper.open(is);
}
XSSFReader r = new XSSFReader(pkg);
@@ -225,9 +204,6 @@ public class ExcelReader implements Callable {
if (pkg != null) {
pkg.revert();
}
if (fs != null) {
fs.close();
}
cache.notifyErrorOccurred();
}
return null;

View File

@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>talend-soap</artifactId>
<version>2.1-20190716</version>
<version>2.1-20190513</version>
<packaging>jar</packaging>
<name>talend-soap</name>

View File

@@ -124,9 +124,7 @@ public class SOAPUtil {
}
SOAPMessage message = messageFactory.createMessage();
MimeHeaders mimeHeaders = message.getMimeHeaders();
setSoapAction(version, soapAction, mimeHeaders);
mimeHeaders.setHeader("SOAPAction", soapAction);
if (basicAuth) {
addBasicAuthHeader(mimeHeaders, username, password);
}
@@ -265,8 +263,8 @@ public class SOAPUtil {
}
SOAPMessage message = messageFactory.createMessage();
MimeHeaders mimeHeaders = message.getMimeHeaders();
setSoapAction(version, soapAction, mimeHeaders);
SOAPPart soapPart = message.getSOAPPart();
mimeHeaders.setHeader("SOAPAction", soapAction);
SOAPPart soapPart = message.getSOAPPart();
String encoding = getEncoding(soapMessage);
@@ -293,20 +291,8 @@ public class SOAPUtil {
return null;
}
}
/* https://jira.talendforge.org/browse/TDI-42581 skip add SOAPAction directly to header v1.2 */
private void setSoapAction(String version, String soapAction, MimeHeaders mimeHeaders) {
if (SOAP12.equals(version)) {
// in soap version 1.2 param 'action' optional and should not be empty
if( soapAction != null && !soapAction.trim().isEmpty()) {
mimeHeaders.setHeader("Content-Type", "application/soap+xml; charset=utf-8; action=\"" + soapAction + "\"");
}
} else {
mimeHeaders.setHeader("SOAPAction", soapAction);
}
}
private String getEncoding(String text) {
private String getEncoding(String text) {
String result = Charset.defaultCharset().name();
if(text == null) {
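
The setSoapAction change above follows the TDI-42581 note: with SOAP 1.1 the action travels in a dedicated SOAPAction MIME header, while with SOAP 1.2 it becomes an optional parameter of the Content-Type header and is omitted entirely when blank. An illustration with a placeholder action URI ("urn:listProducts" is not from the patch):

    SOAP 1.1:  SOAPAction: "urn:listProducts"
    SOAP 1.2:  Content-Type: application/soap+xml; charset=utf-8; action="urn:listProducts"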

View File

@@ -1,95 +1,95 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="org.talend.designer.components.libs" default="process" basedir=".">
<property name="component.plugin.home" value="../../../org.talend.designer.components.localprovider/components" />
<!-- #################################################### -->
<!-- modification 1: config -->
<property name="jar.name" value="sugarCRMManagement.jar" />
<property name="component.name" value="tSugarCRMInput" />
<property name="author.name" value="wyang" />
<!-- modification 2: compile classpath -->
<path id="compile.classpath">
<pathelement location="${component.plugin.home}/tWebServiceInput/axis.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/commons-discovery-0.2.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/wsdl4j-1.5.1.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/saaj.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/jaxrpc.jar" />
<pathelement location="${component.plugin.home}/tXMLRPCInput/commons-logging-1.1.jar" />
</path>
<!-- #################################################### -->
<!-- sourcecode and final jar path -->
<property name="source.home" value="." />
<property name="jar.home" value="${component.plugin.home}/${component.name}/${jar.name}" />
<!-- temp dir for class files -->
<property name="build.dir" value="../../build" />
<!-- compile option -->
<property name="compile.debug" value="true" />
<property name="compile.deprecation" value="false" />
<property name="compile.optimize" value="true" />
<target name="process" description="prepare a temp dir">
<antcall target="prepare" />
<antcall target="compile" />
<antcall target="clean" />
</target>
<target name="prepare" description="prepare a temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
<mkdir dir="${build.dir}/classes" />
</target>
<target name="compile" description="Compile Java sources">
<!-- compile -->
<javac srcdir="${source.home}" destdir="${build.dir}/classes" debug="${compile.debug}" deprecation="${compile.deprecation}" optimize="${compile.optimize}">
<classpath refid="compile.classpath" />
</javac>
<!-- include source code -->
<copy todir="${build.dir}/classes">
<fileset dir="${source.home}">
<exclude name="build.xml" />
</fileset>
</copy>
<!-- make jar -->
<tstamp>
<format property="date" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<jar destfile="${build.dir}/${jar.name}" basedir="${build.dir}/classes">
<manifest>
<!-- who -->
<attribute name="Built-By" value="${author.name}" />
<!-- when -->
<attribute name="Built-Date" value="${date}" />
<!-- JDK version -->
<attribute name="Created-By" value="${java.version} (${java.vendor})" />
<!-- Information about the program itself -->
<attribute name="Implementation-Vendor" value="Talend SA" />
<attribute name="Implementation-Title" value="${jar.name}" />
<attribute name="Implementation-Version" value="1.0" />
</manifest>
</jar>
<!-- move jar -->
<move file="${build.dir}/${jar.name}" tofile="${jar.home}" />
</target>
<target name="clean" description="clean the temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
</target>
</project>
<?xml version="1.0" encoding="UTF-8"?>
<project name="org.talend.designer.components.libs" default="process" basedir=".">
<property name="component.plugin.home" value="../../../org.talend.designer.components.localprovider/components" />
<!-- #################################################### -->
<!-- modification 1: config -->
<property name="jar.name" value="sugarCRMManagement.jar" />
<property name="component.name" value="tSugarCRMInput" />
<property name="author.name" value="wyang" />
<!-- modification 2: compile classpath -->
<path id="compile.classpath">
<pathelement location="${component.plugin.home}/tWebServiceInput/axis.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/commons-discovery-0.2.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/wsdl4j-1.5.1.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/saaj.jar" />
<pathelement location="${component.plugin.home}/tWebServiceInput/jaxrpc.jar" />
<pathelement location="${component.plugin.home}/tXMLRPCInput/commons-logging-1.1.jar" />
</path>
<!-- #################################################### -->
<!-- sourcecode and final jar path -->
<property name="source.home" value="." />
<property name="jar.home" value="${component.plugin.home}/${component.name}/${jar.name}" />
<!-- temp dir for class files -->
<property name="build.dir" value="../../build" />
<!-- compile option -->
<property name="compile.debug" value="true" />
<property name="compile.deprecation" value="false" />
<property name="compile.optimize" value="true" />
<target name="process" description="prepare a temp dir">
<antcall target="prepare" />
<antcall target="compile" />
<antcall target="clean" />
</target>
<target name="prepare" description="prepare a temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
<mkdir dir="${build.dir}/classes" />
</target>
<target name="compile" description="Compile Java sources">
<!-- compile -->
<javac srcdir="${source.home}" destdir="${build.dir}/classes" debug="${compile.debug}" deprecation="${compile.deprecation}" optimize="${compile.optimize}">
<classpath refid="compile.classpath" />
</javac>
<!-- include source code -->
<copy todir="${build.dir}/classes">
<fileset dir="${source.home}">
<exclude name="build.xml" />
</fileset>
</copy>
<!-- make jar -->
<tstamp>
<format property="date" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<jar destfile="${build.dir}/${jar.name}" basedir="${build.dir}/classes">
<manifest>
<!-- who -->
<attribute name="Built-By" value="${author.name}" />
<!-- when -->
<attribute name="Built-Date" value="${date}" />
<!-- JDK version -->
<attribute name="Created-By" value="${java.version} (${java.vendor})" />
<!-- Information about the program itself -->
<attribute name="Implementation-Vendor" value="Talend SA" />
<attribute name="Implementation-Title" value="${jar.name}" />
<attribute name="Implementation-Version" value="1.0" />
</manifest>
</jar>
<!-- move jar -->
<move file="${build.dir}/${jar.name}" tofile="${jar.home}" />
</target>
<target name="clean" description="clean the temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
</target>
</project>

View File

@@ -7,7 +7,7 @@
<groupId>org.talend.libraries</groupId>
<artifactId>talend-codegen-utils</artifactId>
<!-- release for revert version of library -->
<version>0.28.0</version>
<version>0.25.4</version>
<packaging>jar</packaging>
<properties>

View File

@@ -51,8 +51,6 @@ public final class TypeConverter {
public static final String LIST = "id_List";
public static final String OBJECT = "id_Object";
private TypeConverter() {
// Class provides static utility methods and shouldn't be instantiated
}
@@ -169,8 +167,6 @@ public final class TypeConverter {
return SHORT;
case "java.util.Date":
return DATE;
case "java.lang.Object":
return OBJECT;
default:
throw new UnsupportedOperationException("Unrecognized java class " + javaClass);
}

View File

@@ -5,9 +5,9 @@
>
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components.lib</groupId>
<groupId>org.talend.libraries</groupId>
<artifactId>talend-db-exasol</artifactId>
<version>2.1.5</version>
<version>2.1.2</version>
<packaging>jar</packaging>
<name>talend-db-exasol</name>
@@ -33,22 +33,10 @@
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>

View File

@@ -27,15 +27,15 @@ import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
/**
* This class provides an IMPORT command for the EXASol database.
* @author Jan Lolling, jan.lolling@cimt-ag.de
*/
public class EXABulkUtil {
private static Logger logger = LoggerFactory.getLogger(EXABulkUtil.class);
private static Logger logger = Logger.getLogger(EXABulkUtil.class);
public static final String CSV = "CSV";
public static final String FBV = "FBV";
public static final String ORA = "ORA";
@@ -83,14 +83,23 @@ public class EXABulkUtil {
private int sourceIdentifierCase = 0; // 0 = unchanged, 1 = lower case, 2 = upper case
private boolean onlyBuildSQLCode = false;
public void setDebug(boolean debug) {
if (debug) {
logger.setLevel(Level.DEBUG);
} else {
logger.setLevel(Level.INFO);
}
}
private String createNumberFormat(Integer length, Integer precision, boolean hasGroups) {
if (length != null && length.intValue() > 0) {
StringBuilder sb = new StringBuilder();
for (int i = length - 1; i >= 0; i--) {
if(hasGroups && i < length - 1 && i > 0 && (i % 3 == 2)) {
sb.append("G");
}
sb.append("9");
int numGroups = (length.intValue() / 3) + 1;
for (int i = 0; i < numGroups; i++) {
if (i > 0 && hasGroups) {
sb.append("G");
}
sb.append("999");
}
if (precision != null && precision.intValue() > 0) {
sb.append("D");

View File

@@ -39,6 +39,7 @@ public class TestEXABulkUtil {
truncateStat.execute("truncate table " + errorTable);
truncateStat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setTransferSecure(true);
u.setConnection(connection);
u.setTable(table);
@@ -82,6 +83,7 @@ public class TestEXABulkUtil {
}
truncateStat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setConnection(connection);
u.setTable(table);
u.setRemoteFileUrl("sftp://172.16.214.132/home/tisadmin/Downloads/");
@@ -128,6 +130,7 @@ public class TestEXABulkUtil {
}
truncateStat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setConnection(connection);
u.setTable(table);
u.setDbmsSourceType("JDBC");
@@ -167,6 +170,7 @@ public class TestEXABulkUtil {
}
truncateStat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setConnection(connection);
u.setTable(table);
u.setDbmsSourceType("ORA");
@@ -207,6 +211,7 @@ public class TestEXABulkUtil {
}
stat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setConnection(connection);
u.setTable(table);
u.setDbmsSourceType("JDBC");
@@ -245,6 +250,7 @@ public class TestEXABulkUtil {
}
stat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setConnection(connection);
u.setTable(table);
u.setDbmsSourceType("JDBC");
@@ -284,6 +290,7 @@ public class TestEXABulkUtil {
}
stat.close();
EXABulkUtil u = new EXABulkUtil();
u.setDebug(true);
u.setConnection(connection);
u.setTable(table);
u.setDbmsSourceType("EXA");

View File

@@ -4,7 +4,7 @@
<groupId>org.talend</groupId>
<artifactId>talend-httputil</artifactId>
<name>talend-httputil</name>
<version>1.0.5</version>
<version>1.0.4</version>
<properties>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -20,7 +20,7 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.10.1</version>
<version>2.9.9</version>
</dependency>
<dependency>

View File

@@ -6,7 +6,7 @@
<groupId>org.talend.libraries</groupId>
<artifactId>job-audit</artifactId>
<version>1.1</version>
<version>1.0</version>
<properties>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -50,43 +50,28 @@
<dependencies>
<dependency>
<groupId>org.talend.daikon</groupId>
<artifactId>audit-common</artifactId>
<version>1.8.0</version>
<artifactId>daikon-audit</artifactId>
<version>0.31.8</version>
</dependency>
<!--
<dependency>
<groupId>org.talend.daikon</groupId>
<artifactId>audit-log4j1</artifactId>
<version>1.8.0</version>
<artifactId>audit-logback</artifactId>
<version>0.31.8</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.3.0-alpha4</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<scope>test</scope>
</dependency>
-->
<dependency>
<groupId>org.talend.daikon</groupId>
<artifactId>audit-log4j2</artifactId>
<version>1.8.0</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.12.1</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.12.1</version>
<optional>true</optional>
</dependency>
</dependencies>
</project>

View File

@@ -18,30 +18,13 @@ public class JobEventAuditLoggerFactory {
final AuditConfigurationMap config = AuditConfiguration.loadFromProperties(properties);
AbstractBackend logger = null;
//load log4j2 implement firstly
String loggerClass = "org.talend.logging.audit.log4j2.Log4j2Backend";
String loggerClass = "org.talend.logging.audit.logback.LogbackBackend";
try {
final Class<?> clz = Class.forName(loggerClass);
logger = (AbstractBackend) clz.getConstructor(AuditConfigurationMap.class).newInstance(config);
} catch (ReflectiveOperationException e) {
// do nothing
}
//load log4j1 implement if not found log4j2
if (logger == null) {
loggerClass = "org.talend.logging.audit.log4j1.Log4j1Backend";
try {
final Class<?> clz = Class.forName(loggerClass);
logger = (AbstractBackend) clz.getConstructor(AuditConfigurationMap.class).newInstance(config);
} catch (ReflectiveOperationException e) {
// do nothing
}
}
if(logger == null) {
throw new RuntimeException("Unable to load backend : " + loggerClass);
}
final Class<?> clz = Class.forName(loggerClass);
logger = (AbstractBackend) clz.getConstructor(AuditConfigurationMap.class).newInstance(config);
} catch (ReflectiveOperationException e) {
throw new RuntimeException("Unable to load backend " + loggerClass, e);
}
final DefaultAuditLoggerBase loggerBase = new DefaultAuditLoggerBase(logger, config);

View File

@@ -8,8 +8,7 @@ public class JobAuditLoggerTest {
public static void main(String[] args) {
Properties props = new Properties();
String root_logger_name = "audit";
props.setProperty("root.logger", root_logger_name);
props.setProperty("root.logger", "audit");
props.setProperty("encoding", "UTF-8");
props.setProperty("application.name", "Talend Studio");
props.setProperty("service.name", "Talend Studio Job");
@@ -20,10 +19,6 @@ public class JobAuditLoggerTest {
props.setProperty("appender.file.maxsize", "52428800");
props.setProperty("appender.file.maxbackup", "20");
props.setProperty("host", "false");
org.apache.logging.log4j.core.config.Configurator.setLevel(root_logger_name, org.apache.logging.log4j.Level.DEBUG);
//org.apache.log4j.Logger.getLogger("audit").setLevel(org.apache.log4j.Level.DEBUG);
final JobAuditLogger logger = JobEventAuditLoggerFactory.createJobAuditLogger(props);
Context context = JobContextBuilder.create().jobName("fetch_from_s3_every_day").jobId("jobid_123")
.jobVersion("0.1").connectorType("tXMLMAP").connectorId("tXMLMap_1")

View File

@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>talend-mscrm</artifactId>
<version>3.4-20191012</version>
<version>3.4-20190513</version>
<packaging>jar</packaging>
<name>talend-mscrm</name>
@@ -109,7 +109,7 @@
<dependency>
<groupId>com.microsoft.azure</groupId>
<artifactId>adal4j</artifactId>
<version>1.1.1-20191012</version>
<version>1.1.1-patch</version>
</dependency>
</dependencies>
<build>

View File

@@ -22,8 +22,7 @@ import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.transport.http.HTTPTransportConstants;
import org.apache.axis2.transport.http.HttpTransportProperties;
import org.apache.axis2.transport.http.HttpTransportProperties.ProxyProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.datacontract.schemas._2004._07.system_collections_generic.KeyValuePairOfEndpointTypestringztYlk6OT;
import org.talend.ms.crm.sdk.OnlineAuthenticationPolicy;
import org.talend.ms.crm.sdk.OrganizationServiceStubWrapper;
@@ -62,7 +61,7 @@ import com.microsoft.schemas.xrm._2011.contracts.discovery.RetrieveOrganizationR
*/
public class MSCRMClient {
static Logger logger = LoggerFactory.getLogger(MSCRMClient.class.getName());
static Logger logger = Logger.getLogger(MSCRMClient.class.getName());
/**
* Microsoft account (e.g. youremail@live.com) or Microsoft Office 365 (Org ID e.g.

View File

@@ -38,8 +38,7 @@ import org.apache.http.impl.client.SystemDefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
@@ -54,7 +53,7 @@ public final class DeviceIdManager {
/**
* Logger
*/
static final Logger Log = LoggerFactory.getLogger(DeviceIdManager.class.getName());
static final Logger Log = Logger.getLogger(DeviceIdManager.class.getName());
private static final Random RandomInstance = new Random();

View File

@@ -16,8 +16,7 @@ import javax.wsdl.WSDLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.xml.sax.SAXException;
/**
@@ -26,7 +25,7 @@ import org.xml.sax.SAXException;
*/
public final class OnlineAuthenticationPolicy {
static Logger logger = LoggerFactory.getLogger(OnlineAuthenticationPolicy.class.getName());
static Logger logger = Logger.getLogger(OnlineAuthenticationPolicy.class.getName());
/**
* Construct an Instance of the OnlineAuthenticationPolicy class.

View File

@@ -44,8 +44,7 @@ import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
@@ -56,7 +55,7 @@ import org.xml.sax.SAXException;
*/
public final class WsdlTokenManager {
static Logger logger = LoggerFactory.getLogger(WsdlTokenManager.class.getName());
static Logger logger = Logger.getLogger(WsdlTokenManager.class.getName());
private final String DeviceTokenTemplate = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + " <s:Envelope "
+ " xmlns:s=\"http://www.w3.org/2003/05/soap-envelope\""

View File

@@ -1,55 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components.lib</groupId>
<artifactId>talend-proxy</artifactId>
<version>1.0.0</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<properties>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
</properties>
<distributionManagement>
<snapshotRepository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceSnapshot/</url>
<snapshots><enabled>true</enabled></snapshots>
<releases><enabled>false</enabled></releases>
</snapshotRepository>
<repository>
<id>talend_nexus_deployment</id>
<url>${talend.nexus.url}/nexus/content/repositories/TalendOpenSourceRelease/</url>
<snapshots><enabled>false</enabled></snapshots>
<releases><enabled>true</enabled></releases>
</repository>
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.9</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
</dependencies>
</project>

View File

@@ -1,20 +0,0 @@
package org.talend.proxy;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
public class ProxyCreds {
private Pair<String, String> credPair;
public ProxyCreds(String userName, String pass) {
this.credPair = new ImmutablePair<>(userName, pass);
}
public String getUser() {
return credPair.getKey();
}
public String getPass() {
return credPair.getValue();
}
}

View File

@@ -1,36 +0,0 @@
package org.talend.proxy;
import java.net.Proxy;
import java.util.HashMap;
import java.util.Map;
/**
* Use only inside of ThreadLocal
*/
public class ProxyHolder {
private Map<String, Proxy> proxyMap;
public ProxyHolder() {
proxyMap = new HashMap<>();
}
/**
*
* @param proxy HTTP or SOCKS proxy instance to use
* @param host without protocol
* @param port -1 to apply proxy for every port
*/
public void putNewHost(Proxy proxy, String host, int port) {
if (port != -1) {
proxyMap.put(host + ":" + port, proxy);
} else {
proxyMap.put(host, proxy);
}
}
public Map<String, Proxy> getProxyMap() {
return proxyMap;
}
}

View File

@@ -1,41 +0,0 @@
package org.talend.proxy;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.util.HashMap;
import java.util.Map;
public class TalendProxyAuthenticator extends Authenticator {
private static TalendProxyAuthenticator instance;
public static synchronized TalendProxyAuthenticator getInstance() {
if (instance == null) {
instance = new TalendProxyAuthenticator();
}
return instance;
}
private TalendProxyAuthenticator() {
}
private Map<String, ProxyCreds> proxyCredsMap = new HashMap<>();
public synchronized void addAuthForProxy(String host, String port, String userName, String pass) {
proxyCredsMap.put(host + ":" + port, new ProxyCreds(userName, pass));
}
public synchronized ProxyCreds getCredsForProxyURI(String proxyURI) {
return proxyCredsMap.get(proxyURI);
}
@Override
protected synchronized PasswordAuthentication getPasswordAuthentication() {
String requestURI = super.getRequestingHost() + ":" + super.getRequestingPort();
if (proxyCredsMap.containsKey(requestURI)) {
return new PasswordAuthentication(proxyCredsMap.get(requestURI).getUser(), proxyCredsMap.get(requestURI).getPass().toCharArray());
} else {
return super.getPasswordAuthentication(); //don't use authentication
}
}
}

View File

@@ -1,98 +0,0 @@
package org.talend.proxy;
import java.io.IOException;
import java.net.Proxy;
import java.net.ProxySelector;
import java.net.SocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.apache.log4j.Logger;
public class TalendProxySelector extends ProxySelector {
private static TalendProxySelector instance;
private static final Logger log = Logger.getLogger(TalendProxySelector.class);
private ThreadLocal<ProxyHolder> threadLocalProxyHolder;
private ProxyHolder globalProxyHolder;
public static synchronized TalendProxySelector getInstance() {
if (instance == null) {
instance = new TalendProxySelector();
}
return instance;
}
private TalendProxySelector() {
globalProxyHolder = new ProxyHolder();
}
public synchronized void addProxySettings(Proxy proxy, boolean threadSpecific, String host, int port) {
if (threadSpecific) {
if (threadLocalProxyHolder == null) {
threadLocalProxyHolder = new ThreadLocal<>();
}
if (threadLocalProxyHolder.get() == null) {
ProxyHolder newProxyHolder = new ProxyHolder();
threadLocalProxyHolder.set(newProxyHolder);
}
threadLocalProxyHolder.get().putNewHost(proxy, host, port);
} else {
globalProxyHolder.putNewHost(proxy, host, port);
}
}
/**
* Finds and returns the proxy that was configured for a specific host.
* @param uriString host:port
* @return the Proxy configured for the host, or Proxy.NO_PROXY if none was set
*/
public synchronized Proxy getProxyForUriString(String uriString) {
if (proxyHolderContainsHost(globalProxyHolder, uriString)) {
log.debug("All threads proxy " + globalProxyHolder.getProxyMap().get(uriString) + " is using to connect to URI " + uriString);
return globalProxyHolder.getProxyMap().containsKey(uriString) ? globalProxyHolder.getProxyMap().get(uriString) :
globalProxyHolder.getProxyMap().get(uriString.substring(0, uriString.lastIndexOf(":")));
} else if (threadLocalProxyHolder != null && proxyHolderContainsHost(threadLocalProxyHolder.get(), uriString)) {
log.debug("Proxy " + threadLocalProxyHolder.get().getProxyMap().get(uriString) + " is using to connect to URI " + uriString);
return threadLocalProxyHolder.get().getProxyMap().containsKey(uriString) ?
threadLocalProxyHolder.get().getProxyMap().get(uriString) :
threadLocalProxyHolder.get().getProxyMap().get(uriString.substring(0, uriString.lastIndexOf(":")));
} else {
log.debug("No proxy is using to connect to URI " + uriString);
return Proxy.NO_PROXY;
}
}
@Override
public List<Proxy> select(URI uri) {
String uriString = uri.getHost();
if (uri.getPort() != -1) {
uriString += ":" + uri.getPort() ;
}
log.debug("Network request hadling from Talend proxy selector. Thread " + Thread.currentThread().getName() + ". URI to connect: " + uriString);
return Collections.singletonList(getProxyForUriString(uriString));
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
if (ioe != null) {
log.warn("Connect failed when use Talend ProxySelector to the URI:" + uri.toString(), ioe);
} else {
log.warn("Connect failed when use Talend ProxySelector to the " + uri);
}
}
private static boolean proxyHolderContainsHost(ProxyHolder holder, String uriString) {
return holder != null &&
(holder.getProxyMap().containsKey(uriString)
|| (uriString.contains(":") && holder.getProxyMap().containsKey(uriString.substring(0, uriString.indexOf(":")))));
}
}
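
A minimal usage sketch for these two singletons, relying only on the standard java.net hooks; the method calls match the classes shown above, while the host names, ports and credentials are placeholders, not values from the patch:

    // Install the selector and authenticator process-wide.
    java.net.ProxySelector.setDefault(TalendProxySelector.getInstance());
    java.net.Authenticator.setDefault(TalendProxyAuthenticator.getInstance());

    // Route calls to api.example.com:443 through an HTTP proxy and register credentials for that proxy.
    java.net.Proxy proxy = new java.net.Proxy(java.net.Proxy.Type.HTTP,
            new java.net.InetSocketAddress("proxy.example.com", 3128));
    TalendProxySelector.getInstance().addProxySettings(proxy, false, "api.example.com", 443);
    TalendProxyAuthenticator.getInstance().addAuthForProxy("proxy.example.com", "3128", "user", "secret");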

View File

@@ -4,12 +4,12 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>talend-ws</artifactId>
<version>1.0.1-20191112</version>
<version>1.0.1-20190204</version>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<cxf.version>3.3.4</cxf.version>
<cxf.version>3.1.1</cxf.version>
<odata.version>4.3.0</odata.version>
<slf4j.version>1.7.12</slf4j.version>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>
@@ -90,42 +90,42 @@
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-core</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-simple</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-tools-common</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-wsdl</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-bindings-soap</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-databinding-jaxb</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-transports-http-jetty</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-transports-http</artifactId>
<version>${cxf.version}</version>
<version>3.2.6</version>
</dependency>
<dependency>
<groupId>org.talend.libraries</groupId>

View File

@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>talendExcel-1.5-20190731</artifactId>
<artifactId>talendExcel-1.4-20190531</artifactId>
<version>6.0.0</version>
<packaging>jar</packaging>

View File

@@ -3,7 +3,6 @@ package org.talend;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
@@ -11,23 +10,17 @@ import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.EncryptionMode;
import org.apache.poi.poifs.crypt.Encryptor;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.apache.poi.xssf.streaming.SXSSFSheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbookType;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
public class ExcelTool {
@@ -77,8 +70,6 @@ public class ExcelTool {
private boolean isTrackAllColumns = false;
private String password = null;
public ExcelTool() {
cellStylesMapping = new HashMap<>();
}
@@ -119,7 +110,21 @@ public class ExcelTool {
initPreXlsx(fileName);
}
if (appendWorkbook) {
appendActionForFile(fileName);
InputStream inp = new FileInputStream(fileName);
wb = WorkbookFactory.create(inp);
sheet = wb.getSheet(sheetName);
if (sheet != null) {
if (appendSheet) {
if (sheet.getLastRowNum() != 0 || sheet.getRow(0) != null) {
curY = sheet.getLastRowNum() + 1;
}
} else {
wb.removeSheetAt(wb.getSheetIndex(sheetName));
sheet = wb.createSheet(sheetName);
}
} else {
sheet = wb.createSheet(sheetName);
}
} else {
xlsxFile.delete();
wb = new SXSSFWorkbook(rowAccessWindowSize);
@@ -135,47 +140,6 @@ public class ExcelTool {
}
}
public void prepareXlsmFile(String fileName) throws Exception {
File xlsmFile = new File(fileName);
if (xlsmFile.exists()) {
if (isAbsY && keepCellFormat) {
initPreXlsx(fileName);
}
if (appendWorkbook) {
appendActionForFile(fileName);
} else {
xlsmFile.delete();
wb = new SXSSFWorkbook(new XSSFWorkbook(XSSFWorkbookType.XLSM), rowAccessWindowSize);
sheet = wb.createSheet(sheetName);
}
} else {
wb = new SXSSFWorkbook(new XSSFWorkbook(XSSFWorkbookType.XLSM), rowAccessWindowSize);
sheet = wb.createSheet(sheetName);
}
if (isAbsY) {
startX = absX;
curY = absY;
}
}
private void appendActionForFile(String fileName) throws IOException {
InputStream inp = new FileInputStream(fileName);
wb = WorkbookFactory.create(inp);
sheet = wb.getSheet(sheetName);
if (sheet != null) {
if (appendSheet) {
if (sheet.getLastRowNum() != 0 || sheet.getRow(0) != null) {
curY = sheet.getLastRowNum() + 1;
}
} else {
wb.removeSheetAt(wb.getSheetIndex(sheetName));
sheet = wb.createSheet(sheetName);
}
} else {
sheet = wb.createSheet(sheetName);
}
}
/**
*
* @return start insert row index.
@@ -338,14 +302,13 @@ public class ExcelTool {
}
public void writeExcel(OutputStream outputStream) throws Exception {
try {
wb.write(outputStream);
wb.close();
} finally {
if (outputStream != null) {
outputStream.close();
}
}
try {
wb.write(outputStream);
} finally {
if(outputStream != null) {
outputStream.close();
}
}
}
public void writeExcel(String fileName, boolean createDir) throws Exception {
@@ -356,29 +319,14 @@ public class ExcelTool {
pFile.mkdirs();
}
}
FileOutputStream fileOutput = new FileOutputStream(fileName);
if (appendWorkbook && appendSheet && recalculateFormula) {
evaluateFormulaCell();
}
FileOutputStream fileOutput = new FileOutputStream(fileName);
POIFSFileSystem fs = null;
try {
if (password == null) {
wb.write(fileOutput);
} else {
fs = new POIFSFileSystem();
Encryptor encryptor = new EncryptionInfo(EncryptionMode.agile).getEncryptor();
encryptor.confirmPassword(password);
OutputStream encryptedDataStream = encryptor.getDataStream(fs);
wb.write(encryptedDataStream);
encryptedDataStream.close(); // this is mandatory to do that at that point
fs.writeFilesystem(fileOutput);
}
wb.write(fileOutput);
} finally {
wb.close();
fileOutput.close();
if (fs != null) {
fs.close();
}
fileOutput.close();
}
}
@@ -401,9 +349,4 @@ public class ExcelTool {
((SXSSFSheet) sheet).flushRows();
}
}
public void setPasswordProtection(String password) {
this.password = password;
}
}

View File

@@ -2,7 +2,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>talendMsgMailUtil-1.1-20191012</artifactId>
<artifactId>talendMsgMailUtil-1.1-20170419</artifactId>
<name>talendMsgMailUtil</name>
<version>6.0.0</version>
<packaging>jar</packaging>
@@ -37,11 +37,6 @@
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>

View File

@@ -12,8 +12,8 @@ import org.apache.poi.hsmf.datatypes.MAPIProperty;
import org.apache.poi.hsmf.datatypes.StringChunk;
import org.apache.poi.hsmf.datatypes.Types;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.log4j.Logger;
import org.apache.log4j.Level;
public class MsgMailUtil {
@@ -24,15 +24,6 @@ public class MsgMailUtil {
public MsgMailUtil() {
}
private enum Level {
DEBUG,
INFO,
TRACE,
FATAL,
WARN,
ERROR
}
public MsgMailUtil(String fileName, String outAttachmentPath)
throws IOException {
@@ -40,8 +31,8 @@ public class MsgMailUtil {
this.outAttachmentPath = outAttachmentPath;
}
public void activeLog(String logger_name, String position) {
this.log = LoggerFactory.getLogger(logger_name);
public void activeLog(Logger log, String position) {
this.log = log;
this.position = position;
}
@@ -122,20 +113,23 @@ public class MsgMailUtil {
}
}
private void processLog(Level level, String message) {
public void processLog(Level level, String message) {
message = position + " - " + message;
if (this.log != null) {
switch (level) {
case TRACE:
switch (level.toInt()) {
case Level.TRACE_INT:
log.trace(message);
break;
case ERROR:
case Level.ERROR_INT:
log.error(message);
break;
case INFO:
case Level.FATAL_INT:
log.fatal(message);
break;
case Level.INFO_INT:
log.info(message);
break;
case WARN:
case Level.WARN_INT:
log.warn(message);
break;
default:

View File

@@ -2,9 +2,9 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components.lib</groupId>
<artifactId>talend_DB_mssqlUtil</artifactId>
<version>1.4</version>
<groupId>org.talend.libraries</groupId>
<artifactId>talend_DB_mssqlUtil-1.3-20190523</artifactId>
<version>6.0.0</version>
<packaging>jar</packaging>
<name>talend_DB_mssqlUtil</name>

View File

@@ -16,8 +16,6 @@ import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import java.text.SimpleDateFormat;
import java.text.ParseException;
/**
* DOC Administrator class global comment. Detailled comment
@@ -119,15 +117,10 @@ public class MSSqlGenerateTimestampUtil {
return null;
}
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
{
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
}
/**
* parse datetimeoffset string to date.
* datetimeoffset string show as YYYY-MM-DD hh:mm:ss[.nnnnnnn] [+|-]hh:mm
* and no enough infomation for the DST decision,so no DST consider[not sure if 100% support for DST].
*
* @param datetimeOffsetString
* @return
@@ -138,17 +131,13 @@ public class MSSqlGenerateTimestampUtil {
String offsetString = datetimeOffsetString.substring(idx + 1);
int offset = TimeZone.getTimeZone("GMT" + offsetString).getRawOffset();
long millisecondsWithOffset = 0l;
try {
millisecondsWithOffset = sdf.parse(datetimeString).getTime();
} catch(ParseException e) {
throw new RuntimeException(e.getMessage());
}
//get GMT's millisecond number since GMT 1970_01_01
long milliseconds4GMT_to_GMT_1970_01_01 = millisecondsWithOffset - offset;
return new java.util.Date(milliseconds4GMT_to_GMT_1970_01_01);
// get local timezone, also consider the DST
TimeZone local = TimeZone.getDefault();
int localOffset = local.getOffset(new java.util.Date().getTime());
long milliseconds = java.sql.Timestamp.valueOf(datetimeString).getTime();
long millisecondsToGMT1970_01_01 = milliseconds - offset + localOffset;
return new java.util.Date(millisecondsToGMT1970_01_01);
}
}
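
A worked example of the offset handling in the variant above that pins the SimpleDateFormat to GMT (the value is a placeholder): for "2019-06-28 16:56:38 +02:00", datetimeString is "2019-06-28 16:56:38" and offsetString is "+02:00"; TimeZone.getTimeZone("GMT+02:00").getRawOffset() yields 7,200,000 ms, so subtracting it from the GMT-parsed milliseconds lands on 2019-06-28 14:56:38 UTC, the instant that the datetimeoffset value denotes.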

View File

@@ -1,10 +1,10 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components.lib</groupId>
<groupId>org.talend.libraries</groupId>
<artifactId>talendsap</artifactId>
<name>talendsap</name>
<version>1.0.2</version>
<version>1.0.1</version>
<properties>
<talend.nexus.url>https://artifacts-oss.talend.com</talend.nexus.url>

View File

@@ -90,7 +90,6 @@ public class DocumentExtractor {
List<Element> tablesAndChangingElements = new ArrayList<Element>(3);
tablesAndChangingElements.add(functionElement.element("TABLES"));
tablesAndChangingElements.add(functionElement.element("CHANGING"));
tablesAndChangingElements.add(functionElement.element("OUTPUT"));
for(Element tablesOrChangingElement : tablesAndChangingElements) {
if (tablesOrChangingElement == null) {
@@ -122,10 +121,6 @@ public class DocumentExtractor {
}
result.add(row);
}
if(!result.isEmpty()) {
return result;
}
}
return result;

View File

@@ -49,11 +49,11 @@ public class DocumentHelper {
}
}
public void addSingleParameter(String name, String value, SAPParameterType parameter_type) {
public void addSingleParameter(String name, String value, boolean isChanging) {
if(value == null) {
value = "";
}
if (parameter_type == SAPParameterType.CHANGING) {
if (isChanging) {
correctChanging();
changing.addElement(name).setText(value);
} else {
@@ -62,8 +62,8 @@ public class DocumentHelper {
}
}
public void addStructParameter(String name, SAPParameterType parameter_type) {
if (parameter_type == SAPParameterType.CHANGING) {
public void addStructParameter(String name, boolean isChanging) {
if (isChanging) {
correctChanging();
currentStruct = changing.addElement(name);
} else {
@@ -79,16 +79,13 @@ public class DocumentHelper {
currentStruct.addElement(name).setText(value);
}
public void addTableParameter(String name, SAPParameterType parameter_type) {
if (parameter_type == SAPParameterType.CHANGING) {
public void addTableParameter(String name, boolean isChanging) {
if (isChanging) {
correctChanging();
currentTable = changing.addElement(name);
} else if(parameter_type == SAPParameterType.TABLES) {
} else {
correctTables();
currentTable = tables.addElement(name);
} else {
correctInput();
currentTable = input.addElement(name);
}
}
@@ -111,18 +108,18 @@ public class DocumentHelper {
DocumentHelper helper = new DocumentHelper();
helper.setFunctionName("READ_TABLE_FUNCTION");
helper.addSingleParameter("ID", "1", SAPParameterType.CHANGING);
helper.addSingleParameter("NAME", "gaoyan", SAPParameterType.IMPORT);
helper.addSingleParameter("ID", "1", true);
helper.addSingleParameter("NAME", "gaoyan", false);
helper.addStructParameter("INFO", SAPParameterType.CHANGING);
helper.addStructParameter("INFO", true);
helper.addStructChildParameter("ID", "2");
helper.addStructChildParameter("NAME", "wangwei");
helper.addStructParameter("INFO1", SAPParameterType.IMPORT);
helper.addStructParameter("INFO1", false);
helper.addStructChildParameter("ID1", "4");
helper.addStructChildParameter("NAME1", "momo");
helper.addTableParameter("TABLE1", SAPParameterType.TABLES);
helper.addTableParameter("TABLE1", false);
for (int i = 0; i < 200000; i++) {
helper.addTableRow();
helper.addTableRowChildParameter("c1", i + "");
@@ -135,7 +132,7 @@ public class DocumentHelper {
helper.addTableRowChildParameter("c8", "wangwei" + i);
}
helper.addTableParameter("TABLE2", SAPParameterType.TABLES);
helper.addTableParameter("TABLE2", false);
for (int i = 0; i < 2; i++) {
helper.addTableRow();
helper.addTableRowChildParameter("ID4", i + "");

View File

@@ -1,8 +0,0 @@
package org.talend.sap;
public enum SAPParameterType {
IMPORT,
CHANGING,
TABLES,
EXPORT
}

View File

@@ -0,0 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="org.talend.designer.components.libs" default="process" basedir=".">
<property name="component.plugin.home" value="../../../org.talend.designer.components.localprovider/components" />
<!-- #################################################### -->
<!-- modification 1: config -->
<property name="jar.name" value="talendssl.jar" />
<property name="component.name" value="tLDAPInput" />
<property name="author.name" value="wliu" />
<!-- modification 2: compile classpath -->
<path id="compile.classpath">
</path>
<!-- #################################################### -->
<!-- sourcecode and final jar path -->
<property name="source.home" value="." />
<property name="jar.home" value="${component.plugin.home}/${component.name}/${jar.name}" />
<!-- temp dir for class files -->
<property name="build.dir" value="../../build" />
<!-- compile option -->
<property name="compile.debug" value="true" />
<property name="compile.deprecation" value="false" />
<property name="compile.optimize" value="true" />
<target name="process" description="prepare a temp dir">
<antcall target="prepare" />
<antcall target="compile" />
<antcall target="clean" />
</target>
<target name="prepare" description="prepare a temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
<mkdir dir="${build.dir}/classes" />
</target>
<target name="compile" description="Compile Java sources">
<!-- compile -->
<javac srcdir="${source.home}" destdir="${build.dir}/classes" debug="${compile.debug}" deprecation="${compile.deprecation}" optimize="${compile.optimize}">
<classpath refid="compile.classpath" />
</javac>
<!-- include source code -->
<copy todir="${build.dir}/classes">
<fileset dir="${source.home}">
<exclude name="build.xml" />
</fileset>
</copy>
<!-- make jar -->
<tstamp>
<format property="date" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<jar destfile="${build.dir}/${jar.name}" basedir="${build.dir}/classes">
<manifest>
<!-- who -->
<attribute name="Built-By" value="${author.name}" />
<!-- when -->
<attribute name="Built-Date" value="${date}"/>
<!-- JDK version -->
<attribute name="Created-By" value="${java.version} (${java.vendor})" />
<!-- Information about the program itself -->
<attribute name="Implementation-Vendor" value="Talend SA" />
<attribute name="Implementation-Title" value="${jar.name}" />
<attribute name="Implementation-Version" value="1.0" />
</manifest>
</jar>
<!-- move jar -->
<move file="${build.dir}/${jar.name}" tofile="${jar.home}" />
</target>
<target name="clean" description="clean the temp dir">
<delete dir="${build.dir}" />
<mkdir dir="${build.dir}" />
</target>
</project>

View File

@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.libraries</groupId>
<artifactId>talendzip</artifactId>
<version>1.0-20190917</version>
<version>1.0-20190527</version>
<packaging>jar</packaging>
<properties>
@@ -40,19 +40,19 @@
<dependency>
<groupId>net.lingala.zip4j</groupId>
<artifactId>zip4j</artifactId>
<version>1.3.3</version>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.19</version>
<version>1.10</version>
</dependency>
<dependency>
<groupId>org.talend.libraries</groupId>
<artifactId>checkArchive-1.1-20190917</artifactId>
<artifactId>checkArchive-1.1-20181130</artifactId>
<version>6.0.0</version>
</dependency>
</dependencies>
</project>
</project>

View File

@@ -1,7 +1,5 @@
package com.talend.compress.zip;
import java.io.IOException;
public class Util {
public java.util.List<UnzippedFile> unzippedFiles = new java.util.ArrayList<UnzippedFile>();
@@ -55,10 +53,6 @@ public class Util {
f = new java.io.File(path, shortName);
}
if(!f.getCanonicalPath().startsWith(new java.io.File(path).getCanonicalPath())) {
throw new IOException("expanding " + f.getName()
+ " would create file outside of " + path);
}
checkDir(f);
f.getParentFile().mkdirs();
@@ -95,10 +89,6 @@ public class Util {
f = new java.io.File(path, shortName);
}
if(!f.getCanonicalPath().startsWith(new java.io.File(path).getCanonicalPath())) {
throw new IOException("expanding " + f.getName()
+ " would create file outside of " + path);
}
if (isDirectory) {
if (!f.exists()) {
f.mkdirs();
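
The canonical-path comparison shown in both hunks above is the usual guard against "Zip Slip" style path traversal, where an archive entry such as ../../evil.sh would escape the extraction directory. A minimal standalone sketch of the same check; the directory and the entryName variable are placeholders:

    java.io.File dir = new java.io.File("/tmp/extract");
    java.io.File f = new java.io.File(dir, entryName); // entryName comes from the archive
    if (!f.getCanonicalPath().startsWith(dir.getCanonicalPath())) {
        throw new java.io.IOException("expanding " + f.getName() + " would create file outside of " + dir);
    }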

View File

@@ -5,9 +5,9 @@
>
<modelVersion>4.0.0</modelVersion>
<groupId>org.talend.components.lib</groupId>
<groupId>org.talend.libraries</groupId>
<artifactId>thashfile</artifactId>
<version>3.1-20190910</version>
<version>3.0-20170711</version>
<packaging>jar</packaging>
<name>thashfile</name>
@@ -45,8 +45,8 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>

View File

@@ -9,44 +9,45 @@ import java.util.concurrent.ConcurrentHashMap;
import org.talend.designer.components.hashfile.memory.AdvancedMemoryHashFile;
public class MapHashFile {
// use this map instead of globalMap
//use this map instead of globalMap
private Map<String, AdvancedMemoryHashFile> resourceMap = new ConcurrentHashMap<>();
// keep the present key of AdvancedMemoryHashFile as key and the previous key as
// value
//keep the present key of AdvancedMemoryHashFile as key and the previous key as value
private Map<String, String> keyMap = new ConcurrentHashMap<>();
// singleton
//singleton
private static final MapHashFile mhf = new MapHashFile();
public static TalendMultiThreadLockMap resourceLockMap = new TalendMultiThreadLockMap();
public static TalendMultiThreadLockMap resourceLockMap = new TalendMultiThreadLockMap();
public static class TalendMultiThreadLockMap {
public static class TalendMultiThreadLockMap {
private Map<Object, Object> tMultiTheadLockMap = new ConcurrentHashMap<>();
public Object get(Object key) {
return tMultiTheadLockMap.computeIfAbsent(key, k -> new Object());
}
public void remove(Object key) {
tMultiTheadLockMap.remove(key);
}
}
private Map<Object, Object> tMultiTheadLockMap = new HashMap<Object, Object>();
public synchronized Object get(Object key) {
if (tMultiTheadLockMap.get(key) == null) {
tMultiTheadLockMap.put(key, new Object());
}
return tMultiTheadLockMap.get(key);
}
public synchronized void remove(Object key){
tMultiTheadLockMap.remove(key);
}
}
private MapHashFile() {
}
public static MapHashFile getMapHashFile() {
return mhf;
}
// get the linked AdvancedMemoryHashFile
//get the linked AdvancedMemoryHashFile
public AdvancedMemoryHashFile getAdvancedMemoryHashFile(String key) {
AdvancedMemoryHashFile amhf = resourceMap.get(key);
String prekey = keyMap.get(key);
// if present AdvancedMemoryHashFile is null get the AdvancedMemoryHashFile
// before present.
//if present AdvancedMemoryHashFile is null get the AdvancedMemoryHashFile before present.
int size = keyMap.size();
while (amhf == null && (size--) > 0) {
while(amhf==null && (size--)>0){
amhf = resourceMap.get(prekey);
prekey = keyMap.get(prekey);
}
@@ -60,18 +61,16 @@ public class MapHashFile {
public Map<String, String> getKeyMap() {
return keyMap;
}
public void clearCache(String key) {
public void clearCache(String key){
clearChildCache(getRootCache(key));
}
public void clearChildCache(String root) {
public void clearChildCache(String root){
Set<String> set = keyMap.keySet();
synchronized (keyMap) {
synchronized(keyMap) {
Iterator<String> it = set.iterator();
while (it.hasNext()) {
while(it.hasNext()){
String key = it.next();
if (root.equals(keyMap.get(key))) {
if(root.equals(keyMap.get(key))){
this.resourceMap.remove(key);
this.keyMap.remove(key);
clearChildCache(key);
@@ -80,11 +79,11 @@ public class MapHashFile {
}
this.resourceMap.remove(root);
}
public String getRootCache(String cache) {
public String getRootCache(String cache){
String root;
while ((root = keyMap.get(cache)) != null) {
cache = root;
while((root = keyMap.get(cache))!=null){
cache=root;
}
return cache;
}

View File

@@ -149,8 +149,6 @@
<IMPORT NAME="Driver-V5R3_V6R1" MODULE="jt400_V5R3.jar" MVN="mvn:org.talend.libraries/jt400_V5R3/6.0.0" REQUIRED_IF="DB_VERSION == 'jt400_V5R3.jar'" />
<IMPORT NAME="Driver-V6R1_V7R2" MODULE="jt400_V6R1.jar" MVN="mvn:org.talend.libraries/jt400_V6R1/6.0.0" REQUIRED_IF="DB_VERSION == 'jt400_V6R1.jar'" />
<IMPORT NAME="Driver-V7R1_V7R3" MODULE="jt400-9.8.jar" MVN="mvn:net.sf.jt400/jt400/9.8" REQUIRED_IF="DB_VERSION == 'jt400-9.8.jar'" />
<IMPORT NAME="slf4j-api-1.7.25.jar" MODULE="slf4j-api-1.7.25.jar" MVN="mvn:org.slf4j/slf4j-api/1.7.25" REQUIRED_IF="USE_SHARED_CONNECTION =='true'" />
<IMPORT NAME="slf4j-log4j12-1.7.25.jar" MODULE="slf4j-log4j12-1.7.25.jar" MVN="mvn:org.slf4j/slf4j-log4j12/1.7.25" REQUIRED_IF="USE_SHARED_CONNECTION =='true'" />
</IMPORTS>
</CODEGENERATION>

View File

@@ -301,7 +301,6 @@ skeleton="../templates/db_output_bulk.skeleton"
%>
pstmt_<%=cid %>.addBatch();
nb_line_<%=cid%>++;
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
<%dbLog.data().addingToBatch(dbLog.var("nb_line"), dbLog.str(dataAction));%>
batchSizeCounter_<%=cid%>++;
<%
@@ -309,7 +308,6 @@ skeleton="../templates/db_output_bulk.skeleton"
%>
try {
nb_line_<%=cid%>++;
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
insertedCount_<%=cid%> = insertedCount_<%=cid%> + pstmt_<%=cid %>.executeUpdate();
<%dbLog.data().inserting(dbLog.var("nb_line"));%>
} catch(java.sql.SQLException e) {
@@ -433,7 +431,6 @@ skeleton="../templates/db_output_bulk.skeleton"
%>
pstmt_<%=cid %>.addBatch();
nb_line_<%=cid%>++;
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
<%dbLog.data().addingToBatch(dbLog.var("nb_line"), dbLog.str(dataAction));%>
batchSizeCounter_<%=cid%>++;
<%
@@ -441,7 +438,6 @@ skeleton="../templates/db_output_bulk.skeleton"
%>
try {
nb_line_<%=cid%>++;
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
updatedCount_<%=cid%> = updatedCount_<%=cid%> + pstmt_<%=cid %>.executeUpdate();
<%dbLog.data().updating(dbLog.var("nb_line"));%>
} catch(java.sql.SQLException e) {
@@ -588,7 +584,6 @@ skeleton="../templates/db_output_bulk.skeleton"
<%
}
%>
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
updatedCount_<%=cid%> = updatedCount_<%=cid%> + pstmtUpdate_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().updating(dbLog.var("nb_line"));%>
@@ -664,7 +659,6 @@ skeleton="../templates/db_output_bulk.skeleton"
<%
}
%>
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
insertedCount_<%=cid%> = insertedCount_<%=cid%> + pstmtInsert_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().inserting(dbLog.var("nb_line"));%>
@@ -839,13 +833,11 @@ skeleton="../templates/db_output_bulk.skeleton"
<%
}
%>
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
insertedCount_<%=cid%> = insertedCount_<%=cid%> + pstmtInsert_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().inserting(dbLog.var("nb_line"));%>
}else{
nb_line_<%=cid%>++;
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
<%dbLog.data().updating(dbLog.var("nb_line"));%>
}
} catch(java.sql.SQLException e) {
@@ -922,14 +914,12 @@ skeleton="../templates/db_output_bulk.skeleton"
%>
pstmt_<%=cid %>.addBatch();
nb_line_<%=cid%>++;
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
<%dbLog.data().addingToBatch(dbLog.var("nb_line"), dbLog.str(dataAction));%>
batchSizeCounter_<%=cid%>++;
<%
}else {
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
deletedCount_<%=cid%> = deletedCount_<%=cid%> + pstmt_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().deleting(dbLog.var("nb_line"));%>

View File

@@ -58,7 +58,7 @@ USER.NAME=Username
USE_COMMIT_CONTROL.NAME=Use commit control
USE_EXISTING_CONNECTION.NAME=Using existing connection
USE_FIELD_OPTIONS.NAME=Use field options
ENABLE_DEBUG_MODE.NAME=Debug query mode
ENABLE_DEBUG_MODE.NAME=Enable debug mode
DB_VERSION.NAME=DB Version
DB_VERSION.ITEM.V5R2_V5R4=V5R2 to V5R4 (Deprecated)
DB_VERSION.ITEM.V5R3_V6R1=V5R3 to V6R1 (Deprecated)

View File

@@ -123,9 +123,6 @@
<IMPORT NAME="common2.6" MODULE="commons-lang-2.6.jar" MVN="mvn:commons-lang/commons-lang/2.6" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-lang-2.6.jar" REQUIRED="true"/>
<IMPORT NAME="common-logging1.1.3" MODULE="commons-logging-1.1.3.jar" MVN="mvn:commons-logging/commons-logging/1.1.3" UrlPath="platform:/plugin/org.talend.libraries.apache.common/lib/commons-logging-1.1.3.jar" REQUIRED="true"/>
<IMPORT NAME="slf4j-api-1.7.25.jar" MODULE="slf4j-api-1.7.25.jar" MVN="mvn:org.slf4j/slf4j-api/1.7.25" REQUIRED_IF="USE_SHARED_CONNECTION =='true'" />
<IMPORT NAME="slf4j-log4j12-1.7.25.jar" MODULE="slf4j-log4j12-1.7.25.jar" MVN="mvn:org.slf4j/slf4j-log4j12/1.7.25" REQUIRED_IF="USE_SHARED_CONNECTION =='true'" />
</IMPORTS>
</CODEGENERATION>

View File

@@ -139,7 +139,6 @@ skeleton="../templates/db_output_bulk.skeleton"
}
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
insertedCount_<%=cid%> = insertedCount_<%=cid%> + pstmt_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().inserting(dbLog.var("nb_line"));%>
@@ -229,7 +228,6 @@ skeleton="../templates/db_output_bulk.skeleton"
}
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
updatedCount_<%=cid%> = updatedCount_<%=cid%> + pstmt_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().updating(dbLog.var("nb_line"));%>
@@ -347,7 +345,6 @@ skeleton="../templates/db_output_bulk.skeleton"
}
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
updatedCount_<%=cid%> = updatedCount_<%=cid%> + pstmtUpdate_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().updating(dbLog.var("nb_line"));%>
@@ -409,7 +406,6 @@ skeleton="../templates/db_output_bulk.skeleton"
}
%>
try{
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
insertedCount_<%=cid%> = insertedCount_<%=cid%> + pstmtInsert_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().inserting(dbLog.var("nb_line"));%>
@@ -505,7 +501,6 @@ skeleton="../templates/db_output_bulk.skeleton"
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
updateFlag_<%=cid%>=pstmtUpdate_<%=cid %>.executeUpdate();
if(updateFlag_<%=cid%> != 0) {
nb_line_<%=cid%>++;
@@ -572,7 +567,6 @@ skeleton="../templates/db_output_bulk.skeleton"
}
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
insertedCount_<%=cid%> = insertedCount_<%=cid%> + pstmtInsert_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().inserting(dbLog.var("nb_line"));%>
@@ -651,7 +645,6 @@ skeleton="../templates/db_output_bulk.skeleton"
}
%>
try {
<%if(isEnableDebug){dbLog.data().sqlDebugPrint("globalMap.get(\""+cid+"_QUERY\")");}%>
deletedCount_<%=cid%> = deletedCount_<%=cid%> + pstmt_<%=cid %>.executeUpdate();
nb_line_<%=cid%>++;
<%dbLog.data().deleting(dbLog.var("nb_line"));%>

View File

@@ -52,7 +52,7 @@ FIELD_OPTIONS.ITEM.UPDATE_KEY=Key in update
FIELD_OPTIONS.ITEM.DELETE_KEY=Key in delete
FIELD_OPTIONS.ITEM.UPDATABLE=Updatable
FIELD_OPTIONS.ITEM.INSERTABLE=Insertable
ENABLE_DEBUG_MODE.NAME=Debug query mode
ENABLE_DEBUG_MODE.NAME=Enable debug mode
NB_LINE.NAME=Number of line
NB_LINE_UPDATED.NAME= NB Line Updated

View File

@@ -0,0 +1,240 @@
MaxMind GeoIP License version 1.6, March 23rd, 2007
MAXMIND END-USER LICENSE AGREEMENT
By clicking on the words "I Agree" below, you agree that your use of the
MaxMind GeoIP database products and services is subject to the terms and
conditions set forth in this MaxMind End User License Agreement (this
"Agreement").
MaxMind, Inc. ("MaxMind") provides a line of database services and products
that provide the geographic information and other data associated with
specific Internet protocol addresses. These database services and products
are referred to in this Agreement individually as the "GeoIP Database" and
collectively as the "GeoIP Databases". The data available through the GeoIP
Databases is referred to in this Agreement as the "GeoIP Data". Some
products in the MaxMind GeoIP Line may include MaxMind's proprietary computer
programs used to query the GeoIP Databases (the "GeoIP Programs").
ADDITIONAL POLICIES.
The following policies are incorporated into this End User Agreement by
reference and provide additional terms and conditions related to specific
services and products provided by MaxMind:
minFraud Service Terms of Use
Each of these policies may be amended at any time and such amendments
shall be binding and effective as indicated below in the section on
Changes To The Agreement.
LIMITED GRANT OF RIGHTS.
In accordance with the terms of this Agreement, MaxMind hereby grants you a
non-exclusive, non-transferable limited license to access and use the GeoIP
Databases and GeoIP Data for your own internal Restricted Business
purposes. Restricted Business purposes are limited to customizing website
content, fraud prevention, geographic reporting, and similar business
purposes. You agree to use the GeoIP Data and GeoIP Databases only in a
manner that is consistent with applicable laws.
RESTRICTIONS ON USE.
Except as expressly permitted in this Agreement, you may not, nor may you
permit others to:
(a) copy any portion of the GeoIP Databases except as reasonably required
for using the GeoIP Database as permitted hereunder,
(b) allow anyone other than yourself or your employees to access the GeoIP
Databases, or any portion thereof, without MaxMind's express written
permission,
(c) use the GeoIP Databases to develop a database, infobase, online or
similar database service, or other information resource in any media (print,
electronic or otherwise, now existing or developed in the future) for sale
to, distribution to, display to or use by others,
(d) create compilations or derivative works of the GeoIP Databases,
(e) use the GeoIP Databases in any fashion that may infringe any copyright,
intellectual property right, contractual right, or proprietary or property
right or interest held by MaxMind,
(f) store in a retrieval system accessible to the public, transfer,
publish, distribute, display to others, broadcast, sell, or sublicense the
GeoIP Databases, or any portion thereof,
(g) remove or obscure any copyright notice or other notice or terms of use
contained in the GeoIP Databases,
(h) use the GeoIP Data to create or otherwise support the transmission of
unsolicited, commercial email,
(i) remove, disable, avoid, circumvent, or defeat any functionality in the
GeoIP Databases or GeoIP Programs designed to limit or control access to or
use of the GeoIP Databases or GeoIP Data,
(j) use, copy or otherwise access any portion of the GeoIP Data for which
you have not made payment to MaxMind. If for any reason, you access such
GeoIP Data, these terms and conditions apply to your use of such data and
you agree to pay all applicable charges, or
(k) copy, reverse engineer, decompile, disassemble, derive source code,
modify or prepare derivative works of the GeoIP Programs.
OWNERSHIP AND INTELLECTUAL PROPERTY RIGHTS.
(a) Trade Secrets and Confidential Information. You acknowledge and agree
that any and all confidential information and materials from which MaxMind
derives actual or potential economic value constitutes MaxMind's
confidential and proprietary trade secrets (collectively, "Trade Secrets").
You further acknowledge and agree that MaxMind's Trade Secrets include, but
are not limited to, the GeoIP Data contained in the GeoIP Databases and the
technology used in the GeoIP Programs. You shall maintain any information
learned about MaxMind's Trade Secrets as confidential and shall not disclose
such information or permit such information to be disclosed to any person or
entity. With respect to all such information, you shall exercise the same
degree of care to protect MaxMind's Trade Secrets that you exercise with
respect to protecting your own confidential information, and in no event
less than reasonable care.
(b) Ownership. All intellectual property rights including copyrights, moral
rights, trademarks, trade secrets, proprietary rights to the GeoIP
Databases, GeoIP Data and GeoIP Programs are exclusively owned by MaxMind.
You acknowledge and agree that you obtain no right, title or interest
therein. You hereby assign to MaxMind all copyrights, intellectual property
rights, and any other proprietary or property rights or interests in and to
any work created in violation of this Agreement.
FEES.
MaxMind's current fee schedule for using the GeoIP Databases and related
services is posted on the MaxMind website (url: http://www.maxmind.com).
You are responsible for paying all fees associated with the use of the GEOIP
Databases.
CHANGES TO THE AGREEMENT.
MaxMind may amend this Agreement at any time. Any such amendment(s) shall
be binding and effective upon the earlier of (i) the date that is thirty
(30) days after posting of the amended Agreement on MaxMind's web site, or
(ii) the date that MaxMind provides notice to you of the amended Agreement
pursuant to the notice provisions in this Agreement; except that changes to
charges and payment terms may be made only upon 30 days' prior written
notice to you. You may immediately terminate this Agreement upon notice to
MaxMind if a change is unacceptable to you. Your continued use of the GeoIP
Data, GeoIP Databases or GeoIP Programs following notice to you of a change
shall constitute your acceptance of the change.
LIMITATION ON LIABILITY.
MAXMIND'S MAXIMUM TOTAL LIABILITY FOR ALL OCCURRENCES (IF ANY), TAKING PLACE
DURING ANY TWELVE-MONTH PERIOD (OR A PORTION THEREOF, IF THIS AGREEMENT IS NOT
IN EFFECT FOR TWELVE MONTHS), ARISING OUT OF OR IN ANY WAY RELATED TO THE
AUTHORIZED OR UNAUTHORIZED ACTS OF MAXMIND'S EMPLOYEES OR MAXMIND'S PERFORMANCE
OR NONPERFORMANCE OF THE SERVICES PROVIDED HEREIN, INCLUDING (BUT NOT LIMITED
TO) ERRORS OF DESIGN OR ERRORS WHICH ARE DUE SOLELY TO MALFUNCTION OF
MAXMIND-CONTROLLED MACHINES OR FAILURES OF MAXMIND OPERATORS, MAXMIND
PROGRAMMERS OR MAXMIND-DEVELOPED PROGRAMS, SHALL BE LIMITED TO GENERAL MONEY
DAMAGES IN AN AMOUNT NOT TO EXCEED THE TOTAL AMOUNT PAID BY YOU FOR SERVICES
PROVIDED BY MAXMIND UNDER THIS AGREEMENT DURING SAID TWELVE-MONTH PERIOD (OR
DURING SUCH SHORTER PERIOD THAT THIS AGREEMENT IS IN EFFECT). YOU AGREE THAT THE
FOREGOING SHALL CONSTITUTE YOUR EXCLUSIVE REMEDY. YOU HEREBY RELEASE MAXMIND,
ITS OFFICERS, EMPLOYEES AND AFFILIATES FROM ANY AND ALL OBLIGATIONS, LIABILITIES
AND CLAIMS IN EXCESS OF THIS LIMITATION.
NO CONSEQUENTIAL DAMAGES.
UNDER NO CIRCUMSTANCES INCLUDING NEGLIGENCE SHALL MAXMIND OR ANY RELATED PARTY OR SUPPLIER BE
LIABLE FOR INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL OR PUNITIVE DAMAGES;
OR FOR LOSS OF PROFITS, REVENUE, OR DATA; THAT ARE DIRECTLY OR INDIRECTLY
RELATED TO THE USE OF, OR THE INABILITY TO USE THE SITE OR SERVICES, WHETHER
IN AN ACTION IN CONTRACT, TORT, PRODUCT LIABILITY, STRICT LIABILITY, STATUTE
OR OTHERWISE EVEN IF MAXMIND HAS BEEN ADVISED OF THE POSSIBILITY OF THOSE
DAMAGES.
NO WARRANTIES.
THE GEOIP DATABASES, GEOIP DATA AND GEOIP PROGRAMS ARE FURNISHED ON AN "AS
IS", AS-AVAILABLE BASIS. MAXMIND MAKES NO WARRANTY, EXPRESS OR IMPLIED,
WITH RESPECT TO THE CAPABILITY OF THE GEOIP DATABASES, GEOIP DATA AND GEOIP
PROGRAMS OR THE ACCURACY OR THE COMPLETENESS OF THE GEOIP DATA. ALL
WARRANTIES OF ANY TYPE, EXPRESS OR IMPLIED, INCLUDING THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT OF
THIRD PARTY RIGHTS ARE EXPRESSLY DISCLAIMED. MAXMIND DOES NOT WARRANT THAT
THE SERVICES WILL MEET ANY PARTICULAR CRITERIA OF PERFORMANCE OR QUALITY, OR
THAT THE SITE IS FREE OF OTHER HARMFUL COMPONENTS. NEVERTHELESS, MAXMIND
SHALL MAKE COMMERCIALLY REASONABLE EFFORTS TO MAINTAIN THE SITE FREE OF
VIRUSES AND MALICIOUS CODE.
GOVERNING LAW.
This Agreement shall be governed and interpreted pursuant to the laws of the
Commonwealth of Massachusetts, applicable to contracts made and to be
performed wholly in Massachusetts, without regard to principles of conflicts
of laws.
You specifically consent to personal jurisdiction in Massachusetts in connection
with any dispute between you and MaxMind arising out of this Agreement or
pertaining to the subject matter hereof. You agree that the exclusive venue for
any dispute hereunder shall be in the state and federal courts in Boston,
Massachusetts.
NOTICES
Notices given under this Agreement shall be in writing and sent by
facsimile, e-mail, or by first class mail or equivalent. MaxMind shall
direct notice to you at the facsimile number, e-mail address, or physical
mailing address (collectively, "Address") you provided in the registration
process. You shall direct notice to MaxMind at the following address:
MaxMind, Inc.
PO Box 230074
Boston MA 02123-0074
Email: legal@maxmind.com
Fax: (815) 301-8737
Either party may change its Address for notice at anytime by giving notice
of the new Address as provided in this section.
COMPLETE AGREEMENT.
This Agreement represents
the entire agreement between you and MaxMind with respect to the subject
matter hereof and supersedes all previous representations, understandings or
agreements, oral or written, between the parties regarding the subject
matter hereof.
APPLICABLE LAWS.
You agree to use the GeoIP Data and GeoIP Databases only in a
manner that is consistent with applicable laws.
ASSIGNMENT.
You may not assign your rights in this Agreement without MaxMind's prior
written consent.
SEVERABILITY.
Should any provision of this Agreement be held void, invalid or inoperative,
such decision shall not affect any other provision hereof, and the remainder
of this Agreement shall be effective as though such void, invalid or
inoperative provision had not been contained herein.
FAILURE TO ENFORCE.
The failure of MaxMind to enforce any provision of these terms and
conditions shall not constitute or be construed as a waiver of such
provision or of the right to enforce it at a later time.
FACSIMILES.
If you submit a document which includes your facsimile signature, MaxMind
may treat the facsimile signature as an original of your signature.
CAPTIONS.
The section headings used herein, are for convenience only and shall have no
force or effect upon the construction or interpretation of any provision
hereof.

View File

@@ -215,8 +215,6 @@
</TEMPLATES>
<IMPORTS>
<IMPORT NAME="Driver-MysqlConnector" MODULE="mysql-connector-java-5.1.30-bin.jar" MVN="mvn:org.talend.libraries/mysql-connector-java-5.1.30-bin/6.0.0" />
<IMPORT NAME="slf4j-api-1.7.25.jar" MODULE="slf4j-api-1.7.25.jar" MVN="mvn:org.slf4j/slf4j-api/1.7.25" REQUIRED_IF="(USE_SHARED_CONNECTION == 'true' AND SPECIFY_DATASOURCE_ALIAS=='false')" />
<IMPORT NAME="slf4j-log4j12-1.7.25.jar" MODULE="slf4j-log4j12-1.7.25.jar" MVN="mvn:org.slf4j/slf4j-log4j12/1.7.25" REQUIRED_IF="(USE_SHARED_CONNECTION == 'true' AND SPECIFY_DATASOURCE_ALIAS=='false')" />
</IMPORTS>
</CODEGENERATION>
<RETURNS/>

View File

@@ -73,7 +73,7 @@ FIELD_OPTIONS.ITEM.DELETE_KEY=Key in delete
FIELD_OPTIONS.ITEM.UPDATABLE=Updatable
FIELD_OPTIONS.ITEM.INSERTABLE=Insertable
PROPERTIES.NAME=Additional JDBC Parameters
ENABLE_DEBUG_MODE.NAME=Debug query mode
ENABLE_DEBUG_MODE.NAME=Enable debug mode
NB_LINE_UPDATED.NAME=Number Of Updated Lines
NB_LINE_INSERTED.NAME=Number Of Inserted Lines

View File

@@ -121,14 +121,14 @@
<IMPORT NAME="aws-java-sdk-1.11.406.jar" MODULE="aws-java-sdk-1.11.406.jar"
MVN="mvn:com.amazonaws/aws-java-sdk/1.11.406"
REQUIRED="true" />
<IMPORT NAME="jackson-core-2.10.1.jar" MODULE="jackson-core-2.10.1.jar"
MVN="mvn:com.fasterxml.jackson.core/jackson-core/2.10.1"
<IMPORT NAME="jackson-core-2.9.9.jar" MODULE="jackson-core-2.9.9.jar"
MVN="mvn:com.fasterxml.jackson.core/jackson-core/2.9.9"
REQUIRED="true" />
<IMPORT NAME="jackson-databind-2.10.1.jar" MODULE="jackson-databind-2.10.1.jar"
MVN="mvn:com.fasterxml.jackson.core/jackson-databind/2.10.1"
<IMPORT NAME="jackson-databind-2.9.9.jar" MODULE="jackson-databind-2.9.9.jar"
MVN="mvn:com.fasterxml.jackson.core/jackson-databind/2.9.9"
REQUIRED="true" />
<IMPORT NAME="jackson-annotations-2.10.1.jar" MODULE="jackson-annotations-2.10.1.jar"
MVN="mvn:com.fasterxml.jackson.core/jackson-annotations/2.10.1"
<IMPORT NAME="jackson-annotations-2.9.0.jar" MODULE="jackson-annotations-2.9.0.jar"
MVN="mvn:com.fasterxml.jackson.core/jackson-annotations/2.9.0"
REQUIRED="true" />
<IMPORT NAME="httpcore-4.4.9.jar" MODULE="httpcore-4.4.9.jar"
MVN="mvn:org.apache.httpcomponents/httpcore/4.4.9"

Some files were not shown because too many files have changed in this diff.