Merge pull request #1910 from alibaba/datax_0913
Datax month 9 features
penglin358 authored Sep 13, 2023
2 parents 051fe82 + 74f12ff commit 9a1f887
Showing 49 changed files with 2,505 additions and 609 deletions.
9 changes: 9 additions & 0 deletions README.md
@@ -110,6 +110,15 @@ DataX already has a fairly comprehensive plugin ecosystem; mainstream RDBMS databases, N

DataX plans to continue with monthly iterative releases, and interested contributors are welcome to submit Pull requests. The contents of each monthly release are described below.

- [datax_v202309](https://github.com/alibaba/DataX/releases/tag/datax_v202309)
    - Support adding a where condition when syncing data from Phoenix
    - Support Huawei GaussDB reader and writer plugins
    - Fix the ClickReader plugin runtime error "Can't find bundle for base name"
    - Add a DataX debugging module
    - Fix the error reported for empty ORC files
    - Optimize obwriter performance
    - txtfilewriter: add support for exporting data as insert statements

- [datax_v202308](https://github.com/alibaba/DataX/releases/tag/datax_v202308)
    - OTS plugin update
    - databend plugin update
@@ -27,8 +27,6 @@ public class ClickhouseReader extends Reader {
private static final Logger LOG = LoggerFactory.getLogger(ClickhouseReader.class);

public static class Job extends Reader.Job {
private static MessageSource MESSAGE_SOURCE = MessageSource.loadResourceBundle(ClickhouseReader.class);

private Configuration jobConfig = null;
private CommonRdbmsReader.Job commonRdbmsReaderMaster;


This file was deleted.

@@ -5,25 +5,62 @@

import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Time;
import java.util.Date;

/**
* Created by jingxing on 14-8-24.
*/
public class DateColumn extends Column {

private DateType subType = DateType.DATETIME;

public static enum DateType {
DATE, TIME, DATETIME
}

/**
 * Constructs a DateColumn with a null value, using DATETIME as the Date subtype.
 */
public DateColumn() {
this((Long)null);
}
private DateType subType = DateType.DATETIME;

private int nanos = 0;

private int precision = -1;

public static enum DateType {
DATE, TIME, DATETIME
}

/**
 * Constructs a DateColumn from a time value (java.sql.Time), using TIME as the Date subtype: time only, no date.
 */
public DateColumn(Time time, int nanos, int jdbcPrecision) {
this(time);
if (time != null) {
setNanos(nanos);
}
if (jdbcPrecision == 10) {
setPrecision(0);
}
if (jdbcPrecision >= 12 && jdbcPrecision <= 17) {
setPrecision(jdbcPrecision - 11);
}
}

public long getNanos() {
return nanos;
}

public void setNanos(int nanos) {
this.nanos = nanos;
}

public int getPrecision() {
return precision;
}

public void setPrecision(int precision) {
this.precision = precision;
}

/**
 * Constructs a DateColumn with a null value, using DATETIME as the Date subtype.
 */
public DateColumn() {
this((Long) null);
}

/**
 * Constructs a DateColumn from a stamp (a Unix timestamp), using DATETIME as the Date subtype
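The new Time-based constructor above keeps the sub-second nanos and derives a fractional-second precision from the JDBC column precision (10 maps to 0 digits, 12-17 map to 1-6). A minimal sketch of how a reader might feed it from a JDBC ResultSet; the connection URL, query, column index, and driver behavior are illustrative assumptions, not code from this commit:

import com.alibaba.datax.common.element.DateColumn;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;

public class TimeColumnSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/test", "user", "pass");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT t FROM demo")) {
            ResultSetMetaData meta = rs.getMetaData();
            while (rs.next()) {
                Time time = rs.getTime(1);
                // Assumes the driver also exposes the fractional seconds of the TIME value via getTimestamp().
                Timestamp ts = rs.getTimestamp(1);
                int nanos = (ts == null) ? 0 : ts.getNanos();
                // For a TIME(n) column the reported precision is expected to be 10 (n = 0) or 12..17 (n = 1..6),
                // which the constructor maps to 0..6 fractional digits.
                int jdbcPrecision = meta.getPrecision(1);
                DateColumn column = new DateColumn(time, nanos, jdbcPrecision);
                // column.getNanos() and column.getPrecision() are now available to the writer side.
            }
        }
    }
}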
@@ -0,0 +1,34 @@
package com.alibaba.datax.common.util;

import org.apache.commons.lang3.StringUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * Runs a {@link LoggerFunction} at most once per {@code limit} milliseconds for a given key name,
 * so that hot code paths do not flood the log.
 *
 * @author jitongchen
 * @date 2023/9/7 9:47 AM
 */
public class LimitLogger {

private static Map<String, Long> lastPrintTime = new HashMap<>();

public static void limit(String name, long limit, LoggerFunction function) {
if (StringUtils.isBlank(name)) {
name = "__all__";
}
if (limit <= 0) {
function.apply();
} else {
if (!lastPrintTime.containsKey(name)) {
lastPrintTime.put(name, System.currentTimeMillis());
function.apply();
} else {
if (System.currentTimeMillis() > lastPrintTime.get(name) + limit) {
lastPrintTime.put(name, System.currentTimeMillis());
function.apply();
}
}
}
}
}
@@ -0,0 +1,10 @@
package com.alibaba.datax.common.util;

/**
 * Deferred logging action invoked by {@link LimitLogger}.
 *
 * @author molin.lxd
 * @date 2021-05-09
 */
public interface LoggerFunction {

void apply();
}
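Together, the two new utility classes above form a simple throttle for noisy log statements. A minimal usage sketch, assuming slf4j on the classpath; the key name, interval, and surrounding loop are illustrative and not part of this commit:

import com.alibaba.datax.common.util.LimitLogger;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LimitLoggerSketch {
    private static final Logger LOG = LoggerFactory.getLogger(LimitLoggerSketch.class);

    public static void main(String[] args) throws InterruptedException {
        for (int i = 0; i < 100; i++) {
            // The warning is emitted at most once every 5 seconds under the key "dirty-record",
            // no matter how often this line is reached; a limit <= 0 disables the throttling.
            LimitLogger.limit("dirty-record", 5000L, () -> LOG.warn("dirty record skipped"));
            Thread.sleep(100);
        }
    }
}

Because lastPrintTime is an unsynchronized HashMap, heavily concurrent callers may occasionally log more often than the configured interval.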
39 changes: 38 additions & 1 deletion hdfsreader/pom.xml
@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>datax-all</artifactId>
<groupId>com.alibaba.datax</groupId>
@@ -111,6 +112,42 @@
<version>${datax-project-version}</version>
</dependency>

<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-common</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-format</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-jackson</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-encoding</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>1.12.0</version>
</dependency>

</dependencies>

<build>
22 changes: 22 additions & 0 deletions hdfsreader/src/main/assembly/package.xml
@@ -37,6 +37,28 @@
<!--</includes>-->
<!--<outputDirectory>plugin/reader/hdfsreader/libs</outputDirectory>-->
<!--</fileSet>-->
<!--<fileSet>-->
<!--<directory>src/main/libs</directory>-->
<!--<includes>-->
<!--<include>*.*</include>-->
<!--</includes>-->
<!--<outputDirectory>plugin/reader/hdfsreader/libs</outputDirectory>-->
<!--</fileSet>-->

<fileSet>
<directory>src/main/libs</directory>
<includes>
<include>*.*</include>
</includes>
<outputDirectory>plugin/reader/ossreader/libs</outputDirectory>
</fileSet>
<fileSet>
<directory>src/main/libs</directory>
<includes>
<include>*.*</include>
</includes>
<outputDirectory>plugin/reader/hivereader/libs</outputDirectory>
</fileSet>
</fileSets>

<dependencySets>
@@ -10,4 +10,5 @@ public class Constant {
public static final String CSV = "CSV";
public static final String SEQ = "SEQ";
public static final String RC = "RC";
public static final String PARQUET = "PARQUET";
}
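The parquet-* dependencies added to hdfsreader/pom.xml above, together with this new PARQUET file-type constant, point at Parquet read support in hdfsreader. For orientation only, a minimal sketch of reading Parquet records through the bundled parquet-avro API; the local file path is an illustrative assumption and this is not the plugin's actual read path:

import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.hadoop.ParquetReader;

public class ParquetReadSketch {
    public static void main(String[] args) throws Exception {
        // Iterate a Parquet file as generic Avro records; the schema is taken from the file footer.
        try (ParquetReader<GenericRecord> reader =
                     AvroParquetReader.<GenericRecord>builder(new Path("/tmp/sample.parquet")).build()) {
            GenericRecord record;
            while ((record = reader.read()) != null) {
                System.out.println(record);
            }
        }
    }
}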