Skip to content

Commit

Permalink
Merge pull request #1799 from kaspersorensen/prep-for-metamodel-5.2.0
Browse files Browse the repository at this point in the history
MetaModel 5.2.0
  • Loading branch information
kaspersorensen authored Jan 17, 2019
2 parents 8dac6bb + fbc0f49 commit b3e8d9e
Show file tree
Hide file tree
Showing 6 changed files with 90 additions and 44 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@
import org.slf4j.LoggerFactory;

import com.google.common.base.Function;
import com.google.common.collect.Sets;

import edu.uci.ics.jung.graph.DirectedGraph;

Expand Down Expand Up @@ -233,7 +232,7 @@ private List<Object> getPrerequisites(final Object vertex) {
}

private int getAccumulatedPrerequisiteCount(final Object obj) {
final Set<JobGraphLink> visitedEdges = Sets.newSetFromMap(new IdentityHashMap<>());
final Set<JobGraphLink> visitedEdges = Collections.newSetFromMap(new IdentityHashMap<>());
return getAccumulatedPrerequisiteCount(obj, visitedEdges);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@
import org.datacleaner.widgets.ResourceTypePresenter;
import org.datacleaner.widgets.table.DCEditableTable;

import com.google.common.base.Objects;
import com.google.common.base.Strings;

public final class FixedWidthDatastoreDialog extends AbstractResourceBasedDatastoreDialog<FixedWidthDatastore> {
Expand Down Expand Up @@ -277,8 +276,13 @@ private List<String> getColumnNames() {
final List<String> list = new ArrayList<>(rowCount);
final AlphabeticSequence sequence = new AlphabeticSequence();
for (int i = 0; i < rowCount; i++) {
final String nameStr = (String) _columnsTable.getValueAt(i, 0);
list.add(Objects.firstNonNull(Strings.emptyToNull(nameStr), sequence.next()));
final String namedStr = (String) _columnsTable.getValueAt(i, 0);
final String generatedStr = sequence.next();
if (Strings.isNullOrEmpty(namedStr)) {
list.add(generatedStr);
} else {
list.add(namedStr);
}
}
return list;
}
Expand Down
4 changes: 4 additions & 0 deletions engine/core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@
<groupId>org.apache.metamodel</groupId>
<artifactId>MetaModel-full</artifactId>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</dependency>
<dependency>
<groupId>org.apache.metamodel</groupId>
<artifactId>MetaModel-kafka</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import org.datacleaner.api.AnalyzerResult;
import org.datacleaner.api.ComponentMessage;
Expand All @@ -31,22 +32,25 @@
import org.slf4j.LoggerFactory;

/**
* {@link AnalysisListener} that wraps a list of inner listeners. Makes life
* easier for the invokers of the listeners.
* {@link AnalysisListener} that wraps a list of inner listeners. Makes life easier for the invokers of the listeners.
*/
public final class CompositeAnalysisListener implements AnalysisListener {

private static final Logger logger = LoggerFactory.getLogger(CompositeAnalysisListener.class);

private final List<AnalysisListener> _delegates;
private final AtomicBoolean _anythingFailed;

public CompositeAnalysisListener(final AnalysisListener[] delegates) {
_anythingFailed = new AtomicBoolean(false);
_delegates = new ArrayList<>(delegates.length);
for (final AnalysisListener analysisListener : delegates) {
addDelegate(analysisListener);
}
}

public CompositeAnalysisListener(final AnalysisListener firstDelegate, final AnalysisListener... delegates) {
_anythingFailed = new AtomicBoolean(false);
_delegates = new ArrayList<>(1 + delegates.length);
addDelegate(firstDelegate);
for (final AnalysisListener analysisListener : delegates) {
Expand All @@ -67,8 +71,7 @@ public void addDelegate(final AnalysisListener analysisListener) {
}

/**
* Determines if this {@link CompositeAnalysisListener} is empty (i.e. has
* no delegates)
* Determines if this {@link CompositeAnalysisListener} is empty (i.e. has no delegates)
*
* @return
*/
Expand All @@ -91,19 +94,31 @@ public void jobBegin(final AnalysisJob job, final AnalysisJobMetrics metrics) {
try {
delegate.jobBegin(job, metrics);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}

/**
 * Records and logs a failure thrown by a delegate listener.
 *
 * The first failure is logged with the full stack trace; every subsequent
 * failure is logged with only the exception class and message, so that a
 * listener that fails on every event does not flood the log with repeated
 * identical stack traces.
 *
 * @param delegate the listener whose callback threw
 * @param e the exception thrown by the listener
 */
private void logFailure(final AnalysisListener delegate, final Exception e) {
    // getAndSet(true) atomically marks that a failure happened and tells us
    // whether this is the first one - safe when listeners fire concurrently.
    final boolean anythingFailedBefore = _anythingFailed.getAndSet(true);
    if (anythingFailedBefore) {
        // moderate the logged output by only keeping the exception message
        logger.warn("Listener {} failed. {}: {}", delegate.getClass().getName(), e.getClass().getSimpleName(),
                e.getMessage());
    } else {
        // first failure: pass the exception as the last argument so SLF4J
        // emits the full stack trace for diagnostics
        logger.warn("Listener {} failed. {}: {}", delegate.getClass().getName(), e.getClass().getSimpleName(),
                e.getMessage(), e);
    }
}

@Override
public void onComponentMessage(final AnalysisJob job, final ComponentJob componentJob,
final ComponentMessage message) {
for (final AnalysisListener delegate : _delegates) {
try {
delegate.onComponentMessage(job, componentJob, message);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -114,7 +129,7 @@ public void jobSuccess(final AnalysisJob job, final AnalysisJobMetrics metrics)
try {
delegate.jobSuccess(job, metrics);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -125,7 +140,7 @@ public void rowProcessingBegin(final AnalysisJob job, final RowProcessingMetrics
try {
delegate.rowProcessingBegin(job, metrics);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -137,7 +152,7 @@ public void rowProcessingProgress(final AnalysisJob job, final RowProcessingMetr
try {
delegate.rowProcessingProgress(job, metrics, row, currentRow);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -148,7 +163,7 @@ public void rowProcessingSuccess(final AnalysisJob job, final RowProcessingMetri
try {
delegate.rowProcessingSuccess(job, metrics);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -159,7 +174,7 @@ public void componentBegin(final AnalysisJob job, final ComponentJob componentJo
try {
delegate.componentBegin(job, componentJob, metrics);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -170,7 +185,7 @@ public void componentSuccess(final AnalysisJob job, final ComponentJob component
try {
delegate.componentSuccess(job, componentJob, result);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -182,7 +197,7 @@ public void errorInComponent(final AnalysisJob job, final ComponentJob component
try {
delegate.errorInComponent(job, componentJob, row, throwable);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand All @@ -193,7 +208,7 @@ public void errorUnknown(final AnalysisJob job, final Throwable throwable) {
try {
delegate.errorUnknown(job, throwable);
} catch (final Exception e) {
logger.warn("Listener {} failed", delegate.getClass().getName(), e);
logFailure(delegate, e);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ public void testAllDatastoreTypes() throws Exception {
final ElasticSearchDatastore esDatastore =
(ElasticSearchDatastore) datastoreCatalog.getDatastore("my es index");
assertEquals("localhost", esDatastore.getHostname());
assertEquals(new Integer(9300), esDatastore.getPort());
assertEquals(9300, esDatastore.getPort().intValue());
assertEquals("my_es_cluster", esDatastore.getClusterName());
assertEquals("my_index", esDatastore.getIndexName());
assertNull(esDatastore.getTableDefs());
Expand Down
72 changes: 48 additions & 24 deletions pom.xml
Original file line number Diff line number Diff line change
@@ -1,21 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>

<!-- Maven plugin versions -->
<javadoc.version>3.0.0</javadoc.version>
<javadoc.version>3.0.1</javadoc.version>

<!-- Dependency versions -->
<slf4j.version>1.7.25</slf4j.version>
<junit.version>4.12</junit.version>
<easymock.version>3.6</easymock.version>
<httpcomponents.version>4.5.5</httpcomponents.version>
<metamodel.version>5.1.1</metamodel.version>
<metamodel.extras.version>5.0.0</metamodel.extras.version>
<metamodel.version>5.2.0</metamodel.version>
<metamodel.extras.version>5.2.0</metamodel.extras.version>
<spring.core.version>4.3.16.RELEASE</spring.core.version>
<freemarker.version>2.3.28</freemarker.version>
<icu4j.version>61.1</icu4j.version>
Expand All @@ -25,13 +27,12 @@
<javax.annotation.version>1.2</javax.annotation.version>
<scala.version>2.11.12</scala.version>
<jung.version>2.1.1</jung.version>
<guava.version>27.0.1-jre</guava.version>
<hadoop.version>3.1.1</hadoop.version>

<!-- TODO: The following (inter-dependent) dependencies are due for upgrades before Java 11 -->
<jersey.version>1.19.4</jersey.version>
<hadoop.version>2.7.1</hadoop.version>
<spark.version>1.6.1</spark.version>
<guava.version>16.0.1</guava.version>
<curator.version>2.6.0</curator.version>
<spark.version>1.6.3</spark.version>
<curator.version>2.13.0</curator.version>
</properties>
<parent>
<!-- Uses the OSS sonatype nexus repository for distribution -->
Expand Down Expand Up @@ -371,7 +372,7 @@
<exclude>org.mortbay.jetty:servlet-api-2.5:*</exclude>
<exclude>org.eclipse.jetty.orbit:javax.servlet:*</exclude>
<exclude>ant:ant:*</exclude>

<exclude>org.datanucleus:javax.jdo:*</exclude>

<!-- Prefer javax.transaction:javax.transaction-api over javax.transaction:jta -->
Expand Down Expand Up @@ -411,10 +412,6 @@
<exclude>org.glassfish.hk2.*</exclude>
<exclude>org.glassfish.hk2.external:*</exclude>

<!-- Jersey 2 excluded due to conflicts with Jersey 1 (which is
required by Hadoop) -->
<exclude>org.glassfish.jersey:*</exclude>

<!-- asm groupId is conflicting with org.ow2.asm groupId -->
<exclude>asm:asm:*</exclude>

Expand All @@ -423,6 +420,7 @@

<!-- jcip-annotations is overlapping with annotations -->
<exclude>net.jcip:jcip-annotations:*</exclude>
<exclude>com.github.stephenc.jcip:jcip-annotations:*</exclude>

<!-- xml-apis is shipped with the JRE -->
<exclude>xml-apis:xml-apis:*</exclude>
Expand All @@ -441,6 +439,8 @@

<!-- org.apache.geronimo.bundles:json is overlapping with org.json:json -->
<exclude>org.apache.geronimo.bundles:json:*</exclude>

<exclude>com.sun.xml.ws:jaxws-rt:*</exclude>
</excludes>
</bannedDependencies>
</rules>
Expand Down Expand Up @@ -737,6 +737,10 @@
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.xml.ws</groupId>
<artifactId>jaxws-rt</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
Expand Down Expand Up @@ -881,6 +885,17 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>com.github.stephenc.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-registry</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
Expand Down Expand Up @@ -1109,6 +1124,10 @@
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.stephenc.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>jsp-api</artifactId>
Expand All @@ -1129,6 +1148,10 @@
<groupId>org.datanucleus</groupId>
<artifactId>javax.jdo</artifactId>
</exclusion>
<exclusion>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP-java7</artifactId>
</exclusion>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
Expand Down Expand Up @@ -1231,6 +1254,11 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>4.0.56.Final</version>
</dependency>

<dependency>
<groupId>commons-lang</groupId>
Expand All @@ -1241,6 +1269,12 @@
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.9.2</version>
<exclusions>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-pool</groupId>
Expand Down Expand Up @@ -1422,16 +1456,6 @@
<artifactId>jackson-module-scala_2.11</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<version>${jersey.version}</version>
</dependency>

<!-- logging dependencies -->
<dependency>
Expand Down

0 comments on commit b3e8d9e

Please sign in to comment.