Common: use processing manifest (close #81)
Showing 33 changed files with 953 additions and 142 deletions.
37 changes: 37 additions & 0 deletions
shredder/src/main/scala/com.snowplowanalytics.snowplow.storage/spark/DynamodbManifest.scala
@@ -0,0 +1,37 @@
/*
 * Copyright (c) 2012-2018 Snowplow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at
 * http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and
 * limitations there under.
 */
package com.snowplowanalytics.snowplow.storage.spark

import cats.implicits._

import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder

import com.snowplowanalytics.manifest.core.ProcessingManifest._
import com.snowplowanalytics.manifest.dynamodb.DynamoDbManifest
import com.snowplowanalytics.snowplow.rdbloader.generated.ProjectMetadata

object DynamodbManifest {

  /** Effect type for manifest operations: either a `ManifestError` or a result */
  type ManifestFailure[A] = Either[ManifestError, A]

  /** Application entity the shred job registers itself as in the manifest */
  val ShredJobApplication = Application(ProjectMetadata.name, ProjectMetadata.version, None)

  /** Manifest key under which discovered shredded types are recorded */
  val ShreddedTypesKeys = "processed:shredder:types"

  /** Build a DynamoDB-backed processing manifest for the given table */
  def initialize(tableName: String) = {
    val client = AmazonDynamoDBClientBuilder.standard().build()
    DynamoDbManifest[ManifestFailure](client, tableName)
  }
}
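For orientation, a minimal usage sketch of this helper; it is not part of the commit. The table name below is a hypothetical placeholder, and credentials/region come from the default AWS provider chain that `AmazonDynamoDBClientBuilder.standard()` consults.

```scala
// Hypothetical table name, for illustration only
val manifest = DynamodbManifest.initialize("snowplow-processing-manifest")
// Operations on the manifest report errors through the
// DynamodbManifest.ManifestFailure alias, i.e. Either[ManifestError, A]
```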
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
59 changes: 59 additions & 0 deletions
...er/src/main/scala/com.snowplowanalytics.snowplow.storage/spark/StringSetAccumulator.scala
@@ -0,0 +1,59 @@
/*
 * Copyright (c) 2012-2018 Snowplow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package com.snowplowanalytics.snowplow.storage.spark

import org.apache.spark.util.AccumulatorV2

import scala.collection.mutable

import StringSetAccumulator._

/** Spark accumulator that merges per-executor string sets into one driver-side set */
class StringSetAccumulator extends AccumulatorV2[KeyAccum, KeyAccum] {

  private val accum = mutable.Set.empty[String]

  def merge(other: AccumulatorV2[KeyAccum, KeyAccum]): Unit = other match {
    case o: StringSetAccumulator => accum ++= o.accum
    case _ => throw new UnsupportedOperationException(
      s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }

  def isZero: Boolean = accum.isEmpty

  def copy(): AccumulatorV2[KeyAccum, KeyAccum] = {
    val newAcc = new StringSetAccumulator
    accum.synchronized {
      newAcc.accum ++= accum
    }
    newAcc
  }

  def value = accum

  def add(keys: KeyAccum): Unit = {
    accum ++= keys
  }

  def add(keys: Set[String]): Unit = {
    val mutableSet = mutable.Set(keys.toList: _*)
    add(mutableSet)
  }

  def reset(): Unit = {
    accum.clear()
  }
}

object StringSetAccumulator {
  type KeyAccum = mutable.Set[String]
}