Commit 824011b

Add support for injecting arbitrary JavaScript to API docs

andrewor14 committed Apr 8, 2014
1 parent 037755c commit 824011b
Showing 9 changed files with 83 additions and 22 deletions.
3 changes: 2 additions & 1 deletion core/src/main/scala/org/apache/spark/Aggregator.scala
@@ -17,16 +17,17 @@
 
 package org.apache.spark
 
+import org.apache.spark.annotations.DeveloperAPI
 import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}
 
 /**
- * <span class="developer badge">Developer API</span>
  * A set of functions used to aggregate data.
  *
  * @param createCombiner function to create the initial value of the aggregation.
  * @param mergeValue function to merge a new value into the aggregation result.
  * @param mergeCombiners function to merge outputs from multiple mergeValue functions.
  */
+@DeveloperAPI
 case class Aggregator[K, V, C] (
     createCombiner: V => C,
     mergeValue: (C, V) => C,
3 changes: 1 addition & 2 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -54,8 +54,7 @@ import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerTy
  * @param config a Spark Config object describing the application configuration. Any settings in
  *   this config override the default configs as well as system properties.
  */
-class SparkContext(config: SparkConf)
-  extends Logging {
+class SparkContext(config: SparkConf) extends Logging {
 
   // This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
   // etc) too. This is typically generated from InputFormatInfo.computePreferredLocations. It
4 changes: 2 additions & 2 deletions core/src/main/scala/org/apache/spark/TaskEndReason.scala
@@ -17,16 +17,16 @@
 
 package org.apache.spark
 
+import org.apache.spark.annotations.DeveloperAPI
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.storage.BlockManagerId
 
 /**
- * <span class="developer badge">Developer API</span>
  * Various possible reasons why a task ended. The low-level TaskScheduler is supposed to retry
  * tasks several times for "ephemeral" failures, and only report back failures that require some
  * old stages to be resubmitted, such as shuffle map fetch failures.
  */
 
+@DeveloperAPI
 sealed trait TaskEndReason
 
-/** <span class="developer badge">Developer API</span> */
25 changes: 25 additions & 0 deletions core/src/main/scala/org/apache/spark/annotations/DeveloperAPI.java
@@ -0,0 +1,25 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.annotations;

import java.lang.annotation.*;

@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface DeveloperAPI {}
25 changes: 25 additions & 0 deletions core/src/main/scala/org/apache/spark/annotations/Experimental.java
@@ -0,0 +1,25 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.annotations;

import java.lang.annotation.*;

@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface Experimental {}
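
Both annotations are declared with RetentionPolicy.SOURCE: they are visible while the sources are compiled and documented, but are discarded from the generated bytecode, so tagging a class @DeveloperAPI or @Experimental leaves no runtime or binary-compatibility footprint. The broad @Target list allows the same marker on types, fields, methods, constructors, local variables, and packages.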
3 changes: 2 additions & 1 deletion core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
@@ -18,11 +18,12 @@
 package org.apache.spark.scheduler
 
 import org.apache.spark.storage.RDDInfo
+import org.apache.spark.annotations.DeveloperAPI
 
 /**
- * <span class="developer badge">Developer API</span>
  * Stores information about a stage to pass from the scheduler to SparkListeners.
  */
+@DeveloperAPI
 class StageInfo(val stageId: Int, val name: String, val numTasks: Int, val rddInfo: RDDInfo) {
   /** When this stage was submitted from the DAGScheduler to a TaskScheduler. */
   var submissionTime: Option[Long] = None
2 changes: 1 addition & 1 deletion docs/_config.yml
@@ -1,5 +1,5 @@
 pygments: true
-markdown: kramdown
+markdown: rdiscount
 
 # These allow the documentation to be updated with new releases
 # of Spark, Scala, and Mesos.
35 changes: 20 additions & 15 deletions docs/_plugins/copy_api_dirs.rb
@@ -51,6 +51,11 @@
     puts "cp -r " + source + "/. " + dest
     cp_r(source + "/.", dest)
 
+    # Append custom JavaScript
+    js = File.readlines("./js/api-docs.js")
+    js_file = dest + "/lib/template.js"
+    File.open(js_file, 'a') { |f| f.write("\n" + js.join()) }
+
     # Append custom CSS
     css_file = dest + "/lib/template.css"
     extra_css = [
@@ -62,19 +67,19 @@
     File.open(css_file, 'a') { |f| f.write(extra_css) }
   end
 
-  # Build Epydoc for Python
-  puts "Moving to python directory and building epydoc."
-  cd("../python")
-  puts `epydoc --config epydoc.conf`
-
-  puts "Moving back into docs dir."
-  cd("../docs")
-
-  puts "echo making directory pyspark"
-  mkdir_p "pyspark"
-
-  puts "cp -r ../python/docs/. api/pyspark"
-  cp_r("../python/docs/.", "api/pyspark")
-
-  cd("..")
+  # # Build Epydoc for Python
+  # puts "Moving to python directory and building epydoc."
+  # cd("../python")
+  # puts `epydoc --config epydoc.conf`
+  #
+  # puts "Moving back into docs dir."
+  # cd("../docs")
+  #
+  # puts "echo making directory pyspark"
+  # mkdir_p "pyspark"
+  #
+  # puts "cp -r ../python/docs/. api/pyspark"
+  # cp_r("../python/docs/.", "api/pyspark")
+  #
+  # cd("..")
 end
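
Appending to lib/template.js mirrors the existing CSS append right below the new lines: Scaladoc copies lib/template.js and lib/template.css into every generated API directory, and every page it emits loads them, so code appended there runs on each docs page without modifying Scaladoc itself. The "./js/api-docs.js" path is resolved relative to the docs/ directory, which is where Jekyll runs this plugin.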
5 changes: 5 additions & 0 deletions docs/js/api-docs.js
@@ -0,0 +1,5 @@
/* Dynamically injected post-processing code for the API docs */

$(document).ready(function() {
console.log("Ready")
});
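
The script above is only a stub that proves the injection path works. One direction this hook could take — a hypothetical sketch, not part of this commit — is to scan the generated Scaladoc for the new annotations and inject the badge spans that were previously hard-coded in the doc comments. The div.fullcommenttop selector and the badge markup for Experimental are assumptions here:

/* Hypothetical follow-up (not in this commit): decorate annotated entities
 * with badges. Assumes jQuery (which Scaladoc bundles in lib/) and assumes
 * the annotation names appear in the rendered comment text. */
$(document).ready(function() {
  var badges = {
    "@DeveloperAPI": '<span class="developer badge">Developer API</span>',
    "@Experimental": '<span class="experimental badge">Experimental</span>'
  };
  $("div.fullcommenttop").each(function() {
    var comment = $(this);
    for (var name in badges) {
      if (comment.text().indexOf(name) >= 0) {
        comment.prepend(badges[name]);
      }
    }
  });
});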
