From 36c2e248c8c1436b126352ee50ded5f01c03c4c8 Mon Sep 17 00:00:00 2001
From: Yanbo Liang
Date: Sat, 19 Nov 2016 07:21:47 -0800
Subject: [PATCH] Better deprecated docs for estimator/transformer read/write
 context function.

---
 .../scala/org/apache/spark/ml/util/ReadWrite.scala |  2 +-
 python/pyspark/ml/util.py                          | 10 ++++++++--
 2 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
index bc4f9e6716ee8..22abf4c976e96 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
@@ -46,7 +46,7 @@ private[util] sealed trait BaseReadWrite {
    * Sets the Spark SQLContext to use for saving/loading.
    */
   @Since("1.6.0")
-  @deprecated("Use session instead", "2.0.0")
+  @deprecated("Use session instead. This method will be removed in 2.2.0.", "2.0.0")
   def context(sqlContext: SQLContext): this.type = {
     optionSparkSession = Option(sqlContext.sparkSession)
     this
diff --git a/python/pyspark/ml/util.py b/python/pyspark/ml/util.py
index 42f6df9ca280e..9c5cd15681483 100644
--- a/python/pyspark/ml/util.py
+++ b/python/pyspark/ml/util.py
@@ -78,7 +78,10 @@ def overwrite(self):
         raise NotImplementedError("MLWriter is not yet implemented for type: %s" % type(self))
 
     def context(self, sqlContext):
-        """Sets the SQL context to use for saving."""
+        """
+        Sets the SQL context to use for saving.
+        .. note:: Deprecated in 2.1, use session instead.
+        """
         raise NotImplementedError("MLWriter is not yet implemented for type: %s" % type(self))
 
     def session(self, sparkSession):
@@ -168,7 +171,10 @@ def load(self, path):
         raise NotImplementedError("MLReader is not yet implemented for type: %s" % type(self))
 
     def context(self, sqlContext):
-        """Sets the SQL context to use for loading."""
+        """
+        Sets the SQL context to use for loading.
+        .. note:: Deprecated in 2.1, use session instead.
+        """
         raise NotImplementedError("MLReader is not yet implemented for type: %s" % type(self))
 
     def session(self, sparkSession):