
Commit d666053

ScrapCodes authored and pwendell committed
SPARK-1352 - Comment style single space before ending */ check.
Author: Prashant Sharma <[email protected]>

Closes #261 from ScrapCodes/comment-style-check2 and squashes the following commits:

6cde61e [Prashant Sharma] comment style space before ending */ check.
1 parent 95d7d2a commit d666053
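
To make the rule concrete, here is a minimal illustration of the comment style this check enforces (hypothetical snippet, not taken from the patch):

    /* messages += message*/    // flagged: no space before the closing */
    /*messages += message */    // flagged: no space after the opening /*
    /* messages += message */   // accepted: a single space on both sides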

13 files changed: +32 -31 lines changed


core/src/main/scala/org/apache/spark/network/Connection.scala (+6 -6)

@@ -211,7 +211,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
 
   def addMessage(message: Message) {
     messages.synchronized{
-      /* messages += message*/
+      /* messages += message */
       messages.enqueue(message)
       logDebug("Added [" + message + "] to outbox for sending to " +
         "[" + getRemoteConnectionManagerId() + "]")
@@ -222,7 +222,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
     messages.synchronized {
       while (!messages.isEmpty) {
         /* nextMessageToBeUsed = nextMessageToBeUsed % messages.size */
-        /* val message = messages(nextMessageToBeUsed)*/
+        /* val message = messages(nextMessageToBeUsed) */
         val message = messages.dequeue
         val chunk = message.getChunkForSending(defaultChunkSize)
         if (chunk.isDefined) {
@@ -262,7 +262,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
 
   val currentBuffers = new ArrayBuffer[ByteBuffer]()
 
-  /* channel.socket.setSendBufferSize(256 * 1024)*/
+  /* channel.socket.setSendBufferSize(256 * 1024) */
 
   override def getRemoteAddress() = address
 
@@ -355,7 +355,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
         }
         case None => {
           // changeConnectionKeyInterest(0)
-          /* key.interestOps(0)*/
+          /* key.interestOps(0) */
           return false
         }
       }
@@ -540,10 +540,10 @@ private[spark] class ReceivingConnection(
           return false
         }
 
-        /* logDebug("Read " + bytesRead + " bytes for the buffer")*/
+        /* logDebug("Read " + bytesRead + " bytes for the buffer") */
 
        if (currentChunk.buffer.remaining == 0) {
-          /* println("Filled buffer at " + System.currentTimeMillis)*/
+          /* println("Filled buffer at " + System.currentTimeMillis) */
           val bufferMessage = inbox.getMessageForChunk(currentChunk).get
           if (bufferMessage.isCompletelyReceived) {
             bufferMessage.flip

core/src/main/scala/org/apache/spark/network/ConnectionManager.scala (+7 -7)

@@ -505,7 +505,7 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf,
       }
     }
     handleMessageExecutor.execute(runnable)
-    /* handleMessage(connection, message)*/
+    /* handleMessage(connection, message) */
   }
 
   private def handleClientAuthentication(
@@ -859,14 +859,14 @@ private[spark] object ConnectionManager {
       None
     })
 
-    /* testSequentialSending(manager)*/
-    /* System.gc()*/
+    /* testSequentialSending(manager) */
+    /* System.gc() */
 
-    /* testParallelSending(manager)*/
-    /* System.gc()*/
+    /* testParallelSending(manager) */
+    /* System.gc() */
 
-    /* testParallelDecreasingSending(manager)*/
-    /* System.gc()*/
+    /* testParallelDecreasingSending(manager) */
+    /* System.gc() */
 
     testContinuousSending(manager)
     System.gc()

core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala (+2 -2)

@@ -47,8 +47,8 @@ private[spark] object ConnectionManagerTest extends Logging{
     val slaves = slavesFile.mkString.split("\n")
     slavesFile.close()
 
-    /* println("Slaves")*/
-    /* slaves.foreach(println)*/
+    /* println("Slaves") */
+    /* slaves.foreach(println) */
     val tasknum = if (args.length > 2) args(2).toInt else slaves.length
     val size = ( if (args.length > 3) (args(3).toInt) else 10 ) * 1024 * 1024
     val count = if (args.length > 4) args(4).toInt else 3

core/src/main/scala/org/apache/spark/network/ReceiverTest.scala (+1 -1)

@@ -27,7 +27,7 @@ private[spark] object ReceiverTest {
     println("Started connection manager with id = " + manager.id)
 
     manager.onReceiveMessage((msg: Message, id: ConnectionManagerId) => {
-      /* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis)*/
+      /* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis) */
       val buffer = ByteBuffer.wrap("response".getBytes)
       Some(Message.createBufferMessage(buffer, msg.id))
     })

core/src/main/scala/org/apache/spark/network/SenderTest.scala (+1 -1)

@@ -50,7 +50,7 @@ private[spark] object SenderTest {
     (0 until count).foreach(i => {
       val dataMessage = Message.createBufferMessage(buffer.duplicate)
       val startTime = System.currentTimeMillis
-      /* println("Started timer at " + startTime)*/
+      /* println("Started timer at " + startTime) */
       val responseStr = manager.sendMessageReliablySync(targetConnectionManagerId, dataMessage)
         .map { response =>
           val buffer = response.asInstanceOf[BufferMessage].buffers(0)

core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala (+1 -1)

@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.Schedulable
 import org.apache.spark.ui.Page._
 import org.apache.spark.ui.UIUtils
 
-/** Page showing list of all ongoing and recently finished stages and pools*/
+/** Page showing list of all ongoing and recently finished stages and pools */
 private[ui] class IndexPage(parent: JobProgressUI) {
   private val appName = parent.appName
   private val basePath = parent.basePath

core/src/main/scala/org/apache/spark/util/MutablePair.scala (+2 -2)

@@ -24,8 +24,8 @@ package org.apache.spark.util
  * @param _1 Element 1 of this MutablePair
  * @param _2 Element 2 of this MutablePair
  */
-case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T1,
-                       @specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T2]
+case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T1,
+                       @specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T2]
   (var _1: T1, var _2: T2)
   extends Product2[T1, T2]
 {

examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala (+1 -1)

@@ -21,7 +21,7 @@ import java.net.ServerSocket
 import java.io.PrintWriter
 import util.Random
 
-/** Represents a page view on a website with associated dimension data.*/
+/** Represents a page view on a website with associated dimension data. */
 class PageView(val url : String, val status : Int, val zipCode : Int, val userID : Int)
     extends Serializable {
   override def toString() : String = {

external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala (+1 -1)

@@ -127,7 +127,7 @@ class FlumeEventServer(receiver : FlumeReceiver) extends AvroSourceProtocol {
 }
 
 /** A NetworkReceiver which listens for events using the
-  * Flume Avro interface.*/
+  * Flume Avro interface. */
 private[streaming]
 class FlumeReceiver(
     host: String,

graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala (+1 -1)

@@ -45,7 +45,7 @@ class VertexBroadcastMsg[@specialized(Int, Long, Double, Boolean) T](
  * @param data value to send
  */
 private[graphx]
-class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T](
+class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T](
     @transient var partition: PartitionID,
     var data: T)
   extends Product2[PartitionID, T] with Serializable {

project/project/SparkPluginBuild.scala (+1 -2)

@@ -34,8 +34,7 @@ object SparkPluginDef extends Build {
     version := sparkVersion,
     scalaVersion := "2.10.3",
     scalacOptions := Seq("-unchecked", "-deprecation"),
-    libraryDependencies ++= Dependencies.scalaStyle,
-    sbtPlugin := true
+    libraryDependencies ++= Dependencies.scalaStyle
   )
 
   object Dependencies {

project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStyleCheck.scala renamed to project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStartChecker.scala (+5 -3)

@@ -25,13 +25,15 @@ import scalariform.lexer.{MultiLineComment, ScalaDocComment, SingleLineComment,
 import scalariform.parser.CompilationUnit
 
 class SparkSpaceAfterCommentStartChecker extends ScalariformChecker {
-  val errorKey: String = "insert.a.single.space.after.comment.start"
+  val errorKey: String = "insert.a.single.space.after.comment.start.and.before.end"
 
   private def multiLineCommentRegex(comment: Token) =
-    Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+    Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+    Pattern.compile( """/\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()
 
   private def scalaDocPatternRegex(comment: Token) =
-    Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+    Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+    Pattern.compile( """/\*\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()
 
   private def singleLineCommentRegex(comment: Token): Boolean =
     comment.text.trim.matches( """//\S+.*""") && !comment.text.trim.matches( """///+""")
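
As a rough sketch of how the new patterns behave, the following standalone snippet (an assumed, simplified reproduction that calls java.util.regex directly, without the checker's Scalariform plumbing; the object name is made up) exercises the end-of-comment check added by this patch:

    import java.util.regex.Pattern

    object CommentPatternDemo {
      // New pattern from this patch: a multi-line comment whose closing */
      // is immediately preceded by a non-whitespace character.
      private val badEnd = Pattern.compile("""/\*.*\S\*/""", Pattern.DOTALL)

      // Pre-existing pattern: no space after the opening /*.
      private val badStart = Pattern.compile("""/\*\S+.*""", Pattern.DOTALL)

      def main(args: Array[String]): Unit = {
        println(badEnd.matcher("/* messages += message*/").matches())  // true: flagged
        println(badEnd.matcher("/* messages += message */").matches()) // false: accepted
        println(badStart.matcher("/*no space */").matches())           // true: flagged
      }
    }

The checker reports a comment when either pattern matches, which is why the patch joins the new end-of-comment test to the existing start-of-comment test with ||.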

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala (+3 -3)

@@ -57,19 +57,19 @@ import org.apache.spark.sql.catalyst.types._
 case class ParquetRelation(tableName: String, path: String)
     extends BaseRelation with MultiInstanceRelation {
 
-  /** Schema derived from ParquetFile **/
+  /** Schema derived from ParquetFile */
   def parquetSchema: MessageType =
     ParquetTypesConverter
       .readMetaData(new Path(path))
       .getFileMetaData
       .getSchema
 
-  /** Attributes **/
+  /** Attributes */
   val attributes =
     ParquetTypesConverter
       .convertToAttributes(parquetSchema)
 
-  /** Output **/
+  /** Output */
   override val output = attributes
 
   // Parquet files have no concepts of keys, therefore no Partitioner
