Skip to content

Commit

Permalink
Added logging for #48, "HAR seems to contain HTTP status codes which cannot be converted to Int".
Browse files Browse the repository at this point in the history
  • Loading branch information
manuelkiessling committed Mar 1, 2017
1 parent 4e538cb commit 9d309f1
Show file tree
Hide file tree
Showing 2 changed files with 103 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import java.text.SimpleDateFormat
import java.util.Calendar

import com.datastax.spark.connector._
import org.apache.log4j.LogManager
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.json4s._
Expand Down Expand Up @@ -48,7 +49,18 @@ object HarAnalyzer {
implicit val formats = org.json4s.DefaultFormats
val requestCounter = for {
entry <- entries
if ((entry \ "response" \ "status").extract[Int] >= status && (entry \ "response" \ "status").extract[Int] < status + 100)
if {
try {
val entryStatus = (entry \ "response" \ "status").extract[Int]
entryStatus >= status && entryStatus < status + 100
} catch {
case e: Exception => {
val log = LogManager.getRootLogger
log.warn(s"Problem: '${e.getMessage}' while trying to get the status code at 'response -> status' within ${entry.toString()}")
false
}
}
}
} yield 1
if (requestCounter.isEmpty) 0 else requestCounter.reduce(_ + _)
// This is a "normal" Scala reduce, not an RDD reduce.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}

object FixtureGenerator {
def getTestresultsRDD(sc: SparkContext) = {
def getValidTestresultsRDD(sc: SparkContext) = {
val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
val datetimeRun1 = format.parse("2015-01-02 23:59:59")
val datetimeRun2 = format.parse("2015-11-18 00:00:00")
Expand Down Expand Up @@ -68,6 +68,67 @@ object FixtureGenerator {
)
))
}

/** Builds an RDD of Testresult fixtures for exercising the analyzer's handling
  * of HARs whose status code cannot be converted to Int (issue #48).
  *
  * The first Testresult contains one entry with a non-numeric status ("foo")
  * alongside one valid 400 entry; the second Testresult contains only valid
  * 400 entries.
  *
  * @param sc the SparkContext used to parallelize the fixture data
  * @return an RDD of two Testresults, the first of which is partially invalid
  */
def getInvalidTestresultsRDD(sc: SparkContext) = {
  val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
  val datetimeRun1 = format.parse("2015-01-02 23:59:59")
  val datetimeRun2 = format.parse("2015-11-18 00:00:00")
  sc.parallelize(Seq(
    Testresult(
      testcaseId = "testcaseId1",
      testresultId = "testresultId1",
      datetimeRun = datetimeRun1,
      har = parse(
        """
          |{
          | "log": {
          | "entries": [
          | {
          | "response": {
          | "status": "foo"
          | },
          | "time": 10
          | },
          | {
          | "response": {
          | "status": 400
          | },
          | "time": 15
          | }
          | ]
          | }
          |}
        """.stripMargin, false)
    ),
    Testresult(
      testcaseId = "testcaseId1",
      testresultId = "testresultId2",
      datetimeRun = datetimeRun2,
      har = parse(
        // Note: the opening brace line previously lacked its '|' margin prefix,
        // so stripMargin left its leading whitespace untouched; fixed here for
        // consistency with the first fixture's literal.
        """
          |{
          | "log": {
          | "entries": [
          | {
          | "response": {
          | "status": 400
          | },
          | "time": 16
          | },
          | {
          | "response": {
          | "status": 400
          | },
          | "time": 4
          | }
          | ]
          | }
          |}
        """.stripMargin, false)
    )
  ))
}
}

class SparkExampleSpec extends FunSpec with BeforeAndAfter with Matchers {
Expand All @@ -92,8 +153,9 @@ class SparkExampleSpec extends FunSpec with BeforeAndAfter with Matchers {
}

describe("The HarAnalyzer") {
it("should extract statistics from HARs") {
val testresultsRDD = FixtureGenerator.getTestresultsRDD(sc)

it("should extract statistics from valid HARs") {
val testresultsRDD = FixtureGenerator.getValidTestresultsRDD(sc)
val statisticsRDD = HarAnalyzer.calculateRequestStatistics(testresultsRDD)
val statistics = statisticsRDD.collect()

Expand All @@ -115,6 +177,31 @@ class SparkExampleSpec extends FunSpec with BeforeAndAfter with Matchers {
statistics(1).numberOfRequestsWithStatus400 should be(2)
statistics(1).totalRequestTime should be(20)
}

// Verifies that HAR entries whose status code is not convertible to Int
// (the "foo" entry in the fixture) are skipped when counting requests by
// status, instead of crashing the analysis (issue #48).
it("should gracefully handle invalid HARs") {
val testresultsRDD = FixtureGenerator.getInvalidTestresultsRDD(sc)
val statisticsRDD = HarAnalyzer.calculateRequestStatistics(testresultsRDD)
val statistics = statisticsRDD.collect()

statistics(0).testcaseId should be("testcaseId1")
statistics(0).dayBucket should be("2015-01-02")
statistics(0).testresultId should be("testresultId1")
statistics(0).testresultDatetimeRun.toString.substring(0, 20) should be("Fri Jan 02 23:59:59 ") // @TODO: Stupid hack because we do not yet store the timezone
statistics(0).testresultDatetimeRun.toString.substring(24) should be("2015")
statistics(0).numberOfRequestsWithStatus200 should be(0)
// Only the valid 400 entry is counted; the "foo"-status entry is skipped.
statistics(0).numberOfRequestsWithStatus400 should be(1)
// 25 = 10 + 15: the invalid-status entry's time still contributes to the
// total, since only status extraction fails — presumably intentional.
statistics(0).totalRequestTime should be(25)

statistics(1).testcaseId should be("testcaseId1")
statistics(1).dayBucket should be("2015-11-18")
statistics(1).testresultId should be("testresultId2")
statistics(1).testresultDatetimeRun.toString.substring(0, 20) should be("Wed Nov 18 00:00:00 ")
statistics(1).testresultDatetimeRun.toString.substring(24) should be("2015")
statistics(1).numberOfRequestsWithStatus200 should be(0)
// The fully valid second Testresult counts both 400 entries (times 16 + 4).
statistics(1).numberOfRequestsWithStatus400 should be(2)
statistics(1).totalRequestTime should be(20)
}

}

}

0 comments on commit 9d309f1

Please sign in to comment.