diff --git a/tools/README.md b/tools/README.md index 771c21393a6..8613bbb4781 100644 --- a/tools/README.md +++ b/tools/README.md @@ -89,7 +89,7 @@ Filter event logs to be processed. 10 newest file with filenames containing "loc ```bash $SPARK_HOME/bin/spark-submit --class com.nvidia.spark.rapids.tool.profiling.ProfileMain \ rapids-4-spark-tools_2.12-.jar \ --m "local" -f "10-newest" \ +-m "local" -f "10-newest-filesystem" \ /directory/with/eventlogs/ ``` @@ -196,12 +196,31 @@ Usage: java -cp rapids-4-spark-tools_2.12-.jar:$SPARK_HOME/jars/* of a selection criterion. i.e Select all event logs except the ones which have application name as the input string. - -f, --filter-criteria Filter newest or oldest N eventlogs for - processing.eg: 100-newest (for processing - newest 100 event logs). eg: 100-oldest (for - processing oldest 100 event logs) + -f, --filter-criteria Filter newest or oldest N eventlogs based on application start + timestamp, unique application name or filesystem + timestamp. Filesystem based filtering happens before any + application based filtering. + For application based filtering, the order in which filters are + applied is: application-name, start-app-time, filter-criteria. + Application based filter-criteria are: + 100-newest (for processing newest 100 event logs based on + timestamp of the application inside the eventlog i.e application + start time) + 100-oldest (for processing oldest 100 event logs based on + timestamp of the application inside the eventlog i.e application + start time) + 100-newest-per-app-name (select at most 100 newest log files for + each unique application name) + 100-oldest-per-app-name (select at most 100 oldest log files for + each unique application name) + Filesystem based filter criteria are: + 100-newest-filesystem (for processing newest 100 event + logs based on filesystem timestamp). + 100-oldest-filesystem (for processing oldest 100 event logs + based on filesystem timestamp). 
-m, --match-event-logs Filter event logs whose filenames contain the - input string + input string. Filesystem based filtering happens before + any application based filtering. -n, --num-output-rows Number of output rows in the summary report. Default is 1000. --num-threads Number of thread to use for parallel @@ -224,6 +243,12 @@ Usage: java -cp rapids-4-spark-tools_2.12-.jar:$SPARK_HOME/jars/* --report-read-schema Whether to output the read formats and datatypes to the CSV file. This can be very long. Default is false. + -s, --start-app-time Filter event logs whose application start + occurred within the past specified time + period. Valid time periods are + min(minute),h(hours),d(days),w(weeks),m(months). + If a period is not specified it defaults to + days. -t, --timeout Maximum time in seconds to wait for the event logs to be processed. Default is 24 hours (86400 seconds) and must be greater than 3 @@ -592,8 +617,8 @@ For usage see below: less than 10 applications). Default is false -f, --filter-criteria Filter newest or oldest N event logs for processing. Supported formats are: - To process 10 recent event logs: --filter-criteria "10-newest" - To process 10 oldest event logs: --filter-criteria "10-oldest" + To process 10 recent event logs: --filter-criteria "10-newest-timestamp" + To process 10 oldest event logs: --filter-criteria "10-oldest-timestamp" -g, --generate-dot Generate query visualizations in DOT format. Default is false -m, --match-event-logs Filter event logs filenames which contains the input string. 
diff --git a/tools/src/main/scala/com/nvidia/spark/rapids/tool/EventLogPathProcessor.scala b/tools/src/main/scala/com/nvidia/spark/rapids/tool/EventLogPathProcessor.scala index 3cf385d096d..dfdc5d301dd 100644 --- a/tools/src/main/scala/com/nvidia/spark/rapids/tool/EventLogPathProcessor.scala +++ b/tools/src/main/scala/com/nvidia/spark/rapids/tool/EventLogPathProcessor.scala @@ -170,7 +170,7 @@ object EventLogPathProcessor extends Logging { logsWithTimestamp.filterKeys(_.eventLog.getName.contains(strMatch)) }.getOrElse(logsWithTimestamp) - val filteredLogs = filterNLogs.map { filter => + val filteredLogs = if (filterNLogs.nonEmpty && !filterByAppCriteria(filterNLogs)) { val filteredInfo = filterNLogs.get.split("-") val numberofEventLogs = filteredInfo(0).toInt val criteria = filteredInfo(1) @@ -179,15 +179,21 @@ object EventLogPathProcessor extends Logging { } else if (criteria.equals("oldest")) { LinkedHashMap(matchedLogs.toSeq.sortWith(_._2 < _._2): _*) } else { - logError("Criteria should be either newest or oldest") + logError("Criteria should be either newest-filesystem or oldest-filesystem") Map.empty[EventLogInfo, Long] } matched.take(numberofEventLogs) - }.getOrElse(matchedLogs) - + } else { + matchedLogs + } filteredLogs.keys.toSeq } + def filterByAppCriteria(filterNLogs: Option[String]): Boolean = { + filterNLogs.get.endsWith("-oldest") || filterNLogs.get.endsWith("-newest") || + filterNLogs.get.endsWith("per-app-name") + } + def logApplicationInfo(app: ApplicationInfo) = { logInfo(s"============== ${app.appId} (index=${app.index}) ==============") } diff --git a/tools/src/main/scala/com/nvidia/spark/rapids/tool/profiling/ProfileArgs.scala b/tools/src/main/scala/com/nvidia/spark/rapids/tool/profiling/ProfileArgs.scala index b657bd44a88..2566a23d336 100644 --- a/tools/src/main/scala/com/nvidia/spark/rapids/tool/profiling/ProfileArgs.scala +++ b/tools/src/main/scala/com/nvidia/spark/rapids/tool/profiling/ProfileArgs.scala @@ -58,8 +58,8 @@ For usage see 
below: val filterCriteria: ScallopOption[String] = opt[String](required = false, descr = "Filter newest or oldest N eventlogs for processing." + - "eg: 100-newest (for processing newest 100 event logs). " + - "eg: 100-oldest (for processing oldest 100 event logs)") + "eg: 100-newest-filesystem (for processing newest 100 event logs). " + + "eg: 100-oldest-filesystem (for processing oldest 100 event logs)") val matchEventLogs: ScallopOption[String] = opt[String](required = false, descr = "Filter event logs whose filenames contain the input string") @@ -81,8 +81,10 @@ For usage see below: descr = "Write an SVG graph out for the full application timeline.") validate(filterCriteria) { - case crit if (crit.endsWith("-newest") || crit.endsWith("-oldest")) => Right(Unit) - case _ => Left("Error, the filter criteria must end with either -newest or -oldest") + case crit if (crit.endsWith("-newest-filesystem") || + crit.endsWith("-oldest-filesystem")) => Right(Unit) + case _ => Left("Error, the filter criteria must end with either -newest-filesystem " + + "or -oldest-filesystem") } verify() diff --git a/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationArgs.scala b/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationArgs.scala index b705cddd7b3..a712863b5ed 100644 --- a/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationArgs.scala +++ b/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationArgs.scala @@ -42,10 +42,25 @@ Usage: java -cp rapids-4-spark-tools_2.12-.jar:$SPARK_HOME/jars/* " eg: s3a:///eventlog1 /path/to/eventlog2") val filterCriteria: ScallopOption[String] = opt[String](required = false, - descr = "Filter newest or oldest N eventlogs for processing." + - "eg: 100-newest (for processing newest 100 event logs). " + - "eg: 100-oldest (for processing oldest 100 event logs). 
Filesystem " + - "based filtering happens before any application based filtering.") + descr = "Filter newest or oldest N eventlogs based on application start timestamp, " + + "unique application name or filesystem timestamp. Filesystem based filtering " + + "happens before any application based filtering. " + + "For application based filtering, the order in which filters are " + + "applied is: application-name, start-app-time, filter-criteria. " + + "Application based filter-criteria are: " + + "100-newest (for processing newest 100 event logs based on timestamp inside " + + "the eventlog i.e application start time) " + + "100-oldest (for processing oldest 100 event logs based on timestamp inside " + + "the eventlog i.e application start time) " + + "100-newest-per-app-name (select at most 100 newest log files for each unique " + + "application name) " + + "100-oldest-per-app-name (select at most 100 oldest log files for each unique " + + "application name) " + + "Filesystem based filter criteria are: " + + "100-newest-filesystem (for processing newest 100 event logs based on filesystem " + + "timestamp). 
" + "100-oldest-filesystem (for processing oldest 100 event logs based on filesystem " + "timestamp).") val applicationName: ScallopOption[String] = opt[String](required = false, descr = "Filter event logs whose application name matches " + @@ -97,8 +112,11 @@ Usage: java -cp rapids-4-spark-tools_2.12-.jar:$SPARK_HOME/jars/* } validate(filterCriteria) { - case crit if (crit.endsWith("-newest") || crit.endsWith("-oldest")) => Right(Unit) - case _ => Left("Error, the filter criteria must end with either -newest or -oldest") + case crit if (crit.endsWith("-newest-filesystem") || crit.endsWith("-oldest-filesystem") + || crit.endsWith("-newest-per-app-name") || crit.endsWith("-oldest-per-app-name") + || crit.endsWith("-oldest") || crit.endsWith("-newest")) => Right(Unit) + case _ => Left("Error, the filter criteria must end with -newest, -oldest, " + + "-newest-filesystem, -oldest-filesystem, -newest-per-app-name or -oldest-per-app-name") } validate(timeout) { diff --git a/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationMain.scala b/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationMain.scala index 61d44c6747a..7428101ac7c 100644 --- a/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationMain.scala +++ b/tools/src/main/scala/com/nvidia/spark/rapids/tool/qualification/QualificationMain.scala @@ -94,6 +94,9 @@ object QualificationMain extends Logging { } def argsContainsAppFilters(appArgs: QualificationArgs): Boolean = { - appArgs.applicationName.isSupplied || appArgs.startAppTime.isSupplied + val filterCriteria = appArgs.filterCriteria.toOption + appArgs.applicationName.isSupplied || appArgs.startAppTime.isSupplied || + (filterCriteria.isDefined && (filterCriteria.get.endsWith("-newest") || + filterCriteria.get.endsWith("-oldest") || filterCriteria.get.endsWith("-per-app-name"))) } } diff --git a/tools/src/main/scala/org/apache/spark/sql/rapids/tool/AppFilterImpl.scala 
b/tools/src/main/scala/org/apache/spark/sql/rapids/tool/AppFilterImpl.scala index f6f5887bd48..6b76bf3a462 100644 --- a/tools/src/main/scala/org/apache/spark/sql/rapids/tool/AppFilterImpl.scala +++ b/tools/src/main/scala/org/apache/spark/sql/rapids/tool/AppFilterImpl.scala @@ -73,6 +73,8 @@ class AppFilterImpl( val apps = appsForFiltering.asScala val filterAppName = appArgs.applicationName.getOrElse("") + val filterCriteria = appArgs.filterCriteria.getOrElse("") + val appNameFiltered = if (appArgs.applicationName.isSupplied && filterAppName.nonEmpty) { val filtered = if (filterAppName.startsWith(NEGATE)) { // remove ~ before passing it into the containsAppName function @@ -98,7 +100,38 @@ class AppFilterImpl( } else { appNameFiltered } - appTimeFiltered.map(_.eventlog).toSeq + val appCriteriaFiltered = if (appArgs.filterCriteria.isSupplied && filterCriteria.nonEmpty) { + if (filterCriteria.endsWith("-newest") || filterCriteria.endsWith("-oldest")) { + val filteredInfo = filterCriteria.split("-") + val numberofEventLogs = filteredInfo(0).toInt + val criteria = filteredInfo(1) + val filtered = if (criteria.equals("oldest")) { + appTimeFiltered.toSeq.sortBy(_.appInfo.get.startTime).take(numberofEventLogs) + } else { + appTimeFiltered.toSeq.sortBy(_.appInfo.get.startTime).reverse.take(numberofEventLogs) + } + filtered + } else if (filterCriteria.endsWith("-per-app-name")) { + val distinctAppNameMap = appTimeFiltered.groupBy(_.appInfo.get.appName) + val filteredInfo = filterCriteria.split("-") + val numberofEventLogs = filteredInfo(0).toInt + val criteria = filteredInfo(1) + val filtered = distinctAppNameMap.map { case (name, apps) => + val sortedApps = if (criteria.equals("oldest")) { + apps.toSeq.sortBy(_.appInfo.get.startTime).take(numberofEventLogs) + } else { + apps.toSeq.sortBy(_.appInfo.get.startTime).reverse.take(numberofEventLogs) + } + (name, sortedApps) + } + filtered.values.flatMap(x => x) + } else { + appTimeFiltered + } + } else { + appTimeFiltered + } + 
appCriteriaFiltered.map(_.eventlog).toSeq } private def containsAppName(app: AppFilterReturnParameters, filterAppName: String): Boolean = { diff --git a/tools/src/test/scala/com/nvidia/spark/rapids/tool/profiling/ApplicationInfoSuite.scala b/tools/src/test/scala/com/nvidia/spark/rapids/tool/profiling/ApplicationInfoSuite.scala index b26b6cf26b7..cbe0e2f5f50 100644 --- a/tools/src/test/scala/com/nvidia/spark/rapids/tool/profiling/ApplicationInfoSuite.scala +++ b/tools/src/test/scala/com/nvidia/spark/rapids/tool/profiling/ApplicationInfoSuite.scala @@ -41,6 +41,7 @@ class ApplicationInfoSuite extends FunSuite with Logging { private val expRoot = ToolTestUtils.getTestResourceFile("ProfilingExpectations") private val logDir = ToolTestUtils.getTestResourcePath("spark-events-profiling") + private val qualLogDir = ToolTestUtils.getTestResourcePath("spark-events-qualification") test("test single event") { testSqlCompression() @@ -373,9 +374,9 @@ class ApplicationInfoSuite extends FunSuite with Logging { val appArgs = new ProfileArgs(Array( "--match-event-logs", matchFileName, - "src/test/resources/spark-events-qualification/udf_func_eventlog", - "src/test/resources/spark-events-qualification/udf_dataset_eventlog", - "src/test/resources/spark-events-qualification/dataset_eventlog" + s"$qualLogDir/udf_func_eventlog", + s"$qualLogDir/udf_dataset_eventlog", + s"$qualLogDir/dataset_eventlog" )) val result = EventLogPathProcessor.processAllPaths(appArgs.filterCriteria.toOption, @@ -399,7 +400,7 @@ class ApplicationInfoSuite extends FunSuite with Logging { tempFile2.setLastModified(12324567) // oldest file tempFile3.setLastModified(34567891) // second newest file tempFile4.setLastModified(23456789) - val filterNew = "2-newest" + val filterNew = "2-newest-filesystem" val appArgs = new ProfileArgs(Array( "--filter-criteria", filterNew, @@ -440,7 +441,7 @@ class ApplicationInfoSuite extends FunSuite with Logging { tempFile3.setLastModified(34567891) // second newest file 
tempFile4.setLastModified(23456789) - val filterOld = "3-oldest" + val filterOld = "3-oldest-filesystem" val matchFileName = "temp" val appArgs = new ProfileArgs(Array( "--filter-criteria", diff --git a/tools/src/test/scala/com/nvidia/spark/rapids/tool/qualification/AppFilterSuite.scala b/tools/src/test/scala/com/nvidia/spark/rapids/tool/qualification/AppFilterSuite.scala index dcabf5e41a4..35fc7d4ea55 100644 --- a/tools/src/test/scala/com/nvidia/spark/rapids/tool/qualification/AppFilterSuite.scala +++ b/tools/src/test/scala/com/nvidia/spark/rapids/tool/qualification/AppFilterSuite.scala @@ -221,12 +221,12 @@ class AppFilterSuite extends FunSuite { TestEventLogFSAndAppNameInfo("nds86", msDaysAgo(4), 1), TestEventLogFSAndAppNameInfo("nds86", msWeeksAgo(2), 2)) - test("app name exact and fs 10-newest") { - testFileSystemTimeAndStart(appsWithFsToTest, "10-newest", "nds86", 2) + test("app name exact and fs 10-newest-filesystem") { + testFileSystemTimeAndStart(appsWithFsToTest, "10-newest-filesystem", "nds86", 2) } - test("app name exact and 2-oldest") { - testFileSystemTimeAndStart(appsWithFsToTest, "2-oldest", "ndsweeks2", 1) + test("app name exact and 2-oldest-filesystem") { + testFileSystemTimeAndStart(appsWithFsToTest, "2-oldest-filesystem", "ndsweeks2", 1) } private def testFileSystemTimeAndStart(apps: Array[TestEventLogFSAndAppNameInfo], @@ -272,34 +272,64 @@ class AppFilterSuite extends FunSuite { TestEventLogFSAndAppInfo("app-nds86-1", msDaysAgo(3), "nds86", msDaysAgo(4), 1), TestEventLogFSAndAppInfo("app-nds86-2", msDaysAgo(13), "nds86", msWeeksAgo(2), 2)) - test("full app name exact and fs 10-newest 6 days") { - testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "10-newest", "nds86", "nds86", - "6d", 1) + test("full app name exact and fs 10-newest-filesystem 6 days") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "10-newest-filesystem", + "nds86", "nds86", "6d", 1) } - test("full app name exact and 2-oldest no match from app start") { 
- testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest", "ndsweeks2", "nds", - "6d", 0) + test("full app name exact and 2-oldest-filesystem no match from app start") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest-filesystem", + "ndsweeks2", "nds", "6d", 0) } - test("full app name exact and 2-oldest") { - testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest", "ndsweeks2", "nds", - "3w", 1) + test("full app name exact and 2-oldest-filesystem") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest-filesystem", + "ndsweeks2", "nds", "3w", 1) } - test("full app name exact and 2-oldest no match from filename") { - testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest", "nds", "nomatch", - "3w", 0) + test("full app name exact and 2-oldest-filesystem no match from filename") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest-filesystem", + "nds", "nomatch", "3w", 0) } - test("full 2-oldest no match from app name") { - testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest", "nomatch", "nds", - "3w", 0) + test("full 2-oldest-filesystem no match from app name") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest-filesystem", + "nomatch", "nds", "3w", 0) } - test("full app name exact and 10-oldest and 3w") { - testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "10-oldest", "nds86", "app-nds86", - "3w", 2) + test("full app name exact and 10-oldest-filesystem and 3w") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "10-oldest-filesystem", + "nds86", "app-nds86", "3w", 2) + } + + test("app name and 2-oldest by app time no match from filename") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest", + "nds", "nomatch", "3w", 0) + } + + test("app name and 2-oldest by app time") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-oldest", + "nds", "nds", "3w", 2) + } + + test("app 
name and 2-newest by app time") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "2-newest", + "ndsweeks2", "nds", "3w", 1) + } + + test("app name and 1-newest-per-app-name") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "1-newest-per-app-name", + "nds", "nds", "3w", 3) + } + + test("app name and 10-oldest-per-app-name") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "10-oldest-per-app-name", + "nds", "nds", "3w", 4) + } + + test("app name and 1-newest-per-app-name no match from filename") { + testFileSystemTimeAndStartAndAppFull(appsFullWithFsToTest, "1-newest-per-app-name", "nds", + "nomatch", "3w", 0) + } private def testFileSystemTimeAndStartAndAppFull(apps: Array[TestEventLogFSAndAppInfo], @@ -339,4 +369,76 @@ class AppFilterSuite extends FunSuite { } } } + + private val appsWithAppNameCriteriaToTest = Array( + TestEventLogFSAndAppInfo("app-ndshours18", msHoursAgo(16), "ndshours18", msHoursAgo(18), 1), + TestEventLogFSAndAppInfo("app-ndsweeks-1", msWeeksAgo(1), "ndsweeks", msWeeksAgo(1), 1), + TestEventLogFSAndAppInfo("app-ndsweeks-2", msWeeksAgo(2), "ndsweeks", msWeeksAgo(2), 2), + TestEventLogFSAndAppInfo("app-nds86-1", msDaysAgo(3), "nds86", msDaysAgo(4), 1), + TestEventLogFSAndAppInfo("app-nds86-2", msDaysAgo(13), "nds86", msWeeksAgo(2), 2), + TestEventLogFSAndAppInfo("app-nds86-3", msDaysAgo(18), "nds86", msWeeksAgo(3), 3)) + + test("standalone 1-oldest-per-app-name") { + val expected = Array(("ndshours18", "local-162610430031"), ("ndsweeks", "local-162610430032"), + ("nds86", "local-162610430033")) + testAppFilterCriteriaAndPerAppName(appsWithAppNameCriteriaToTest, "1-oldest-per-app-name", + 3, expected) + } + + test("standalone 2-newest-per-app-name") { + val expected = Array(("ndshours18", "local-162610430031"), ("ndsweeks", "local-162610430031"), + ("ndsweeks", "local-162610430032"), ("nds86", "local-162610430031"), + ("nds86", "local-162610430032")) + 
testAppFilterCriteriaAndPerAppName(appsWithAppNameCriteriaToTest, "2-newest-per-app-name", + 5, expected) + } + + test("standalone 2-newest based on app time") { + val expected = Array(("ndshours18", "local-162610430031"), ("nds86", "local-162610430031")) + testAppFilterCriteriaAndPerAppName(appsWithAppNameCriteriaToTest, + "2-newest", 2, expected) + } + + test("standalone 10-oldest based on app time") { + val expected = Array(("nds86", "local-162610430031"), ("nds86", "local-162610430032"), + ("nds86", "local-162610430033"), ("ndsweeks", "local-162610430031"), + ("ndsweeks", "local-162610430032"), ("ndshours18", "local-162610430031")) + testAppFilterCriteriaAndPerAppName(appsWithAppNameCriteriaToTest, "10-oldest", 6, expected) + } + + private def testAppFilterCriteriaAndPerAppName( + apps: Array[TestEventLogFSAndAppInfo], + filterCriteria: String, expectedFilterSize: Int, + expectedAppName: Array[(String, String)]): Unit = { + TrampolineUtil.withTempDir { outpath => + TrampolineUtil.withTempDir { tmpEventLogDir => + + val fileNames = apps.map { app => + val elogFile = Paths.get(tmpEventLogDir.getAbsolutePath, app.fileName) + // scalastyle:off line.size.limit + val supText = + s"""{"Event":"SparkListenerLogStart","Spark Version":"3.1.1"} + |{"Event":"SparkListenerApplicationStart","App Name":"${app.appName}","App ID":"local-16261043003${app.uniqueId}","Timestamp":${app.appTime},"User":"user1"}""".stripMargin + // scalastyle:on line.size.limit + Files.write(elogFile, supText.getBytes(StandardCharsets.UTF_8)) + new File(elogFile.toString).setLastModified(app.fsTime) + elogFile.toString + } + + val allArgs = Array( + "--output-directory", + outpath.getAbsolutePath(), + "--filter-criteria", + filterCriteria + ) + val appArgs = new QualificationArgs(allArgs ++ fileNames) + val (exit, appSum) = QualificationMain.mainInternal(appArgs) + val resultAppName = appSum.map(x => (x.appName, x.appId)).toArray + + assert(exit == 0) + assert(appSum.size == expectedFilterSize) + 
assert(resultAppName.sorted.sameElements(expectedAppName.sorted)) + } + } + } }