diff --git a/build_inside_container.sh b/build_inside_container.sh index 34e09289..0ef1da30 100755 --- a/build_inside_container.sh +++ b/build_inside_container.sh @@ -27,7 +27,7 @@ autoreconf --install # FLags to print compiler warnings DEBUG_CFLAGS="-Wall -Werror -Wextra" -export CFLAGS=" ${DEBUG_CFLAGS} -I${INSTALL_DIR}/include/rtmessage -I${INSTALL_DIR}/include/msgpack -I${INSTALL_DIR}/include/rbus -I${INSTALL_DIR}/include -I/usr/include/glib-2.0 -I/usr/lib/x86_64-linux-gnu/glib-2.0/include -I/usr/local/include -DFEATURE_SUPPORT_WEBCONFIG -DRDK_LOGGER" +export CFLAGS=" ${DEBUG_CFLAGS} -I${INSTALL_DIR}/include/rtmessage -I${INSTALL_DIR}/include/msgpack -I${INSTALL_DIR}/include/rbus -I${INSTALL_DIR}/include -I/usr/include/glib-2.0 -I/usr/lib/x86_64-linux-gnu/glib-2.0/include -I/usr/local/include -DFEATURE_SUPPORT_WEBCONFIG -DRDK_LOGGER -DPERSIST_LOG_MON_REF" export LDFLAGS="-L/usr/lib/x86_64-linux-gnu -lglib-2.0" diff --git a/source/dcautil/dca.c b/source/dcautil/dca.c index f217a4a0..90c91a3b 100644 --- a/source/dcautil/dca.c +++ b/source/dcautil/dca.c @@ -30,7 +30,7 @@ #include #include #include - +#include #include #include "dcautil.h" @@ -98,14 +98,15 @@ static const char *strnstr(const char *haystack, const char *needle, size_t len) for (size_t i = 0; i + needle_len <= len; i++) { - if (memcmp(haystack + i, needle, needle_len) == 0) - { - return haystack + i; - } if (haystack[i] == '\0') { break; } + if (memcmp(haystack + i, needle, needle_len) == 0) + { + return haystack + i; + } + } return NULL; } @@ -144,10 +145,7 @@ int processTopPattern(char* profileName, Vector* topMarkerList, Vector* out_gre size_t var = 0; size_t vCount = Vector_Size(topMarkerList); T2Debug("topMarkerList for profile %s is of count = %lu \n", profileName, (unsigned long )vCount); - // Get logfile -> seek value map associated with the profile - // We are getting the exec count directly from the profileExecCounter parameter - //int profileExecCounter = gsProfile->execCounter; char* 
filename = NULL; for (var = 0; var < vCount; ++var) @@ -159,6 +157,10 @@ int processTopPattern(char* profileName, Vector* topMarkerList, Vector* out_gre } int tmp_skip_interval, is_skip_param; tmp_skip_interval = grepMarkerObj->skipFreq; + if(tmp_skip_interval <= 0) + { + tmp_skip_interval = 0; + } is_skip_param = (profileExecCounter % (tmp_skip_interval + 1) == 0) ? 0 : 1; if (is_skip_param != 0) { @@ -193,6 +195,10 @@ int processTopPattern(char* profileName, Vector* topMarkerList, Vector* out_gre // If the skip frequency is set, skip the marker processing for this interval int tmp_skip_interval, is_skip_param; tmp_skip_interval = grepMarkerObj->skipFreq; + if(tmp_skip_interval <= 0) + { + tmp_skip_interval = 0; + } is_skip_param = (profileExecCounter % (tmp_skip_interval + 1) == 0) ? 0 : 1; if (is_skip_param != 0) { @@ -509,7 +515,6 @@ static char* getAbsolutePatternMatch(FileDescriptor* fileDescriptor, const char* static int processPatternWithOptimizedFunction(const GrepMarker* marker, Vector* out_grepResultList, FileDescriptor* filedescriptor) { // Sanitize the input - const char* memmmapped_data_cf = filedescriptor->cfaddr; if (!marker || !out_grepResultList || !memmmapped_data_cf) { @@ -547,7 +552,6 @@ static int processPatternWithOptimizedFunction(const GrepMarker* marker, Vector* { // Get the last occurrence of the pattern in the memory-mapped data last_found = getAbsolutePatternMatch(filedescriptor, pattern); - // TODO : If trimParameter is true, trim the pattern before adding to the result list if (last_found) { // If a match is found, process it accordingly @@ -598,7 +602,6 @@ static int getLogFileDescriptor(GrepSeekProfile* gsProfile, const char* logPath, return -1; } - // Calculate the file size struct stat sb; if (fstat(fd, &sb) == -1) { @@ -607,7 +610,6 @@ static int getLogFileDescriptor(GrepSeekProfile* gsProfile, const char* logPath, return -1; } - // Check if the file size is 0 if (sb.st_size == 0) { T2Error("The size of the logfile is 0 for 
%s\n", logFile); @@ -665,7 +667,6 @@ static int getRotatedLogFileDescriptor(const char* logPath, const char* logFile) return -1; } - // Calculate the file size struct stat rb; if (fstat(rd, &rb) == -1) { @@ -692,10 +693,12 @@ static void freeFileDescriptor(FileDescriptor* fileDescriptor) if(fileDescriptor->baseAddr) { munmap(fileDescriptor->baseAddr, fileDescriptor->cf_file_size); + fileDescriptor->baseAddr = NULL; } if(fileDescriptor->rotatedAddr) { munmap(fileDescriptor->rotatedAddr, fileDescriptor->rf_file_size); + fileDescriptor->rotatedAddr = NULL; } fileDescriptor->cfaddr = NULL; fileDescriptor->rfaddr = NULL; @@ -736,7 +739,7 @@ static FileDescriptor* getFileDeltaInMemMapAndSearch(const int fd, const off_t s off_t offset_in_page_size_multiple ; unsigned int bytes_ignored = 0, bytes_ignored_main = 0, bytes_ignored_rotated = 0; // Find the nearest multiple of page size - if (seek_value > 0) + if (seek_value > 0 && PAGESIZE > 0) { offset_in_page_size_multiple = (seek_value / PAGESIZE) * PAGESIZE; bytes_ignored = seek_value - offset_in_page_size_multiple; @@ -746,64 +749,88 @@ static FileDescriptor* getFileDeltaInMemMapAndSearch(const int fd, const off_t s offset_in_page_size_multiple = 0; bytes_ignored = 0; } + //create a tmp file for main file fd + char tmp_fdmain[] = "/tmp/dca_tmpfile_fdmainXXXXXX"; + int tmp_fd = mkstemp(tmp_fdmain); + if (tmp_fd == -1) + { + T2Error("Failed to create temp file: %s\n", strerror(errno)); + return NULL; + } + unlink(tmp_fdmain); + off_t offset = 0; + ssize_t sent = sendfile(tmp_fd, fd, &offset, sb.st_size); + if (sent != sb.st_size) + { + T2Error("sendfile failed: %s\n", strerror(errno)); + close(tmp_fd); + return NULL; + } if(seek_value > sb.st_size || check_rotated == true) { int rd = getRotatedLogFileDescriptor(logPath, logFile); - if (rd == -1) + if (rd != -1 && fstat(rd, &rb) == 0 && rb.st_size > 0) { - T2Error("Error opening rotated file. 
Start search in current file\n"); - T2Debug("File size rounded to nearest page size used for offset read: %jd bytes\n", (intmax_t)offset_in_page_size_multiple); - addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, fd, offset_in_page_size_multiple); - bytes_ignored_main = bytes_ignored; - } - else - { - int fs = 0; - fs = fstat(rd, &rb); - if(fs == -1) + char tmp_fdrotated[] = "/tmp/dca_tmpfile_fdrotatedXXXXXX"; + int tmp_rd = mkstemp(tmp_fdrotated); + if (tmp_rd == -1) { - T2Error("Error getting file size\n"); - close(rd); + T2Error("Failed to create temp file: %s\n", strerror(errno)); + return NULL; } - else + unlink(tmp_fdrotated); + offset = 0; + sent = sendfile(tmp_rd, rd, &offset, rb.st_size); + if (sent != rb.st_size) { - if(rb.st_size == 0) - { - T2Error("The Size of the logfile is 0\n"); - close(rd); - } + T2Error("sendfile failed: %s\n", strerror(errno)); + close(tmp_rd); + return NULL; } - - if(rb.st_size > 0) + addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, tmp_fd, 0); + addrrf = mmap(NULL, rb.st_size, PROT_READ, MAP_PRIVATE, tmp_rd, offset_in_page_size_multiple); + bytes_ignored_rotated = bytes_ignored; + close(rd); + close(tmp_rd); + rd = -1; + } + else + { + T2Error("Error opening rotated file. Start search in current file\n"); + T2Debug("File size rounded to nearest page size used for offset read: %jd bytes\n", (intmax_t)offset_in_page_size_multiple); + if(seek_value < sb.st_size) { - addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, fd, 0); - addrrf = mmap(NULL, rb.st_size, PROT_READ, MAP_PRIVATE, rd, offset_in_page_size_multiple); - bytes_ignored_rotated = bytes_ignored; - if(rd != -1) - { - close(rd); - rd = -1; - } + addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, tmp_fd, offset_in_page_size_multiple); + bytes_ignored_main = bytes_ignored; } - - - if(rb.st_size == 0 && fs == -1) + else { - T2Debug("No contents in rotated log file. 
File size rounded to nearest page size used for offset read: %jd bytes\n", (intmax_t)offset_in_page_size_multiple); - addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, fd, offset_in_page_size_multiple); - bytes_ignored_main = bytes_ignored; + T2Debug("Log file got rotated. Ignoring invalid mapping\n"); + close(tmp_fd); + close(fd); + return NULL; } } } else { - T2Info("File size rounded to nearest page size used for offset read: %jd bytes\n", (intmax_t)offset_in_page_size_multiple); - addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, fd, offset_in_page_size_multiple); - bytes_ignored_main = bytes_ignored; - addrrf = NULL; // No rotated file in this case + T2Debug("File size rounded to nearest page size used for offset read: %jd bytes\n", (intmax_t)offset_in_page_size_multiple); + if(seek_value < sb.st_size) + { + addrcf = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, tmp_fd, offset_in_page_size_multiple); + bytes_ignored_main = bytes_ignored; + } + else + { + T2Debug("Log file got rotated. 
Ignoring invalid mapping\n"); + close(tmp_fd); + close(fd); + return NULL; + } + addrrf = NULL; } - + close(tmp_fd); close(fd); if (addrcf == MAP_FAILED) @@ -871,12 +898,10 @@ static int parseMarkerListOptimized(GrepSeekProfile *gsProfile, Vector * ip_vMar } char *prevfile = NULL; - //GrepSeekProfile* gsProfile = NULL; + size_t var = 0; size_t vCount = Vector_Size(ip_vMarkerList); - // Get logfile -> seek value map associated with the profile - //gsProfile = (GrepSeekProfile *) getLogSeekMapForProfile(profileName); if(NULL == gsProfile) { T2Error("%s Unable to retrieve/create logSeekMap for profile \n", __FUNCTION__); diff --git a/test/functional-tests/features/telemetry_process_multiprofile.feature b/test/functional-tests/features/telemetry_process_multiprofile.feature index 7670f59d..9d4219b2 100644 --- a/test/functional-tests/features/telemetry_process_multiprofile.feature +++ b/test/functional-tests/features/telemetry_process_multiprofile.feature @@ -226,3 +226,65 @@ Scenario: Multiprofile with TriggerConditions Given When the telemetry daemon is already running When a multiprofile is configured with TriggerConditions Then Multiprofile should be accepted and report should be generated whenever trigger condition is triggered + +Scenario: Check for HASH value matches of profile to avoid duplicate processing + Given a multiprofile is running + When another multiprofile with same name and hash is configured + Then the configuration will be ignored + +Scenario: Support for subscribing to TR181 Parameter value change + Given a datamodel marker is configured as method subscribe + When the tr181 parameter value changes + Then the value change will be sent as an event to the telemetry daemon + +Scenario: Data harvesting from previous logs folder for report profiles with log file search markers + Given the device has logs from the previous session in the PreviousLogs folder + When a profile goes through log files for report generation + Then the log files in PreviousLogs 
folder will also be grepped for log lines + +Scenario: Capability to support multiple split markers for the same log line + When two split markers are configured for the same log line in a file + Then both the markers will be reported + +Scenario: Include data from data source Tr181 parameters as Accumulate + Given a datamodel marker is configured as method subscribe and use accumulate + When the tr181 parameter value changes multiple time inside the reporting interval + Then all the changes will be reported with values + +Scenario: Report sending over HTTP protocol + Given a profile is confugred with report sending protocol as HTTP along with the respective endpoint + Then the report will be sent to the configured endpoint + +Scenario: Caching of upload failed reports + Given a json report is attemplted to be sent the configured method + When the attempt to send the report fails + Then the report will be cached to be sent later along with the next report + +Scenario: Report sending with protocol set as RBUS_METHOD in report profiles. + Given a profile is confugred with report sending protocol as HTTP along with the respective datamodel + Then the report will be configured to the respective datamodel + +Scenario: Report generation for profiles with log grep markers during log file rotation scenarios + Given a grep marker is configured + When the respective log file reaches a certain limit and has been rotated + Then the content of the roatated log file is also grepped for the search string + +Scenario: Event accumulate with and without timestamp in report profiles for event markers and datamodel. + Given an event marker or tr181 marker with subscribe are configured with reportTimeStamp + When the event is sent to the telementry + Then the telemetry report will have the time the event was received as timestamp + +Scenario: Forced on demand reporting outside the regular reporting intervals. 
+ Given a single profile or a multiprofile is running + When kill signal 29 is sent to the telemetry daemon + Then a reportwill be generated immediately for all the running profiles + +Scenario: Stress testing of interaction with rbus interface to check for any deadlocks or rbus timeouts. + Given telemetry is running and an event marker is configured + When the configured event markers is sent in large numbers without any interval + Then all the events should be captured and telemetry daemon should not be crashing + +Scenario: profile persistence + Given a multiprofile is expired + When the telemetry is restarted + Then the profile will be enabled after restart diff --git a/test/functional-tests/features/telemetry_process_singleprofile.feature b/test/functional-tests/features/telemetry_process_singleprofile.feature new file mode 100644 index 00000000..3b816fed --- /dev/null +++ b/test/functional-tests/features/telemetry_process_singleprofile.feature @@ -0,0 +1,40 @@ +#################################################################################### +# If not stated otherwise in this file or this component's Licenses +# following copyright and licenses apply: +# +# Copyright 2024 RDK Management +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#################################################################################### + + +Feature: Telemetry Single profile configuration and report generation + + Scenario: Single profile configuration with event marker and use as accumulate + Given When the telemetry daemon is already running + When a single profile is configured with event marker and use as accumulate + Then generated report should contain the values for all occurrences of the marker + + Scenario: Capability to support multiple split markers for the same log line + When two split markers are configured for the same log line in a file + Then both the markers will be reported + + Scenario: Caching of upload failed reports + Given a json report is attemplted to be sent the configured method + When the attempt to send the report fails + Then the report will be cached to be sent later along with the next report + +Scenario: Data harvesting from previous logs folder for report profiles with log file search markers + Given the device has logs from the previous session in the PreviousLogs folder + When a profile goes through log files for report generation + Then the log files in PreviousLogs folder will also be grepped for log lines diff --git a/test/functional-tests/features/telemetry_process_tempProfile.feature b/test/functional-tests/features/telemetry_process_tempProfile.feature new file mode 100644 index 00000000..a0498b1f --- /dev/null +++ b/test/functional-tests/features/telemetry_process_tempProfile.feature @@ -0,0 +1,95 @@ +#################################################################################### +# If not stated otherwise in this file or this component's Licenses +# following copyright and licenses apply: +# +# Copyright 2024 RDK Management +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#################################################################################### + + +Feature: Telemetry temporary profile configuration and report generation + +Scenario: Verify co-existence of Report profile and Temporary report profile + Given a temp profile is configured and running + When a temporary profile is configured + Then both profiles should be running without any interference from each other + + Scenario: temp profile configuration with Datamodel marker + Given When the telemetry daemon is already running + When a temp profile is configured with Datamodel marker + Then the temp profile should be enabled + Then generated report should contain the value of the tr181 parameter given in the reference + +Scenario: Support for subscribing to TR181 Parameter value change + Given a datamodel marker is configured as method subscribe + When the tr181 parameter value changes + Then the value change will be sent as an event to the telemetry daemon + +Scenario: Data harvesting from previous logs folder for report profiles with log file search markers + Given the device has logs from the previous session in the PreviousLogs folder + When a temp profile goes through log files for report generation + Then the log files in PreviousLogs folder will also be grepped for log lines + + Scenario: temp profile configuration with event marker and use as accumulate + Given When the telemetry daemon is already running + When a temp profile is configured with event marker and use as accumulate + Then the temp profile should be enabled + Then generated report should 
contain the values for all occurrences of the marker + + +Scenario: Include data from data source Tr181 parameters as Accumulate + Given a datamodel marker is configured as method subscribe and use accumulate + When the tr181 parameter value changes multiple time inside the reporting interval + Then all the changes will be reported with values + +Scenario: Multiple profiles configured simultaneously + When a temporary profile configuration has multiple profiles + Then all the profiles will be processed and run if applicable + + Scenario: temp profile configuration with ReportingInterval + Given When the telemetry daemon is already running + When a temp profile is configured with ReportingInterval + Then the temp profile should be enabled + Then report should be generated after ReportingInterval + + Scenario: temp profile configuration with ActivationTimeout + Given When the telemetry daemon is already running + When a temp profile is configured with ActivationTimeout + Then the temp profile should be enabled + Then the profile should be disabled after the expiring of ActivationTimeout + +Scenario: temp profile with TriggerConditions + Given When the telemetry daemon is already running + When a temp profile is configured with TriggerConditions + Then temp profile should be accepted and report should be generated whenever trigger condition is triggered + + Scenario: temp profile configuration with grep marker, use as absolute and with regex + Given When the telemetry daemon is already running + When a temp profile is configured with grep marker, use as absolute and with regex + Then the temp profile should be enabled + Then generated report should contain the content after the search string until the end of the line matching the given regex + +Scenario: Caching of upload failed reports + Given a json report is attemplted to be sent the configured method + When the attempt to send the report fails + Then the report will be cached to be sent later along with the next 
report + +Scenario: Report sending with protocol set as RBUS_METHOD in report profiles. + Given a profile is confugred with report sending protocol as HTTP along with the respective datamodel + Then the report will be configured to the respective datamodel + +Scenario: profile non persistence + Given a temp profile is expired + When the telemetry is restarted + Then the profile will not be enabled after restart diff --git a/test/functional-tests/tests/app.c b/test/functional-tests/tests/app.c new file mode 100644 index 00000000..f7685e02 --- /dev/null +++ b/test/functional-tests/tests/app.c @@ -0,0 +1,19 @@ +#include +#include +#include "../../../include/telemetry_busmessage_sender.h" + +void main(int argc, char *argv[]) +{ + int i = 0, n; + n = (argc < 2) ? 100 : atoi(argv[1]); + // Initialize Telemetry2.0 + t2_init("event_spammer"); + + while(i <= n) + { + t2_event_d("T2_INFO_Test", i); + i++; + } + t2_uninit(); + printf("Sent %d t2_event_d events.\n", n); +} diff --git a/test/functional-tests/tests/helper_functions.py b/test/functional-tests/tests/helper_functions.py index 45dc34e1..a287d426 100644 --- a/test/functional-tests/tests/helper_functions.py +++ b/test/functional-tests/tests/helper_functions.py @@ -94,6 +94,9 @@ def rbus_get_data(param: str): return subprocess.run(RBUSCLI_GET_CMD + param, shell=True, capture_output=True).stdout.decode('utf-8') def rbus_set_data(param: str, type:str, value: str): + if param in [T2_REPORT_PROFILE_PARAM, T2_TEMP_REPORT_PROFILE_PARAM]: + # Put the value between single quotes + value = f"'{value}'" return subprocess.run(f"{RBUSCLI_SET_CMD} {param} {type} {value}", shell=True, capture_output=True).stdout.decode('utf-8') def get_process_uptime(pid): diff --git a/test/functional-tests/tests/report_profiles.py b/test/functional-tests/tests/report_profiles.py index b1b65dab..e66174f0 100644 --- a/test/functional-tests/tests/report_profiles.py +++ b/test/functional-tests/tests/report_profiles.py @@ -546,7 +546,7 @@ "Version": 
"0.1", "Protocol": "HTTP", "EncodingType": "", - "ActivationTimeOut": 1200, + "ActivationTimeOut": 21, "ReportingInterval": 20, "GenerateNow": false, "RootName": "FR2_US_TC3", @@ -663,7 +663,7 @@ "Description": "RDKB_Profile", "Version": "0.1", "Protocol": "HTTP", - "ActivationTimeOut": 1200, + "ActivationTimeOut": 21, "ReportingInterval": 20, "GenerateNow": false, "RootName": "FR2_US_TC3", @@ -780,7 +780,7 @@ "Version": "0.1", "Protocol": "HTTP", "EncodingType": "JSON", - "ActivationTimeout": 1200, + "ActivationTimeout": 21, "GenerateNow": false, "RootName": "FR2_US_TC3", "TimeReference": "2023-01-25T13:47:00Z", @@ -838,7 +838,7 @@ "Version": "0.1", "Protocol": "HTTP", "EncodingType": "JSON", - "ActivationTimeout": 3600, + "ActivationTimeout": 36, "ReportingInterval": 30, "RootName": "FR2_US_TC3", "TimeReference": "2023-01-25T13:47:00Z", @@ -890,75 +890,15 @@ ] }''' -data_with_reporting_interval = '''{ - "profiles": [ - { - "name": "TR_AC732", - "hash": "Hash732", - "value": { - "Name": "RDKB_Profile_3", - "Description": "RDKB_Profile", - "Version": "0.1", - "Protocol": "HTTP", - "EncodingType": "JSON", - "ActivationTimeout": 3600, - "ReportingInterval": 20, - "GenerateNow": false, - "RootName": "FR2_US_TC3", - "Parameter": [ - { - "type": "event", - "eventName": "TEST_EVENT_MARKER_1", - "component": "sysint", - "use": "count" - }, - { - "type": "event", - "eventName": "TEST_EVENT_MARKER_2", - "component": "sysint", - "use": "accumulate" - }, - { - "type": "grep", - "marker": "SYS_INFO_CrashPortalUpload_success", - "search": "Success loading", - "logFile": "core_log.txt", - "use": "count", - "reportEmpty":true - } - ], - "ReportingAdjustments": [ - { - "ReportOnUpdate": false, - "FirstReportingInterval": 15, - "MaxUploadLatency": 20000 - } - ], - "HTTP": { - "URL": "https://mockxconf:50051/dataLakeMock/", - "Compression": "None", - "Method": "POST", - "RequestURIParameter": [ - { - "Name": "reportName", - "Reference": "Profile.Name" - } - ] - }, - "JSONEncoding": 
{ - "ReportFormat": "NameValuePair", - "ReportTimestamp": "None" - } - } - } - ] -}''' +data_with_reporting_interval = '{ "profiles": [ { "name": "TR_AC732", "hash": "Hash732", "value": { "Name": "RDKB_Profile_3", "Description": "RDKB_Profile", "Version": "0.1", "Protocol": "RBUS_METHOD", "EncodingType": "JSON", "ActivationTimeout": 3600, "ReportingInterval": 20, "GenerateNow": false, "RootName": "FR2_US_TC3", "Parameter": [ { "type": "event", "eventName": "TEST_EVENT_MARKER_1", "component": "sysint", "use": "count" }, { "type": "event", "eventName": "TEST_EVENT_MARKER_2", "component": "sysint", "use": "accumulate", "reportTimestamp":"Unix-Epoch" }, { "type": "grep", "marker": "SYS_INFO_CrashPortalUpload_success", "search": "Success loading", "logFile": "core_log.txt", "use": "count", "reportEmpty":true } ], "ReportingAdjustments": [ { "ReportOnUpdate": false, "FirstReportingInterval": 15, "MaxUploadLatency": 20000 } ], "RBUS_METHOD":{"Method":"Device.X_RDK_Xmidt.SendData","Parameters":[{"name":"msg_type","value":"event"},{"name":"source","value":"telemetry2"},{"name":"dest","value":"event:/profile-report/LTE-report"},{"name":"content_type","value":"application/json"},{"name":"qos","value":"75"}]}, "JSONEncoding": { "ReportFormat": "NameValuePair", "ReportTimestamp": "None" } } } ] }' + +data_temp_with_reporting_interval = '{ "profiles": [ { "name": "temp_AC732", "hash": "temp_732", "value": { "Name": "RDKB_Profile_3", "Description": "RDKB_Profile", "Version": "0.1", "Protocol": "RBUS_METHOD", "EncodingType": "JSON", "ActivationTimeout": 3600, "ReportingInterval": 20, "GenerateNow": false, "RootName": "temp_AC732", "Parameter": [ { "type": "event", "eventName": "TEST_EVENT_MARKER_1", "component": "sysint", "use": "count" }, { "type": "event", "eventName": "TEST_EVENT_MARKER_2", "component": "sysint", "use": "accumulate", "reportTimestamp":"Unix-Epoch" }, { "type": "grep", "marker": "SYS_INFO_CrashPortalUpload_success", "search": "Success loading", "logFile": 
"core_log.txt", "use": "count", "reportEmpty":true } ], "ReportingAdjustments": [ { "ReportOnUpdate": false, "FirstReportingInterval": 15, "MaxUploadLatency": 20000 } ], "RBUS_METHOD":{"Method":"Device.X_RDK_Xmidt.SendData","Parameters":[{"name":"msg_type","value":"event"},{"name":"source","value":"telemetry2"},{"name":"dest","value":"event:/profile-report/LTE-report"},{"name":"content_type","value":"application/json"},{"name":"qos","value":"75"}]}, "JSONEncoding": { "ReportFormat": "NameValuePair", "ReportTimestamp": "None" } } } ] }' data_with_Generate_Now = '''{ "profiles": [ { - "name": "TR_AC767", - "hash": "Hash767", + "name": "TR_AC777", + "hash": "Hash777", "value": { "Name": "RDKB_Profile_1", "Description": "RDKB_Profile", @@ -966,23 +906,31 @@ "Protocol": "HTTP", "EncodingType": "JSON", "ActivationTimeOut": 50, - "ReportingInterval": 20, + "ReportingInterval": 40, "GenerateNow": true, "RootName": "FR2_US_TC3", "Parameter": [ + { + "type": "grep", + "marker": "FILE_Read_Progress", + "search": "file reading", + "logFile": "core_log.txt", + "use": "absolute", + "trim":true + }, + { + "type": "grep", + "marker": "FILE_Write_Progress", + "search": "file writing", + "logFile": "core_log.txt", + "use": "accumulate" + }, { "type": "dataModel", "name": "MODEL_NAME", "reference": "Device.DeviceInfo.ModelName", "use": "absolute" }, - { - "type": "event", - "eventName": "USED_MEM1_split", - "component": "sysint", - "use": "absolute", - "reportEmpty":true - }, { "type": "grep", "marker": "SYS_INFO_CrashPortalUpload_success", @@ -1021,64 +969,11 @@ "ReportTimestamp": "None" } } - }, - { - "name": "TR_AC777", - "hash": "Hash777", - "value": { - "Name": "RDKB_Profile_1", - "Description": "RDKB_Profile", - "Version": "0.1", - "Protocol": "HTTP", - "EncodingType": "JSON", - "ActivationTimeOut": 50, - "ReportingInterval": 20, - "GenerateNow": true, - "RootName": "FR2_US_TC3", - "Parameter": [ - { - "type": "grep", - "marker": "FILE_Read_Progress", - "search": "file 
reading", - "logFile": "core_log.txt", - "use": "absolute", - "trim":true - }, - { - "type": "grep", - "marker": "FILE_Write_Progress", - "search": "file writing", - "logFile": "core_log.txt", - "use": "accumulate" - } - ], - "ReportingAdjustments": [ - { - "ReportOnUpdate": false, - "FirstReportingInterval": 15, - "MaxUploadLatency": 20000 - } - ], - "HTTP": { - "URL": "https://mockxconf:50051/dataLakeMock/", - "Compression": "None", - "Method": "POST", - "RequestURIParameter": [ - { - "Name": "reportName", - "Reference": "Profile.Name" - } - ] - }, - "JSONEncoding": { - "ReportFormat": "NameValuePair", - "ReportTimestamp": "None" - } - } } ] }''' + data_with_full_log_path = '''{ "profiles": [ { @@ -1221,9 +1116,8 @@ "Version": "0.1", "Protocol": "HTTP", "EncodingType": "JSON", - "ActivationTimeOut": 20, - "DeleteOnTimeout": true, - "ReportingInterval": 10, + "ActivationTimeOut": 200, + "ReportingInterval": 100, "GenerateNow": false, "RootName": "FR2_US_TC3", "Parameter": [ @@ -1261,71 +1155,377 @@ ] }''' -data_with_delete_on_timeout = '''{ - "profiles": [ - { - "name": "TR_AC66", - "hash": "Hash66", - "value": { - "Name": "RDKB_Profile_1", - "Description": "RDKB_Profile", - "Version": "0.1", - "Protocol": "HTTP", - "EncodingType": "JSON", - "ActivationTimeOut": 20, - "DeleteOnTimeout": true, - "ReportingInterval": 10, - "GenerateNow": false, - "RootName": "FR2_US_TC3", - "TimeReference": "2023-01-25T13:47:00Z", - "Parameter": [ - { - "type": "dataModel", - "name": "MODEL_NAME", - "reference": "Device.DeviceInfo.ModelName", - "use": "absolute", - "regex":"[A-Z]+" - }, - { - "type": "event", - "eventName": "TEST_EVENT_MARKER_2", - "component": "sysint", - "use": "absolute", - "regex":"[0-9]+" - }, - { - "type": "grep", - "marker": "SYS_INFO_CrashPortalUpload_success", - "search": "Success uploading", - "logFile": "core_log.txt", - "use": "absolute", - "regex":"[0-9]+" - } - ], - "ReportingAdjustments": - { - "ReportOnUpdate": true, - "FirstReportingInterval": 5, - 
"MaxUploadLatency": 10 - }, - "HTTP": { - "URL": "https://mockxconf:50051/dataLakeMock/", - "Compression": "None", - "Method": "POST", - "RequestURIParameter": [ - { - "Name": "reportName", - "Reference": "Profile.Name" - } - ] - }, - "JSONEncoding": { - "ReportFormat": "NameValuePair", - "ReportTimestamp": "None" - } +data_with_delete_on_timeout = '{ "profiles": [ { "name": "rp_TR_AC66", "hash": "rp_Hash66", "value": { "Name": "RDKB_Profile_1", "Description": "RDKB_Profile", "Version": "0.1", "Protocol": "HTTP", "EncodingType": "JSON", "ActivationTimeOut": 20, "DeleteOnTimeout": true, "ReportingInterval": 10, "GenerateNow": false, "RootName": "rp_TR_AC66", "TimeReference": "2023-01-25T13:47:00Z", "Parameter": [ { "type": "dataModel", "name": "MODEL_NAME", "reference": "Device.DeviceInfo.ModelName", "use": "absolute", "regex":"[A-Z]+" }, { "type": "event", "eventName": "TEST_EVENT_MARKER_2", "component": "sysint", "use": "absolute", "regex":"[0-9]+" }, { "type": "grep", "marker": "SYS_INFO_CrashPortalUpload_success", "search": "Success uploading", "logFile": "core_log.txt", "use": "absolute", "regex":"[0-9]+" } ], "ReportingAdjustments": { "ReportOnUpdate": true, "FirstReportingInterval": 5, "MaxUploadLatency": 10 }, "HTTP": { "URL": "https://mockxconf:50051/dataLakeMock/", "Compression": "None", "Method": "POST", "RequestURIParameter": [ { "Name": "reportName", "Reference": "Profile.Name" } ] }, "JSONEncoding": { "ReportFormat": "NameValuePair", "ReportTimestamp": "None" } } } ] }' + +data_temp_with_delete_on_timeout = '{ "profiles": [ { "name": "temp_TR_AC66", "hash": "temp_Hash66", "value": { "Name": "RDKB_Profile_1", "Description": "RDKB_Profile", "Version": "0.1", "Protocol": "HTTP", "EncodingType": "JSON", "ActivationTimeOut": 20, "DeleteOnTimeout": true, "ReportingInterval": 10, "GenerateNow": false, "RootName": "temp_TR_AC66", "TimeReference": "2023-01-25T13:47:00Z", "Parameter": [ { "type": "dataModel", "name": "MODEL_NAME", "reference": 
"Device.DeviceInfo.ModelName", "use": "absolute", "regex":"[A-Z]+" }, { "type": "event", "eventName": "TEST_EVENT_MARKER_2", "component": "sysint", "use": "absolute", "regex":"[0-9]+" }, { "type": "grep", "marker": "SYS_INFO_CrashPortalUpload_success", "search": "Success uploading", "logFile": "core_log.txt", "use": "absolute", "regex":"[0-9]+" } ], "ReportingAdjustments": { "ReportOnUpdate": true, "FirstReportingInterval": 5, "MaxUploadLatency": 10 }, "HTTP": { "URL": "https://mockxconf:50051/dataLakeMock/", "Compression": "None", "Method": "POST", "RequestURIParameter": [ { "Name": "reportName", "Reference": "Profile.Name" } ] }, "JSONEncoding": { "ReportFormat": "NameValuePair", "ReportTimestamp": "None" } } } ] }' + +data_with_split_markers = '''{ + "profiles": [ + { + "name": "Split66", + "hash": "Split66", + "value": { + "Name": "RDKB_Profile_1", + "Description": "RDKB_Profile", + "Version": "0.1", + "Protocol": "HTTP", + "EncodingType": "JSON", + "ActivationTimeOut": 20, + "ReportingInterval": 10, + "GenerateNow": false, + "RootName": "rp_Split_Marker", + "TimeReference": "2023-01-25T13:47:00Z", + "Parameter": [ + { + "type": "dataModel", + "name": "IUI_VERSION", + "reference": "Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", + "method":"subscribe", + "use": "accumulate", + "reportTimestamp":"Unix-Epoch" + }, + { + "type": "event", + "eventName": "TEST_EVENT_MARKER_2", + "component": "sysint", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "Rotated_log_line", + "logFile": "core_log.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_PreviousLogs", + "search": "This log file is for previous logs", + "logFile": "session0.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI", + "search": "WhoAmI feature is", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI_Status", + "search": "WhoAmI feature", + 
"logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "some random lines for filling the file", + "logFile": "rotated.txt", + "use": "count" + } + ], + "ReportingAdjustments": { + "ReportOnUpdate": true, + "FirstReportingInterval": 5, + "MaxUploadLatency": 10 + }, + "HTTP": { + "URL": "https://mockxconf:50051/dataLookeMock/", + "Compression": "None", + "Method": "POST", + "RequestURIParameter": [ + { + "Name": "reportName", + "Reference": "Profile.Name" } + ] + }, + "JSONEncoding": { + "ReportFormat": "NameValuePair", + "ReportTimestamp": "Unix-Epoch" } - ] + } + } + ] +}''' + +data_temp_with_split_markers = '''{ + "profiles": [ + { + "name": "temp_Split66", + "hash": "temp_Split66", + "value": { + "Name": "RDKB_Profile_1", + "Description": "RDKB_Profile", + "Version": "0.1", + "Protocol": "HTTP", + "EncodingType": "JSON", + "ActivationTimeOut": 20, + "ReportingInterval": 10, + "GenerateNow": false, + "RootName": "temp_Split_Marker", + "TimeReference": "2023-01-25T13:47:00Z", + "Parameter": [ + { + "type": "dataModel", + "name": "IUI_VERSION", + "reference": "Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", + "method":"subscribe", + "use": "accumulate", + "reportTimestamp":"Unix-Epoch" + }, + { + "type": "event", + "eventName": "TEST_EVENT_MARKER_2", + "component": "sysint", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "Rotated_log_line", + "logFile": "core_log.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_PreviousLogs", + "search": "This log file is for previous logs", + "logFile": "session0.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI", + "search": "WhoAmI feature is", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI_Status", + "search": "WhoAmI feature", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + 
{ + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "some random lines for filling the file", + "logFile": "rotated.txt", + "use": "count" + } + ], + "ReportingAdjustments": { + "ReportOnUpdate": true, + "FirstReportingInterval": 5, + "MaxUploadLatency": 10 + }, + "HTTP": { + "URL": "https://mockxconf:50051/dataTempLookeMock/", + "Compression": "None", + "Method": "POST", + "RequestURIParameter": [ + { + "Name": "reportName", + "Reference": "Profile.Name" + } + ] + }, + "JSONEncoding": { + "ReportFormat": "NameValuePair", + "ReportTimestamp": "Unix-Epoch" + } + } + } + ] +}''' + + +data_for_persistence = '''{ + "profiles": [ + { + "name": "per_66", + "hash": "per_66", + "value": { + "Name": "RDKB_Profile_1", + "Description": "RDKB_Profile", + "Version": "0.1", + "Protocol": "HTTP", + "EncodingType": "JSON", + "ActivationTimeOut": 20, + "ReportingInterval": 10, + "GenerateNow": false, + "RootName": "rp_Split_Marker", + "TimeReference": "2023-01-25T13:47:00Z", + "Parameter": [ + { + "type": "dataModel", + "name": "IUI_VERSION", + "reference": "Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", + "method":"subscribe", + "use": "accumulate", + "reportTimestamp":"Unix-Epoch" + }, + { + "type": "event", + "eventName": "TEST_EVENT_MARKER_2", + "component": "sysint", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "Rotated_log_line", + "logFile": "core_log.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_PreviousLogs", + "search": "This log file is for previous logs", + "logFile": "session0.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI", + "search": "WhoAmI feature is", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI_Status", + "search": "WhoAmI feature", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "some 
random lines for filling the file", + "logFile": "rotated.txt", + "use": "count" + } + ], + "ReportingAdjustments": { + "ReportOnUpdate": true, + "FirstReportingInterval": 5, + "MaxUploadLatency": 10 + }, + "HTTP": { + "URL": "https://mockxconf:50051/dataLookeMock/", + "Compression": "None", + "Method": "POST", + "RequestURIParameter": [ + { + "Name": "reportName", + "Reference": "Profile.Name" + } + ] + }, + "JSONEncoding": { + "ReportFormat": "NameValuePair", + "ReportTimestamp": "Unix-Epoch" + } + } + } + ] +}''' + +data_temp_for_persistence = '''{ + "profiles": [ + { + "name": "temp_per_66", + "hash": "temp_per_66", + "value": { + "Name": "RDKB_Profile_1", + "Description": "RDKB_Profile", + "Version": "0.1", + "Protocol": "HTTP", + "EncodingType": "JSON", + "ActivationTimeOut": 20, + "ReportingInterval": 10, + "GenerateNow": false, + "RootName": "temp_Split_Marker", + "TimeReference": "2023-01-25T13:47:00Z", + "Parameter": [ + { + "type": "dataModel", + "name": "IUI_VERSION", + "reference": "Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", + "method":"subscribe", + "use": "accumulate", + "reportTimestamp":"Unix-Epoch" + }, + { + "type": "event", + "eventName": "TEST_EVENT_MARKER_2", + "component": "sysint", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "Rotated_log_line", + "logFile": "core_log.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_PreviousLogs", + "search": "This log file is for previous logs", + "logFile": "session0.txt", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI", + "search": "WhoAmI feature is", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_WhoAmI_Status", + "search": "WhoAmI feature", + "logFile": "telemetry2_0.txt.0", + "use": "absolute" + }, + { + "type": "grep", + "marker": "SYS_INFO_Rotated_Log", + "search": "some random lines for filling the file", + "logFile": "rotated.txt", 
+ "use": "count" + } + ], + "ReportingAdjustments": { + "ReportOnUpdate": true, + "FirstReportingInterval": 5, + "MaxUploadLatency": 10 + }, + "HTTP": { + "URL": "https://mockxconf:50051/dataTempLookeMock/", + "Compression": "None", + "Method": "POST", + "RequestURIParameter": [ + { + "Name": "reportName", + "Reference": "Profile.Name" + } + ] + }, + "JSONEncoding": { + "ReportFormat": "NameValuePair", + "ReportTimestamp": "Unix-Epoch" + } + } + } + ] }''' data_with_first_reporting_interval_max_latency = '''{ @@ -1851,14 +2051,16 @@ "Description": "Trigger condition working case", "Version": ".01", "Protocol": "HTTP", + "ReportingInterval": 520, "EncodingType": "JSON", "GenerateNow": false, + "ActivationTimeout": 520, "TimeReference": "0001-01-01T00:00:00Z", - "ActivationTimeout": 120, "Parameter": [], "TriggerCondition": [ { "type": "dataModel", + "name": "rp_trigger", "reference": "Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable", "operator": "any" } @@ -1883,3 +2085,45 @@ ] }''' +data_temp_with_triggerconditon_pos = '''{ + "profiles": [ + { + "name": "TC_temp_pos", + "hash": "TC_temp_pos", + "value": { + "Description": "Trigger condition working case", + "Version": ".01", + "Protocol": "HTTP", + "ReportingInterval": 520, + "EncodingType": "JSON", + "GenerateNow": false, + "ActivationTimeout": 520, + "TimeReference": "0001-01-01T00:00:00Z", + "Parameter": [], + "TriggerCondition": [ + { + "type": "dataModel", + "name": "temp_trigger", + "reference": "Device.DeviceInfo.X_RDKCENTRAL-COM_FirmwareDownloadDeferReboot", + "operator": "any" + } + ], + "HTTP": { + "URL": "https://mockxconf:50051/dataLakeMock/", + "Compression": "None", + "Method": "POST", + "RequestURIParameter": [ + { + "Name": "reportName", + "Reference": "Profile.Name" + } + ] + }, + "JSONEncoding": { + "ReportFormat": "NameValuePair", + "ReportTimestamp": "None" + } + } + } + ] +}''' diff --git a/test/functional-tests/tests/rotated.txt b/test/functional-tests/tests/rotated.txt 
new file mode 100644 index 00000000..52b2c4a7 --- /dev/null +++ b/test/functional-tests/tests/rotated.txt @@ -0,0 +1,5 @@ +some random line for filling the file +some random line for filling the file +some random lines for filling the file +some random line for filling the file +some random line for filling the file diff --git a/test/functional-tests/tests/rotated.txt.1 b/test/functional-tests/tests/rotated.txt.1 new file mode 100644 index 00000000..28e91cbe --- /dev/null +++ b/test/functional-tests/tests/rotated.txt.1 @@ -0,0 +1,10 @@ +some random line for filling the file +some random line for filling the file +some random lines for filling the file +some random line for filling the file +some random line for filling the file +some random line for filling the file +some random line for filling the file +some random line for filling the file +some random lines for filling the file +some random line for filling the file diff --git a/test/functional-tests/tests/rotated.txt.reduced b/test/functional-tests/tests/rotated.txt.reduced new file mode 100644 index 00000000..5576bf39 --- /dev/null +++ b/test/functional-tests/tests/rotated.txt.reduced @@ -0,0 +1,2 @@ +some random line for filling the file +some random lines for filling the file diff --git a/test/functional-tests/tests/test_multiprofile_msgpacket.py b/test/functional-tests/tests/test_multiprofile_msgpacket.py index 07f896ad..24494034 100644 --- a/test/functional-tests/tests/test_multiprofile_msgpacket.py +++ b/test/functional-tests/tests/test_multiprofile_msgpacket.py @@ -71,7 +71,6 @@ def test_without_namefield(): assert LOG_MSG not in grep_T2logs(LOG_MSG) #Empty string in namefield assert HASH_ERROR_MSG in grep_T2logs(HASH_ERROR_MSG) #without hash field - #negative case without hashvalue, without version field & without Protocol field @pytest.mark.run(order=2) def test_without_hashvalue(): @@ -87,7 +86,6 @@ def test_without_hashvalue(): assert "TR_AC15" not in grep_T2logs(LOG_PROFILE_ENABLE) # without 
Protocol field assert PROTOCOL_ERROR_MSG in grep_T2logs(PROTOCOL_ERROR_MSG) # verify whether the protocol is given - #negative cases: # random value for Protocol # empty string for version @@ -111,8 +109,6 @@ def test_with_wrong_protocol_value(): assert "TR_AC13" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify Profile cannot be enabled for empty protocol sleep(2) - - #negative cases # without EncodingType & ActivationTimeout values # without encodingType param @@ -122,6 +118,8 @@ def test_with_wrong_protocol_value(): @pytest.mark.run(order=4) def test_without_EncodingType_ActivationTimeout_values(): + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_empty_profile)) # instead of telemetry restart giving empty profile to clear previous profile data + sleep(2) ERROR_REPORTING_INTERVAL = "If TriggerCondition is not given ReportingInterval parameter is mandatory" ERROR_ENCODING = "Incomplete Profile information, ignoring profile" clear_T2logs() @@ -133,7 +131,9 @@ def test_without_EncodingType_ActivationTimeout_values(): run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") sleep(2) rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_without_EncodingType_ActivationTimeout_values)) - sleep(25) + sleep(5) + # 215 - Multiple profiles configured simultaneously + # 202 - Profile setting and parsing in message pack format assert "TR_AC18" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is enabled with an empty encodingType assert "TR_AC19" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is enabled with an empty ActivationTimeout assert "TR_AC20" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is not enabled without encodingType param @@ -143,7 +143,9 @@ def test_without_EncodingType_ActivationTimeout_values(): assert "TR_AC22" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is not enabled without ReportingInterval param assert "TR_AC23" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is enabled 
without GenerateNow param sleep(5) - + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_empty_profile)) # instead of telemetry restart giving empty profile to clear previous profile data + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_empty_profile) + sleep(2) #1).positive case for working of Reporting Interval #2).positive case for event marker & with count @@ -157,9 +159,12 @@ def test_reporting_interval_working(): clear_persistant_files() run_telemetry() run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_empty_profile)) sleep(2) rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_reporting_interval)) - sleep(5) + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_temp_with_reporting_interval) + sleep(2) REPORTING_INTERVAL_LOG1 = grep_T2logs("reporting interval is taken - TR_AC732") command1 = ["telemetry2_0_client TEST_EVENT_MARKER_1 300"] @@ -175,20 +180,32 @@ def test_reporting_interval_working(): run_shell_command(command3) sleep(2) assert "20 sec" in REPORTING_INTERVAL_LOG1 - sleep(10) - assert "TIMEOUT for profile" in grep_T2logs("TR_AC732") #Verify reporting interval - assert "TEST_EVENT_MARKER_1\":\"2" in grep_T2logs("cJSON Report ") #verify event marker for count - assert "occurrance1" in grep_T2logs("TEST_EVENT_MARKER_2") #verify event marker for accummulate - 1 - assert "occurrance2" in grep_T2logs("TEST_EVENT_MARKER_2") #verify event marker for accummulate - 2 - sleep(2) #wait for 2 sec to verify whether this valid profile is running and generating report + sleep(15) + assert "TIMEOUT for profile" in grep_T2logs("TR_AC732") # 218 -Report on interval + assert "TEST_EVENT_MARKER_1\":\"2" in grep_T2logs("FR2_US_TC3") # 234 -Include data from data source T2 events as count + assert "occurrance1\",\"occurrance2" in grep_T2logs("FR2_US_TC3") # 212 - Include data from data source as T2 events - 
1 + assert "TEST_EVENT_MARKER_2_CT" in grep_T2logs("FR2_US_TC3") # 248 - Event accumulate with and without timestamp in report profiles for event markers. + # 216 - Epoch time/UTC time support + assert "Device.X_RDK_Xmidt.SendData" in grep_T2logs("T2 asyncMethodHandler called: ") # 228 - Report sending with protocol as RBUS_METHOD in report profiles. + assert "send via rbusMethod is failure" in grep_T2logs("send via rbusMethod is failure") # 225 - Caching of upload failed reports - 1 + assert "Report Cached, No. of reportes cached = " in grep_T2logs("Report Cached, No. of reportes cached = ") # 225 - Caching of upload failed reports - 2 + assert "TIMEOUT for profile" in grep_T2logs("temp_AC732") # 314 - Report on interval + assert "occurrance1\",\"occurrance2" in grep_T2logs("temp_AC732") # 212 - Include data from data source as T2 events - 1 + assert "Device.X_RDK_Xmidt.SendData" in grep_T2logs("T2 asyncMethodHandler called: ") # 324 - Report sending over RBUS_METHOD + assert "send via rbusMethod is failure" in grep_T2logs("send via rbusMethod is failure") # 321 - Caching of upload failed reports + assert "Report Cached, No. of reportes cached = " in grep_T2logs("Report Cached, No. 
of reportes cached = ") # 321 - Caching of upload failed reports + + run_shell_command("/usr/local/bin/rbus_timeout.sh") + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_empty_profile)) # instead of telemetry restart giving empty profile to clear previous profile data + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_empty_profile) + sleep(2) # verification for GenerateNow # count - grep marker validation # absolute - grep marker validation # Trim - grep marker validation # Datamodel validation -''' @pytest.mark.run(order=6) def test_for_Generate_Now(): clear_T2logs() @@ -210,13 +227,12 @@ def test_for_Generate_Now(): LOG_GENERATE_NOW = "Waiting for 0 sec for next TIMEOUT for profile" rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_Generate_Now)) - sleep(10) - assert "TR_AC777" in grep_T2logs(LOG_GENERATE_NOW) # verification for GenerateNow - assert "SYS_INFO_CrashPortalUpload_success\":\"2" in grep_T2logs("cJSON Report ") # count - grep marker validation - assert "FILE_Upload_Progress\":\" newfile1 20%" in grep_T2logs("cJSON Report ") # absolute - grep marker validation - assert "FILE_Read_Progress\":\"newfile2 line 10" in grep_T2logs("cJSON Report ") # Trim - grep marker validation - assert "MODEL_NAME" in grep_T2logs("cJSON Report ") # Datamodel validation -''' + sleep(2) + assert "TR_AC777" in grep_T2logs(LOG_GENERATE_NOW) # 235 - Support for Generate Now of profiles + assert "SYS_INFO_CrashPortalUpload_success\":\"2" in grep_T2logs("cJSON Report ") # 236 - Include data from data source as log files with string match pattern as Count + assert "FILE_Upload_Progress\":\" newfile1 20%" in grep_T2logs("cJSON Report ") # 237 - Include data from data source as log files with string match pattern as absolute + assert "FILE_Read_Progress\":\"newfile2 line 10" in grep_T2logs("cJSON Report ") # 238 - Include data from data source as log files with string match pattern as with Trim + assert "MODEL_NAME" in 
grep_T2logs("cJSON Report ") # 206 - Include data from data source as TR181 Parameter # Negative case with activation timeout less than reporting interval # Postive case for Empty report sent when reportEmpty is true @@ -225,7 +241,12 @@ def test_for_Generate_Now(): @pytest.mark.run(order=7) def test_for_invalid_activation_timeout(): ERROR_PROFILE_TIMEOUT = "activationTimeoutPeriod is less than reporting interval. invalid profile: " - rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL", "string", "https://mockxconf:50050/loguploader1/getT2DCMSettings") + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL", "string", "https://mockxconf:50050/loguploader2/getT2DCMSettings") + os.makedirs('/opt/logs/PreviousLogs', exist_ok=True) + file = open('/opt/logs/session0.txt', 'w') + file.write("This log file is for previous logs\n") + file.write("Second line in the previous logs\n") + file.close() clear_T2logs() kill_telemetry(9) RUN_START_TIME = dt.now() @@ -235,14 +256,31 @@ def test_for_invalid_activation_timeout(): sleep(5) run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_less_activation_timeout)) + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.2") + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.3") + command2 = ["telemetry2_0_client SYS_EVENT_TEST_accum 7"] + run_shell_command(command2) + sleep(2) + command2 = ["telemetry2_0_client SYS_EVENT_TEST_accum 6"] + run_shell_command(command2) sleep(60) assert "TR_AC88" in grep_T2logs(ERROR_PROFILE_TIMEOUT) # Verify profile not set if activation timeout is less than reporting interval - assert "MODEL_NAME\":\"NULL" in grep_T2logs("cJSON Report ") # verify Empty report is sent for reportEmpty is true - assert "TR_AC6919" in grep_T2logs("firstreporting interval is given") # - 
assert "5 sec" in grep_T2logs("firstreporting interval is given") #} Verify Firstreporting Interval is working - assert "NEW TEST PROFILE" in grep_T2logs(LOG_PROFILE_SET) # Verify DCM profile is set - assert "60 sec" in grep_T2logs("reporting interval is taken - NEW TEST PROFILE") #Verify DCM profile is running - assert "AccountId\":\"Platform_Container_Test_DEVICE" in grep_T2logs("cJSON Report ") #verify report generated for DCM profile + assert "MODEL_NAME\":\"NULL" in grep_T2logs("cJSON Report ") # 239 - Support for reportEmpty of profiles + assert "TR_AC6919" in grep_T2logs("firstreporting interval is given") # 240 - Support for First Reporting Interval -1 + assert "5 sec" in grep_T2logs("firstreporting interval is given") # 240 - Support for First Reporting Interval - 2 + assert "NEW TEST PROFILE" in grep_T2logs(LOG_PROFILE_SET) # 101 - Report fetch and parse via HTTP + assert "60 sec" in grep_T2logs("reporting interval is taken - NEW TEST PROFILE") # 107 - Configurable reporting interval + # 241 - Support for DCM profile and multiprofile parallel execution + assert "AccountId\":\"Platform_Container_Test_DEVICE" in grep_T2logs("cJSON Report ") # 102 - Include data from data source as TR181 Parameter + assert "SYS_GREP_TEST" in grep_T2logs("cJSON Report ") # 110, 111 - Data harvesting from previous logs folder for DCA profiles with log file search markers. + assert "SYS_GREP_TEST_2" in grep_T2logs("cJSON Report ") # 104 - Capability to support multiple split markers for the same log line + assert "SYS_EVENT_TEST_accum\":[\"7\",\"6\"" in grep_T2logs("cJSON Report ") # 105 - Include data from data source as T2 events + # 106 - Include data from data source T2 events as Accumulate + assert "SYS_TEST_ReportUpload" in grep_T2logs("cJSON Report ") # 103 - Include data from data source as log files with string match pattern + assert "Report Cached, No. of reportes cached = " in grep_T2logs("Report Cached, No. 
of reportes cached = ") # 108 - Caching of upload failed reports - xconf + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL", "string", "https://mockxconf:50050/loguploader1/getT2DCMSettings") + #1).positive case for activation timeout #2).regex - grep marker validation @@ -251,7 +289,7 @@ def test_for_invalid_activation_timeout(): #5).positive case with delete on timeout @pytest.mark.run(order=8) def test_with_delete_on_timeout(): - clear_T2logs() + #clear_T2logs() RUN_START_TIME = dt.now() run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") sleep(2) @@ -263,17 +301,26 @@ def test_with_delete_on_timeout(): sleep(2) LOG_PROFILE_TIMEOUT = "Profile activation timeout" LOG_DELETE_PROFILE = "removing profile :" - rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_delete_on_timeout)) - sleep(5) + rbus_set_data(T2_REPORT_PROFILE_PARAM, "string", data_with_delete_on_timeout) + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_temp_with_delete_on_timeout) + sleep(2) command2 = ["telemetry2_0_client TEST_EVENT_MARKER_2 occurrance17"] run_shell_command(command2) sleep(30) - assert "TR_AC66" in grep_T2logs(LOG_PROFILE_TIMEOUT) # verification for activation timeout - assert "SYS_INFO_CrashPortalUpload_success\":\"200" in grep_T2logs("cJSON Report ") # regex - grep marker validation - assert "MODEL_NAME\":\"DOCKER" in grep_T2logs("cJSON Report ") # regex - Datamodel validation - assert "TEST_EVENT_MARKER_2\":\"17" in grep_T2logs("cJSON Report ") # regex - Event marker validation - assert "TR_AC66" in grep_T2logs(LOG_DELETE_PROFILE) #verify profile is removed from active profile list if DeleteOnTimeout is true + assert "rp_TR_AC66" in grep_T2logs(LOG_PROFILE_ENABLE) # 201 - Profile setting and parsing in JSON format + assert "rp_TR_AC66" in grep_T2logs(LOG_PROFILE_TIMEOUT) # 219 - Support for activation timeout of profiles + assert "SYS_INFO_CrashPortalUpload_success\":\"200" in 
grep_T2logs("rp_TR_AC66") # 222, 250 - Regex support for data formating on log grep patterns in report profiles. + assert "MODEL_NAME\":\"DOCKER" in grep_T2logs("rp_TR_AC66") # 243 - Include data from data source as TR181 Parameter with regex + assert "TEST_EVENT_MARKER_2\":\"17" in grep_T2logs("rp_TR_AC66") # 242 - Include data from data source as T2 events with regex + assert "rp_TR_AC66" in grep_T2logs(LOG_DELETE_PROFILE) # 232 -Support for Delete on Timeout of profiles + + assert "temp_TR_AC66" in grep_T2logs(LOG_PROFILE_ENABLE) # 301 - Profile setting and parsing in JSON format + assert "temp_TR_AC66" in grep_T2logs(LOG_PROFILE_TIMEOUT) # 315 - Support for activation timeout of profiles + assert "SYS_INFO_CrashPortalUpload_success\":\"200" in grep_T2logs("temp_TR_AC66") # 318 - Regex support for log grep patterns + assert "MODEL_NAME\":\"DOCKER" in grep_T2logs("temp_TR_AC66") # 304 - Include data from data source as TR181 Parameter + assert "TEST_EVENT_MARKER_2\":\"17" in grep_T2logs("temp_TR_AC66") # 309 - Include data from data source as T2 events #1.First reporting interval is applicable only when time ref is default - non-working case #2.Maxlatency is applicable only when time ref is not default - non- working case @@ -288,6 +335,7 @@ def test_for_first_reporting_interval_Maxlatency(): run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") sleep(2) rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_empty_profile)) # instead of telemetry restart giving empty profile to clear previous profile data + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_empty_profile) sleep(2) rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_first_reporting_interval_max_latency)) sleep(5) @@ -298,7 +346,8 @@ def test_for_first_reporting_interval_Maxlatency(): assert "NA_FRI" not in grep_T2logs("Waiting for 5 sec for next TIMEOUT for profile as firstreporting interval is given") sleep(10) assert "NA_FRI" in 
grep_T2logs(TIMEOUT_LOG) - assert "NA_MLU" in grep_T2logs(TIMEOUT_LOG) #verify when timeref is not default max uploadlatency is accepted + assert "NA_MLU" in grep_T2logs(TIMEOUT_LOG) # verify when timeref is not default max uploadlatency is accepted + # 217, 245 - Delayed reporting support/ Maxlatency assert MLU_ERROR_LOG in grep_T2logs(MLU_ERROR_LOG) @pytest.mark.run(order=10) @@ -319,7 +368,49 @@ def test_for_triggerCondition_negative_case(): assert "Unexpected operator verifyMsgPckTriggerCondition ++out" in grep_T2logs("Unexpected operator verifyMsgPckTriggerCondition ++out") assert "Null threshold verifyMsgPckTriggerCondition ++out" in grep_T2logs("Null threshold verifyMsgPckTriggerCondition ++out") assert "Unexpected reference verifyMsgPckTriggerCondition ++out" in grep_T2logs("Unexpected reference verifyMsgPckTriggerCondition ++out") -''' + +@pytest.mark.run(order=12) +def test_for_subscribe_tr181(): + clear_T2logs() + kill_telemetry(9) + RUN_START_TIME = dt.now() + remove_T2bootup_flag() + clear_persistant_files() + os.makedirs('/opt/logs/PreviousLogs', exist_ok=True) + file = open('/opt/logs/PreviousLogs/session0.txt', 'w') + file.write("This log file is for previous logs\n") + file.write("Second line in the previous logs\n") + file.close() + run_telemetry() + sleep(2) + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 DEBUG") + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.1") + sleep(1) + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_split_markers)) + sleep(1) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_temp_with_split_markers)) + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.2") + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.3") + sleep(10) + assert "SYS_INFO_WhoAmI" in grep_T2logs("rp_Split_Marker") # 209 - Include data from data 
source as log files with string match pattern + assert "SYS_INFO_WhoAmI_Status" in grep_T2logs("rp_Split_Marker") # 211 - Capability to support multiple split markers for the same log line + assert "SYS_INFO_PreviousLogs" in grep_T2logs("rp_Split_Marker") # 210, 231, 246 - Data harvesting from previous logs folder for report profiles with log file search markers. + assert "T2_Container_0.0.2" in grep_T2logs("rp_Split_Marker") # 207 - Support for subscribing to TR181 Parameter value change + # 214 -Include data from data source Tr181 parameters as Accumulate + assert "T2_Container_0.0.3" in grep_T2logs("rp_Split_Marker") # 214 -Include data from data source Tr181 parameters as Accumulate + assert "IUI_VERSION_CT" in grep_T2logs("rp_Split_Marker") # 249 - Event accumulate with and without timestamp in report profiles for datamodel markers. + assert "Report Sent Successfully over HTTP" in grep_T2logs ("Report Sent Successfully over HTTP") # 223 - Report sending over HTTP protocol + + assert "SYS_INFO_WhoAmI" in grep_T2logs("temp_Split_Marker") # 307 - Include data from data source as log files with string match pattern + assert "SYS_INFO_PreviousLogs" in grep_T2logs("temp_Split_Marker") # 308, 327 - Capability to read backwards from previous logs + assert "T2_Container_0.0.2" in grep_T2logs("temp_Split_Marker") # 305 - Support for subscribing to TR181 Parameter value change + # 312 - Include data from data source Tr181 parameters as Accumulate + assert "T2_Container_0.0.3" in grep_T2logs("temp_Split_Marker") # 312 - Include data from data source Tr181 parameters as Accumulate + assert "Report Sent Successfully over HTTP" in grep_T2logs ("Report Sent Successfully over HTTP") # 319 - Report sending over HTTP protocol + @pytest.mark.run(order=11) def test_for_triggerCondition_working_case(): clear_T2logs() @@ -330,15 +421,76 @@ def test_for_triggerCondition_working_case(): run_telemetry() sleep(5) run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") - sleep(2) 
+ subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_FirmwareDownloadDeferReboot bool false", shell=True) + subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool true", shell=True) + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_with_triggerconditon_pos)) - sleep(5) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_temp_with_triggerconditon_pos)) + sleep(2) subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool false", shell=True) + subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_FirmwareDownloadDeferReboot bool true", shell=True) sleep(2) - assert "Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable" in grep_T2logs("TriggerConditionResult") - assert "false" in grep_T2logs("TriggerConditionResult") + assert "Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable" in grep_T2logs("TriggerConditionResult") # 220, 244 - Report on trigger condition + assert "false" in grep_T2logs("TriggerConditionResult") # 251 - Report generation on trigger condition with stress testing for covering deadlock scenarios - 1 + assert "Device.DeviceInfo.X_RDKCENTRAL-COM_FirmwareDownloadDeferReboot" in grep_T2logs("TriggerConditionResult") # 316 - Report on trigger condition - 1 + assert "true" in grep_T2logs("TriggerConditionResult") # 316 - Report on trigger condition - 2 subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool true", shell=True) sleep(2) assert "Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable" in grep_T2logs("TriggerConditionResult") - assert "true" in grep_T2logs("TriggerConditionResult") - ''' + assert "true" in grep_T2logs("TriggerConditionResult") # 251 - Report generation on trigger condition with stress testing for covering deadlock scenarios - 2 + sleep(1) + subprocess.run("rbuscli set 
Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool false", shell=True) + subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool true", shell=True) + subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool false", shell=True) + +@pytest.mark.run(order=13) +def test_for_duplicate_hash(): + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_empty_profile)) + sleep(2) + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 DEBUG") + run_shell_command("cp /opt/logs/core_log.txt /opt/logs/core_log.txt.0") + run_shell_command("cp /opt/logs/core_log.txt /opt/logs/core_log.txt.1") + run_shell_command("echo Rotated_log_line >> /opt/logs/core_log.txt.1") + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_for_persistence)) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_temp_for_persistence)) + sleep(2) + rbus_set_data(T2_REPORT_PROFILE_PARAM_MSG_PCK, "string", tomsgpack(data_for_persistence)) + sleep(2) + assert "per_66" in grep_T2logs("hash already exist") # 203 - Check for HASH value matches of profile to avoid duplicate processing + run_shell_command("cp test/functional-tests/tests/rotated.txt /opt/logs/") + sleep(6) + assert "SYS_INFO_Rotated_Log\":\"1" in grep_T2logs("cJSON Report ") + run_shell_command("cp test/functional-tests/tests/rotated.txt.1 /opt/logs/") + run_shell_command("cp test/functional-tests/tests/rotated.txt.reduced /opt/logs/rotated.txt") + sleep(10) + assert "SYS_INFO_Rotated_Log\":\"1" in grep_T2logs("cJSON Report ") # 247 - Report generation for profiles with log grep markers during log file rotation scenarios. 
+ assert "per_66" in grep_T2logs("URL: https://mockxconf:50051/dataLookeMock") # 226 - Configurable reporting end points + # 227 - Configurable URL parameters for HTTP Protocol + #assert "temp_Split66" in grep_T2logs("URL: https://mockxconf:50051/dataTempLookeMock") # 322 - Configurable reporting end points + # 323 - Configurable URL parameters for HTTP Protocol + assert "per_66" in grep_T2logs("removing profile :") # 229 - Profile persistence - 1 + assert "temp_per_66" in grep_T2logs("removing profile :") # 326 - Profile persistence - 1 + clear_T2logs() + RUN_START_TIME = dt.now() + kill_telemetry(9) + remove_T2bootup_flag() + run_telemetry() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(5) + assert "per_66" in grep_T2logs(LOG_PROFILE_ENABLE) # 229 - Profile persistence - 2 + assert "temp_per_66" not in grep_T2logs(LOG_PROFILE_ENABLE) # 326 - Profile persistence - 2 + kill_telemetry(29) + sleep(2) + assert "LOG_UPLOAD_ONDEMAND received" in grep_T2logs("LOG_UPLOAD_ONDEMAND received") # 221, 252- Forced on demand reporting outside the regular reporting intervals. - 1 + assert "per_66" in grep_T2logs("Sending Interrupt signal to Timeout Thread of profile") # 252 - Forced on demand reporting outside the regular reporting intervals. - 2 + assert "rp_Split_Marker" in grep_T2logs("cJSON Report ") # 247 - Report generation for profiles with log grep markers during log file rotation scenarios. + +@pytest.mark.run(order=14) +def test_stress_test(): + command_to_get_pid = "pidof telemetry2_0" + pid1 = run_shell_command(command_to_get_pid) + run_shell_command("test/functional-tests/tests/t2_app 9999") + sleep(5) + pid2 = run_shell_command(command_to_get_pid) + assert pid1==pid2 # 253 - Stress testing of interaction with rbus interface to check for any deadlocks or rbus timeouts. 
+ diff --git a/test/functional-tests/tests/test_runs_as_daemon.py b/test/functional-tests/tests/test_runs_as_daemon.py index 27bc0019..0ce3f5e4 100755 --- a/test/functional-tests/tests/test_runs_as_daemon.py +++ b/test/functional-tests/tests/test_runs_as_daemon.py @@ -47,4 +47,4 @@ def test_tear_down(): run_shell_command(command_to_stop) command_to_get_pid = "pidof telemetry2_0" pid = run_shell_command(command_to_get_pid) - assert pid == "" \ No newline at end of file + assert pid == "" diff --git a/test/functional-tests/tests/test_temp_profile.py b/test/functional-tests/tests/test_temp_profile.py new file mode 100644 index 00000000..bc096391 --- /dev/null +++ b/test/functional-tests/tests/test_temp_profile.py @@ -0,0 +1,349 @@ +#################################################################################### +# If not stated otherwise in this file or this component's Licenses file the +# following copyright and licenses apply: +# +# Copyright 2024 RDK Management +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#################################################################################### + +import subprocess +import requests +from time import sleep +from datetime import datetime as dt +import os +import time +from basic_constants import * +from helper_functions import * +from report_profiles import * +import pytest +import msgpack +import json +import base64 +from test_runs_as_daemon import run_shell_command + +RUN_START_TIME = None +LOG_PROFILE_ENABLE = "Successfully enabled profile :" +LOG_PROFILE_SET = "Successfully set profile :" + +#negative cases: +# random value for Protocol +# empty string for version +# empty string for protocol +@pytest.mark.run(order=3) +def test_with_wrong_protocol_value(): + clear_T2logs() + kill_telemetry(9) + RUN_START_TIME = dt.now() + remove_T2bootup_flag() + clear_persistant_files() + run_telemetry() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_wrong_protocol_value)) + sleep(10) + ERROR_WRONG_PROTOCOL = "Unsupported protocol" + assert ERROR_WRONG_PROTOCOL in grep_T2logs(ERROR_WRONG_PROTOCOL) #Verify the right protocol is given + assert "TR_AC16" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is not enabled with an incorrect protocol + assert "TR_AC17" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify Profile can be enabled for empty version + assert "TR_AC13" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify Profile cannot be enabled for empty protocol + sleep(2) + +#negative cases +# without EncodingType & ActivationTimeout values +# without encodingType param +# without ActivationTimeout param +# without ReportingInterval param +# without GenerateNow param + +@pytest.mark.run(order=4) +def test_without_EncodingType_ActivationTimeout_values(): + ERROR_REPORTING_INTERVAL = "If TriggerCondition is not given ReportingInterval parameter is mandatory" + ERROR_ENCODING = "Incomplete profile information, unable to create profile" + 
clear_T2logs() + kill_telemetry(9) + RUN_START_TIME = dt.now() + remove_T2bootup_flag() + clear_persistant_files() + run_telemetry() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_without_EncodingType_ActivationTimeout_values)) + sleep(25) + #Multiple profiles configured simultaneously + assert "TR_AC18" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is enabled with an empty encodingType + assert "TR_AC19" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is not enabled with an empty ActivationTimeout #differs to rp behaviour + assert "TR_AC20" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is not enabled without encodingType param + assert ERROR_ENCODING in grep_T2logs(ERROR_ENCODING) + assert "TR_AC21" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is enabled without ActivationTimeout param + # 313 - Multiple profiles configured simultaneously - 1 + assert ERROR_REPORTING_INTERVAL in grep_T2logs(ERROR_REPORTING_INTERVAL) # Verify ReportingInterval error is thrown + assert "TR_AC22" not in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is not enabled without ReportingInterval param + assert "TR_AC23" in grep_T2logs(LOG_PROFILE_ENABLE) # Verify profile is enabled without GenerateNow param + # 313 - Multiple profiles configured simultaneously - 2 + sleep(5) + +#1).positive case for working of Reporting Interval +#2).positive case for event marker & with count +#3).positive case for event marker with accumulate +@pytest.mark.run(order=11) +def test_reporting_interval_working(): + clear_T2logs() + kill_telemetry(9) + RUN_START_TIME = dt.now() + remove_T2bootup_flag() + clear_persistant_files() + run_telemetry() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_with_reporting_interval) + sleep(5) + REPORTING_INTERVAL_LOG1 = grep_T2logs("reporting interval is taken - 
TR_AC732") + + command1 = ["telemetry2_0_client TEST_EVENT_MARKER_1 300"] + command2 = ["telemetry2_0_client TEST_EVENT_MARKER_2 occurrance1"] + command3 = ["telemetry2_0_client TEST_EVENT_MARKER_2 occurrance2"] + + run_shell_command(command1) + sleep(2) + run_shell_command(command1) + sleep(2) + run_shell_command(command2) + sleep(2) + run_shell_command(command3) + sleep(2) + assert "20 sec" in REPORTING_INTERVAL_LOG1 + sleep(10) + assert "TIMEOUT for profile" in grep_T2logs("TR_AC732") # 314 - Report on interval + assert "TEST_EVENT_MARKER_1\":\"2" in grep_T2logs("cJSON Report ") #verify event marker for count + assert "occurrance1" in grep_T2logs("TEST_EVENT_MARKER_2") # 312 - Include data from data source T2 events parameters as Accumulate + assert "occurrance2" in grep_T2logs("TEST_EVENT_MARKER_2") # 312 - Include data from data source T2 events parameters as Accumulate + assert "TEST_EVENT_MARKER_2_CT" in grep_T2logs("cJSON Report ") #Epoch time/UTC time support + assert "Device.X_RDK_Xmidt.SendData" in grep_T2logs("T2 asyncMethodHandler called: ") # 324 - Report sending over RBUS_METHOD + assert "send via rbusMethod is failure" in grep_T2logs("send via rbusMethod is failure") # 321 - Caching of upload failed reports + assert "Report Cached, No. of reportes cached = " in grep_T2logs("Report Cached, No. 
of reportes cached = ") # 321 - Caching of upload failed reports + run_shell_command("/usr/local/bin/rbus_timeout.sh") + +# verification for GenerateNow +# count - grep marker validation +# absolute - grep marker validation +# Trim - grep marker validation +# Datamodel validation +@pytest.mark.run(order=6) +def test_for_Generate_Now(): + clear_T2logs() + os.makedirs('/opt/logs', exist_ok=True) + # Create log file with the logs needed for grep marker + file = open('/opt/logs/core_log.txt', 'w') + file.write( + "Success uploading report 300\n" + "Success uploading report 200\n" + "random string1\n" + "rando\n" + "file uploading newfile1 20%\n" + "file reading newfile2 line 10\n" + "file writing to file.txt 22 lines\n" + "file writing to file.txt 23 lines\n" + ) + file.close() + sleep(2) + + LOG_GENERATE_NOW = "Waiting for timeref or reporting interval for the profile" + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_Generate_Now)) + sleep(5) + assert "TR_AC777" in grep_T2logs(LOG_GENERATE_NOW) # verification for GenerateNow + kill_telemetry(29) + sleep(2) + assert "LOG_UPLOAD_ONDEMAND received" in grep_T2logs("LOG_UPLOAD_ONDEMAND received") # 317 - Forced on demand reporting to support log upload - 1 + assert "TR_AC767" in grep_T2logs("Interrupted before TIMEOUT for profile") # 317 - Forced on demand reporting to support log upload - 2 + assert "SYS_INFO_CrashPortalUpload_success\":\"2" in grep_T2logs("cJSON Report ") # count - grep marker validation + assert "FILE_Upload_Progress\":\" newfile1 20%" in grep_T2logs("cJSON Report ") # absolute - grep marker validation + assert "FILE_Read_Progress\":\"newfile2 line 10" in grep_T2logs("cJSON Report ") # Trim - grep marker validation + assert "MODEL_NAME" in grep_T2logs("cJSON Report ") # Datamodel validation + +# Negative case with activation timeout less than reporting interval +# Positive case for Empty report sent when reportEmpty is true +# Positive case for FirstReporting Interval +# DCM profile is
running parallel to the Multiprofile - positive +@pytest.mark.run(order=7) +def test_for_invalid_activation_timeout(): + ERROR_PROFILE_TIMEOUT = "activationTimeoutPeriod is less than reporting interval. invalid profile: " + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL", "string", "https://mockxconf:50050/loguploader1/getT2DCMSettings") + clear_T2logs() + kill_telemetry(9) + RUN_START_TIME = dt.now() + remove_T2bootup_flag() + clear_persistant_files() + run_telemetry() + sleep(5) + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_less_activation_timeout)) + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.2") + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.3") + command2 = ["telemetry2_0_client SYS_EVENT_TEST_accum 7"] + run_shell_command(command2) + sleep(2) + command2 = ["telemetry2_0_client SYS_EVENT_TEST_accum 6"] + run_shell_command(command2) + sleep(60) + #kill_telemetry(29) + assert "TR_AC88" in grep_T2logs(ERROR_PROFILE_TIMEOUT) # Verify profile not set if activation timeout is less than reporting interval + assert "MODEL_NAME\":\"NULL" in grep_T2logs("cJSON Report ") # verify Empty report is sent for reportEmpty is true + assert "TR_AC6919" in grep_T2logs("firstreporting interval is given") # + assert "5 sec" in grep_T2logs("firstreporting interval is given") #} Verify Firstreporting Interval is working + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL", "string", "https://mockxconf:50050/loguploader1/getT2DCMSettings") + +#1).positive case for activation timeout +#2).regex - grep marker validation +#3).regex - Datamodel validation +#4).regex - Event marker validation +#5).positive case with delete on timeout +@pytest.mark.run(order=8) +def test_with_delete_on_timeout(): + #clear_T2logs() + RUN_START_TIME =
dt.now() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(2) + os.makedirs('/opt/logs', exist_ok=True) + # Create log file with the logs needed for grep marker + file = open('/opt/logs/core_log.txt', 'w') + file.write("Success uploading report 200\n") + file.close() + sleep(2) + LOG_PROFILE_TIMEOUT = "Profile activation timeout" + LOG_DELETE_PROFILE = "removing profile :" + RET = rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", data_with_delete_on_timeout) + print(f"RET: {RET}") + sleep(5) + command2 = ["telemetry2_0_client TEST_EVENT_MARKER_2 occurrance17"] + run_shell_command(command2) + sleep(30) + assert "TR_AC66" in grep_T2logs(LOG_PROFILE_ENABLE) # 301 - Profile setting and parsing in JSON format + assert "TR_AC66" in grep_T2logs(LOG_PROFILE_TIMEOUT) # 315 - Support for activation timeout of profiles + assert "SYS_INFO_CrashPortalUpload_success\":\"200" in grep_T2logs("cJSON Report ") # 318 - Regex support for log grep patterns + assert "MODEL_NAME\":\"DOCKER" in grep_T2logs("cJSON Report ") # 304 - Include data from data source as TR181 Parameter + assert "TEST_EVENT_MARKER_2\":\"17" in grep_T2logs("cJSON Report ") # 309 - Include data from data source as T2 events + assert "TR_AC66" in grep_T2logs(LOG_DELETE_PROFILE) # verify profile is removed from active profile list if DeleteOnTimeout is true + +#1.First reporting interval is applicable only when time ref is default - non-working case +#2.Maxlatency is applicable only when time ref is not default - non- working case +#3.Maxlatency is greater than reporting interval - non-working case +#4.Parameter array is entirely empty - non-working case +@pytest.mark.run(order=9) +def test_for_first_reporting_interval_Maxlatency(): + MLU_ERROR_LOG = "MaxUploadLatency is greater than reporting interval. 
Invalid Profile" + TIMEOUT_LOG = "TIMEOUT for profile" + PARAM_ERROR_LOG = "Incomplete profile information, unable to create profile" + RUN_START_TIME = dt.now() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_empty_profile)) # instead of telemetry restart giving empty profile to clear previous profile data + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_first_reporting_interval_max_latency)) + sleep(5) + assert "PARAM_NULL" not in grep_T2logs(LOG_PROFILE_ENABLE) + assert "NA_FRI" in grep_T2logs(LOG_PROFILE_ENABLE) #verify when timeref is not default first reporting interval is not accepted + + assert "NA_MLU" in grep_T2logs(LOG_PROFILE_ENABLE) + assert "NA_FRI" not in grep_T2logs("Waiting for 5 sec for next TIMEOUT for profile as firstreporting interval is given") + sleep(10) + assert "NA_FRI" in grep_T2logs(TIMEOUT_LOG) + assert "NA_MLU" in grep_T2logs(TIMEOUT_LOG) #verify when timeref is not default max upload latency is accepted + assert MLU_ERROR_LOG in grep_T2logs(MLU_ERROR_LOG) + +@pytest.mark.run(order=10) +def test_for_triggerCondition_negative_case(): + TC_ERROR_LOG = "TriggerCondition is invalid, unable to create profile" + RUN_START_TIME = dt.now() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 DEBUG") + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_empty_profile)) + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_triggerconditon_neg)) + sleep(5) + assert TC_ERROR_LOG in grep_T2logs(TC_ERROR_LOG) + assert "Null type verifyTriggerCondition ++out" in grep_T2logs("Null type verifyTriggerCondition ++out") + assert "Null reference verifyTriggerCondition ++out" in grep_T2logs("Null reference verifyTriggerCondition ++out") + assert "Unexpected type verifyTriggerCondition ++out" in grep_T2logs("Unexpected type verifyTriggerCondition ++out") + assert "Null operator
verifyTriggerCondition ++out" in grep_T2logs("Null operator verifyTriggerCondition ++out") + assert "Unexpected operator verifyTriggerCondition ++out" in grep_T2logs("Unexpected operator verifyTriggerCondition ++out") + assert "Null threshold verifyTriggerCondition ++out" in grep_T2logs("Null threshold verifyTriggerCondition ++out") + assert "Unexpected reference verifyTriggerCondition ++out" in grep_T2logs("Unexpected reference verifyTriggerCondition ++out") + +@pytest.mark.run(order=12) +def test_for_subscribe_tr181(): + clear_T2logs() + kill_telemetry(9) + RUN_START_TIME = dt.now() + remove_T2bootup_flag() + clear_persistant_files() + run_telemetry() + sleep(2) + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 DEBUG") + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.1") + sleep(1) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_split_markers)) + sleep(2) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.2") + sleep(1) + rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.3") + #sleep(1) + #rbus_set_data("Device.DeviceInfo.X_RDKCENTRAL-COM.IUI.Version", "string", "T2_Container_0.0.4") + sleep(10) + assert "SYS_INFO_WhoAmI" in grep_T2logs("cJSON Report ") # 307 - Include data from data source as log files with string match pattern + assert "SYS_INFO_WhoAmI_Status" in grep_T2logs("cJSON Report ") # multiple Split markers in the same line + assert "SYS_INFO_PreviousLogs" in grep_T2logs("cJSON Report ") # 308, 327 - Capability to read backwards from previous logs + assert "T2_Container_0.0.1" in grep_T2logs("IUI_VERSION\":") # 305 - Support for subscribing to TR181 Parameter value change + assert "T2_Container_0.0.2" in grep_T2logs("IUI_VERSION\":") # 312 - Include data from data source Tr181 parameters as Accumulate + assert "T2_Container_0.0.3" in grep_T2logs("IUI_VERSION\":") # 312 - Include data from 
data source Tr181 parameters as Accumulate + assert "IUI_VERSION_CT" in grep_T2logs("cJSON Report ") # report timestamp + assert "Report Sent Successfully over HTTP" in grep_T2logs ("Report Sent Successfully over HTTP") # 319 - Report sending over HTTP protocol + +@pytest.mark.run(order=11) +def test_for_triggerCondition_working_case(): + clear_T2logs() + RUN_START_TIME = dt.now() + kill_telemetry(9) + remove_T2bootup_flag() + clear_persistant_files() + run_telemetry() + sleep(5) + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool true", shell=True) + sleep(2) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_triggerconditon_pos)) + sleep(5) + subprocess.run("rbuscli set Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable bool false", shell=True) + sleep(2) + assert "Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.RDKRemoteDebugger.Enable" in grep_T2logs("TriggerConditionResult") # 316 - Report on trigger condition - 1 + assert "false" in grep_T2logs("TriggerConditionResult") # 316 - Report on trigger condition - 2 + +@pytest.mark.run(order=13) +def test_for_profile_non_persistence(): + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 DEBUG") + sleep(1) + rbus_set_data(T2_TEMP_REPORT_PROFILE_PARAM, "string", (data_with_split_markers)) + sleep(20) + assert "Split66" in grep_T2logs("URL: https://mockxconf:50051/dataLookeMock") # 322 - Configurable reporting end points + # 323 - Configurable URL parameters for HTTP Protocol + assert "Split66" in grep_T2logs("removing profile :") # 326 - Profile non persistence - 1 + clear_T2logs() + RUN_START_TIME = dt.now() + kill_telemetry(9) + remove_T2bootup_flag() + run_telemetry() + run_shell_command("rdklogctrl telemetry2_0 LOG.RDK.T2 ~DEBUG") + sleep(5) + assert "Split66" not in grep_T2logs(LOG_PROFILE_ENABLE) # 326 - Profile non persistence - 2 diff --git 
a/test/run_l2.sh b/test/run_l2.sh index 6c92b046..7cfe5165 100755 --- a/test/run_l2.sh +++ b/test/run_l2.sh @@ -31,9 +31,10 @@ if ! grep -q "PERSISTENT_PATH=/opt/" /etc/include.properties; then echo "PERSISTENT_PATH=/opt/" >> /etc/include.properties fi +gcc test/functional-tests/tests/app.c -o test/functional-tests/tests/t2_app -ltelemetry_msgsender -lt2utils # removing --exitfirst flag as it is causing the test to exit after first failure pytest -v --json-report --json-report-summary --json-report-file $RESULT_DIR/runs_as_daemon.json test/functional-tests/tests/test_runs_as_daemon.py pytest -v --json-report --json-report-summary --json-report-file $RESULT_DIR/bootup_sequence.json test/functional-tests/tests/test_bootup_sequence.py -pytest -v --json-report --json-report-summary --json-report-file $RESULT_DIR/xconf_communications.json test/functional-tests/tests/test_xconf_communications.py -pytest -v --json-report --json-report-summary --json-report-file $RESULT_DIR/msg_packet.json test/functional-tests/tests/test_multiprofile_msgpacket.py +pytest -v --json-report --json-report-summary --json-report-file $RESULT_DIR/xconf_communications.json test/functional-tests/tests/test_xconf_communications.py --exitfirst +pytest -v --json-report --json-report-summary --json-report-file $RESULT_DIR/msg_packet.json test/functional-tests/tests/test_multiprofile_msgpacket.py --exitfirst