trafficserver-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From duke8...@apache.org
Subject [trafficserver] branch master updated: remove unnecessary parts from tests
Date Fri, 02 Aug 2019 18:22:15 GMT
This is an automated email from the ASF dual-hosted git repository.

duke8253 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/trafficserver.git


The following commit(s) were added to refs/heads/master by this push:
     new b0e699b  remove unnecessary parts from tests
b0e699b is described below

commit b0e699bc96c03db6adb2a748080e372e650d9e3f
Author: Fei Deng <duke8253@gmail.com>
AuthorDate: Wed Jul 31 15:18:51 2019 -0500

    remove unnecessary parts from tests
---
 tests/gold_tests/cont_schedule/gold/http_200.gold  |   9 --
 tests/gold_tests/cont_schedule/schedule.test.py    |  25 +---
 .../cont_schedule/schedule_on_pool.test.py         |  25 +---
 .../cont_schedule/schedule_on_thread.test.py       |  25 +---
 .../cont_schedule/thread_affinity.test.py          |  25 +---
 tests/tools/plugins/cont_schedule.cc               | 142 ++++++++++-----------
 6 files changed, 76 insertions(+), 175 deletions(-)

diff --git a/tests/gold_tests/cont_schedule/gold/http_200.gold b/tests/gold_tests/cont_schedule/gold/http_200.gold
deleted file mode 100644
index f3752f1..0000000
--- a/tests/gold_tests/cont_schedule/gold/http_200.gold
+++ /dev/null
@@ -1,9 +0,0 @@
-``
-< HTTP/1.1 200 OK
-< Date: ``
-< Age: ``
-< Transfer-Encoding: chunked
-< Proxy-Connection: keep-alive
-< Server: ATS/``
-< 
-``
diff --git a/tests/gold_tests/cont_schedule/schedule.test.py b/tests/gold_tests/cont_schedule/schedule.test.py
index a647236..98c7622 100644
--- a/tests/gold_tests/cont_schedule/schedule.test.py
+++ b/tests/gold_tests/cont_schedule/schedule.test.py
@@ -24,20 +24,8 @@ Test.ContinueOnFail = True
 
 # Define default ATS
 ts = Test.MakeATSProcess('ts')
-server = Test.MakeOriginServer('server')
 
-Test.testName = ''
-request_header = {
-    'headers': 'GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-response_header = {
-    'headers': 'HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-server.addResponse("sessionfile.log", request_header, response_header)
+Test.testName = 'Test TSContSchedule API'
 
 ts.Disk.records_config.update({
     'proxy.config.exec_thread.autoconfig': 0,
@@ -48,23 +36,14 @@ ts.Disk.records_config.update({
     'proxy.config.diags.debug.enabled': 1,
     'proxy.config.diags.debug.tags': 'TSContSchedule_test'
 })
-ts.Disk.remap_config.AddLine(
-    'map / http://127.0.0.1:{0}'.format(server.Variables.Port)
-)
 
 # Load plugin
 Test.PreparePlugin(os.path.join(Test.Variables.AtsTestToolsDir, 'plugins', 'cont_schedule.cc'), ts)
 
 # www.example.com Host
 tr = Test.AddTestRun()
-tr.Processes.Default.Command = 'curl --proxy 127.0.0.1:{0} "http://www.example.com" -H "Proxy-Connection: Keep-Alive" --verbose'.format(ts.Variables.port)
+tr.Processes.Default.Command = 'printf "Test TSContSchedule API" && sleep 5'
 tr.Processes.Default.ReturnCode = 0
 tr.Processes.Default.StartBefore(ts)
-tr.Processes.Default.StartBefore(server)
-tr.Processes.Default.Streams.stderr = 'gold/http_200.gold'
-tr.StillRunningAfter = ts
-tr.StillRunningAfter = server
-
-# Check Plugin Results
 ts.Streams.All = "gold/schedule.gold"
 ts.Streams.All += Testers.ExcludesExpression('fail', 'should not contain "fail"')
diff --git a/tests/gold_tests/cont_schedule/schedule_on_pool.test.py b/tests/gold_tests/cont_schedule/schedule_on_pool.test.py
index ae3ce99..23a7373 100644
--- a/tests/gold_tests/cont_schedule/schedule_on_pool.test.py
+++ b/tests/gold_tests/cont_schedule/schedule_on_pool.test.py
@@ -24,20 +24,8 @@ Test.ContinueOnFail = True
 
 # Define default ATS
 ts = Test.MakeATSProcess('ts')
-server = Test.MakeOriginServer('server')
 
-Test.testName = ''
-request_header = {
-    'headers': 'GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-response_header = {
-    'headers': 'HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-server.addResponse("sessionfile.log", request_header, response_header)
+Test.testName = 'Test TSContScheduleOnPool API'
 
 ts.Disk.records_config.update({
     'proxy.config.exec_thread.autoconfig': 0,
@@ -48,23 +36,14 @@ ts.Disk.records_config.update({
     'proxy.config.diags.debug.enabled': 1,
     'proxy.config.diags.debug.tags': 'TSContSchedule_test'
 })
-ts.Disk.remap_config.AddLine(
-    'map / http://127.0.0.1:{0}'.format(server.Variables.Port)
-)
 
 # Load plugin
 Test.PreparePlugin(os.path.join(Test.Variables.AtsTestToolsDir, 'plugins', 'cont_schedule.cc'), ts, 'pool')
 
 # www.example.com Host
 tr = Test.AddTestRun()
-tr.Processes.Default.Command = 'curl --proxy 127.0.0.1:{0} "http://www.example.com" -H "Proxy-Connection: Keep-Alive" --verbose'.format(ts.Variables.port)
+tr.Processes.Default.Command = 'printf "Test TSContScheduleOnPool API"'
 tr.Processes.Default.ReturnCode = 0
 tr.Processes.Default.StartBefore(ts)
-tr.Processes.Default.StartBefore(server)
-tr.Processes.Default.Streams.stderr = 'gold/http_200.gold'
-tr.StillRunningAfter = ts
-tr.StillRunningAfter = server
-
-# Check Plugin Results
 ts.Streams.All = "gold/schedule_on_pool.gold"
 ts.Streams.All += Testers.ExcludesExpression('fail', 'should not contain "fail"')
diff --git a/tests/gold_tests/cont_schedule/schedule_on_thread.test.py b/tests/gold_tests/cont_schedule/schedule_on_thread.test.py
index d11309b..59e5a81 100644
--- a/tests/gold_tests/cont_schedule/schedule_on_thread.test.py
+++ b/tests/gold_tests/cont_schedule/schedule_on_thread.test.py
@@ -24,20 +24,8 @@ Test.ContinueOnFail = True
 
 # Define default ATS
 ts = Test.MakeATSProcess('ts')
-server = Test.MakeOriginServer('server')
 
-Test.testName = ''
-request_header = {
-    'headers': 'GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-response_header = {
-    'headers': 'HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-server.addResponse("sessionfile.log", request_header, response_header)
+Test.testName = 'Test TSContScheduleOnThread API'
 
 ts.Disk.records_config.update({
     'proxy.config.exec_thread.autoconfig': 0,
@@ -48,23 +36,14 @@ ts.Disk.records_config.update({
     'proxy.config.diags.debug.enabled': 1,
     'proxy.config.diags.debug.tags': 'TSContSchedule_test'
 })
-ts.Disk.remap_config.AddLine(
-    'map / http://127.0.0.1:{0}'.format(server.Variables.Port)
-)
 
 # Load plugin
 Test.PreparePlugin(os.path.join(Test.Variables.AtsTestToolsDir, 'plugins', 'cont_schedule.cc'), ts, 'thread')
 
 # www.example.com Host
 tr = Test.AddTestRun()
-tr.Processes.Default.Command = 'curl --proxy 127.0.0.1:{0} "http://www.example.com" -H "Proxy-Connection: Keep-Alive" --verbose'.format(ts.Variables.port)
+tr.Processes.Default.Command = 'printf "Test TSContScheduleOnThread API"'
 tr.Processes.Default.ReturnCode = 0
 tr.Processes.Default.StartBefore(ts)
-tr.Processes.Default.StartBefore(server)
-tr.Processes.Default.Streams.stderr = 'gold/http_200.gold'
-tr.StillRunningAfter = ts
-tr.StillRunningAfter = server
-
-# Check Plugin Results
 ts.Streams.All = "gold/schedule_on_thread.gold"
 ts.Streams.All += Testers.ExcludesExpression('fail', 'should not contain "fail"')
diff --git a/tests/gold_tests/cont_schedule/thread_affinity.test.py b/tests/gold_tests/cont_schedule/thread_affinity.test.py
index 0b453ef..619a3d8 100644
--- a/tests/gold_tests/cont_schedule/thread_affinity.test.py
+++ b/tests/gold_tests/cont_schedule/thread_affinity.test.py
@@ -24,20 +24,8 @@ Test.ContinueOnFail = True
 
 # Define default ATS
 ts = Test.MakeATSProcess('ts')
-server = Test.MakeOriginServer('server')
 
-Test.testName = ''
-request_header = {
-    'headers': 'GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-response_header = {
-    'headers': 'HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n',
-    'timestamp': '1469733493.993',
-    'body': ''
-}
-server.addResponse("sessionfile.log", request_header, response_header)
+Test.testName = 'Test TSContThreadAffinity APIs'
 
 ts.Disk.records_config.update({
     'proxy.config.exec_thread.autoconfig': 0,
@@ -48,23 +36,14 @@ ts.Disk.records_config.update({
     'proxy.config.diags.debug.enabled': 1,
     'proxy.config.diags.debug.tags': 'TSContSchedule_test'
 })
-ts.Disk.remap_config.AddLine(
-    'map / http://127.0.0.1:{0}'.format(server.Variables.Port)
-)
 
 # Load plugin
 Test.PreparePlugin(os.path.join(Test.Variables.AtsTestToolsDir, 'plugins', 'cont_schedule.cc'), ts, 'affinity')
 
 # www.example.com Host
 tr = Test.AddTestRun()
-tr.Processes.Default.Command = 'curl --proxy 127.0.0.1:{0} "http://www.example.com" -H "Proxy-Connection: Keep-Alive" --verbose'.format(ts.Variables.port)
+tr.Processes.Default.Command = 'printf "Test TSContThreadAffinity API"'
 tr.Processes.Default.ReturnCode = 0
 tr.Processes.Default.StartBefore(ts)
-tr.Processes.Default.StartBefore(server)
-tr.Processes.Default.Streams.stderr = 'gold/http_200.gold'
-tr.StillRunningAfter = ts
-tr.StillRunningAfter = server
-
-# Check Plugin Results
 ts.Streams.All = "gold/thread_affinity.gold"
 ts.Streams.All += Testers.ExcludesExpression('fail', 'should not contain "fail"')
diff --git a/tests/tools/plugins/cont_schedule.cc b/tests/tools/plugins/cont_schedule.cc
index 7a3b0c6..cab71f1 100644
--- a/tests/tools/plugins/cont_schedule.cc
+++ b/tests/tools/plugins/cont_schedule.cc
@@ -39,6 +39,9 @@ static int test_flag = 0;
 static TSEventThread thread_1 = nullptr;
 static TSEventThread thread_2 = nullptr;
 
+static TSCont contp_1 = nullptr;
+static TSCont contp_2 = nullptr;
+
 static int TSContSchedule_handler_1(TSCont contp, TSEvent event, void *edata);
 static int TSContSchedule_handler_2(TSCont contp, TSEvent event, void *edata);
 static int TSContScheduleOnPool_handler_1(TSCont contp, TSEvent event, void *edata);
@@ -55,18 +58,12 @@ TSContSchedule_handler_1(TSCont contp, TSEvent event, void *edata)
     // First time entering this handler, before everything else starts.
     thread_1 = TSEventThreadSelf();
 
-    TSCont contp_new = TSContCreate(TSContSchedule_handler_2, TSMutexCreate());
-
-    if (contp_new == nullptr) {
-      TSDebug(DEBUG_TAG_HDL, "[%s] could not create continuation", plugin_name);
-      abort();
-    } else {
-      // Set the affinity of contp_new to thread_1, and schedule it twice.
-      TSDebug(DEBUG_TAG_HDL, "[%s] scheduling continuation", plugin_name);
-      TSContThreadAffinitySet(contp_new, thread_1);
-      TSContSchedule(contp_new, 0);
-      TSContSchedule(contp_new, 100);
-    }
+    // Set the affinity of contp_2 to thread_1, and schedule it twice.
+    // Since it's on the same thread, we don't need a delay.
+    TSDebug(DEBUG_TAG_HDL, "[%s] scheduling continuation", plugin_name);
+    TSContThreadAffinitySet(contp_2, thread_1);
+    TSContSchedule(contp_2, 0);
+    TSContSchedule(contp_2, 0);
   } else if (thread_2 == nullptr) {
     TSDebug(DEBUG_TAG_CHK, "fail [schedule delay not applied]");
   } else {
@@ -101,6 +98,23 @@ TSContSchedule_handler_2(TSCont contp, TSEvent event, void *edata)
   return 0;
 }
 
+void
+TSContSchedule_test()
+{
+  contp_1 = TSContCreate(TSContSchedule_handler_1, TSMutexCreate());
+  contp_2 = TSContCreate(TSContSchedule_handler_2, TSMutexCreate());
+
+  if (contp_1 == nullptr || contp_2 == nullptr) {
+    TSDebug(DEBUG_TAG_SCHD, "[%s] could not create continuation", plugin_name);
+    abort();
+  } else {
+    TSDebug(DEBUG_TAG_SCHD, "[%s] scheduling continuation", plugin_name);
+    TSContScheduleOnPool(contp_1, 0, TS_THREAD_POOL_NET);
+    TSContThreadAffinityClear(contp_1);
+    TSContScheduleOnPool(contp_1, 200, TS_THREAD_POOL_NET);
+  }
+}
+
 static int
 TSContScheduleOnPool_handler_1(TSCont contp, TSEvent event, void *edata)
 {
@@ -141,6 +155,28 @@ TSContScheduleOnPool_handler_2(TSCont contp, TSEvent event, void *edata)
   return 0;
 }
 
+void
+TSContScheduleOnPool_test()
+{
+  contp_1 = TSContCreate(TSContScheduleOnPool_handler_1, TSMutexCreate());
+  contp_2 = TSContCreate(TSContScheduleOnPool_handler_2, TSMutexCreate());
+
+  if (contp_1 == nullptr || contp_2 == nullptr) {
+    TSDebug(DEBUG_TAG_SCHD, "[%s] could not create continuation", plugin_name);
+    abort();
+  } else {
+    TSDebug(DEBUG_TAG_SCHD, "[%s] scheduling continuation", plugin_name);
+
+    TSContScheduleOnPool(contp_1, 0, TS_THREAD_POOL_NET);
+    TSContThreadAffinityClear(contp_1);
+    TSContScheduleOnPool(contp_1, 100, TS_THREAD_POOL_NET);
+
+    TSContScheduleOnPool(contp_2, 200, TS_THREAD_POOL_TASK);
+    TSContThreadAffinityClear(contp_2);
+    TSContScheduleOnPool(contp_2, 300, TS_THREAD_POOL_TASK);
+  }
+}
+
 static int
 TSContScheduleOnThread_handler_1(TSCont contp, TSEvent event, void *edata)
 {
@@ -150,16 +186,9 @@ TSContScheduleOnThread_handler_1(TSCont contp, TSEvent event, void *edata)
   if (thread_1 == nullptr) {
     thread_1 = TSEventThreadSelf();
 
-    TSCont contp_new = TSContCreate(TSContScheduleOnThread_handler_2, TSMutexCreate());
-
-    if (contp_new == nullptr) {
-      TSDebug(DEBUG_TAG_HDL, "[%s] could not create continuation", plugin_name);
-      abort();
-    } else {
-      TSDebug(DEBUG_TAG_HDL, "[%s] scheduling continuation", plugin_name);
-      TSContScheduleOnThread(contp_new, 0, thread_1);
-      TSContScheduleOnThread(contp_new, 100, thread_1);
-    }
+    TSDebug(DEBUG_TAG_HDL, "[%s] scheduling continuation", plugin_name);
+    TSContScheduleOnThread(contp_2, 0, thread_1);
+    TSContScheduleOnThread(contp_2, 0, thread_1);
   } else if (thread_2 == nullptr) {
     TSDebug(DEBUG_TAG_CHK, "fail [schedule delay not applied]");
   } else {
@@ -186,6 +215,23 @@ TSContScheduleOnThread_handler_2(TSCont contp, TSEvent event, void *edata)
   return 0;
 }
 
+void
+TSContScheduleOnThread_test()
+{
+  contp_1 = TSContCreate(TSContScheduleOnThread_handler_1, TSMutexCreate());
+  contp_2 = TSContCreate(TSContScheduleOnThread_handler_2, TSMutexCreate());
+
+  if (contp_1 == nullptr || contp_2 == nullptr) {
+    TSDebug(DEBUG_TAG_SCHD, "[%s] could not create continuation", plugin_name);
+    abort();
+  } else {
+    TSDebug(DEBUG_TAG_SCHD, "[%s] scheduling continuation", plugin_name);
+    TSContScheduleOnPool(contp_1, 0, TS_THREAD_POOL_NET);
+    TSContThreadAffinityClear(contp_1);
+    TSContScheduleOnPool(contp_1, 200, TS_THREAD_POOL_NET);
+  }
+}
+
 static int
 TSContThreadAffinity_handler(TSCont contp, TSEvent event, void *edata)
 {
@@ -215,58 +261,6 @@ TSContThreadAffinity_handler(TSCont contp, TSEvent event, void *edata)
 }
 
 void
-TSContSchedule_test()
-{
-  TSCont contp = TSContCreate(TSContSchedule_handler_1, TSMutexCreate());
-
-  if (contp == nullptr) {
-    TSDebug(DEBUG_TAG_SCHD, "[%s] could not create continuation", plugin_name);
-    abort();
-  } else {
-    TSDebug(DEBUG_TAG_SCHD, "[%s] scheduling continuation", plugin_name);
-    TSContScheduleOnPool(contp, 0, TS_THREAD_POOL_NET);
-    TSContThreadAffinityClear(contp);
-    TSContScheduleOnPool(contp, 200, TS_THREAD_POOL_NET);
-  }
-}
-
-void
-TSContScheduleOnPool_test()
-{
-  TSCont contp_1 = TSContCreate(TSContScheduleOnPool_handler_1, TSMutexCreate());
-  TSCont contp_2 = TSContCreate(TSContScheduleOnPool_handler_2, TSMutexCreate());
-
-  if (contp_1 == nullptr || contp_2 == nullptr) {
-    TSDebug(DEBUG_TAG_SCHD, "[%s] could not create continuation", plugin_name);
-    abort();
-  } else {
-    TSDebug(DEBUG_TAG_SCHD, "[%s] scheduling continuation", plugin_name);
-    TSContScheduleOnPool(contp_1, 0, TS_THREAD_POOL_NET);
-    TSContThreadAffinityClear(contp_1);
-    TSContScheduleOnPool(contp_1, 100, TS_THREAD_POOL_NET);
-    TSContScheduleOnPool(contp_2, 200, TS_THREAD_POOL_TASK);
-    TSContThreadAffinityClear(contp_2);
-    TSContScheduleOnPool(contp_2, 300, TS_THREAD_POOL_TASK);
-  }
-}
-
-void
-TSContScheduleOnThread_test()
-{
-  TSCont contp = TSContCreate(TSContScheduleOnThread_handler_1, TSMutexCreate());
-
-  if (contp == nullptr) {
-    TSDebug(DEBUG_TAG_SCHD, "[%s] could not create continuation", plugin_name);
-    abort();
-  } else {
-    TSDebug(DEBUG_TAG_SCHD, "[%s] scheduling continuation", plugin_name);
-    TSContScheduleOnPool(contp, 0, TS_THREAD_POOL_NET);
-    TSContThreadAffinityClear(contp);
-    TSContScheduleOnPool(contp, 200, TS_THREAD_POOL_NET);
-  }
-}
-
-void
 TSContThreadAffinity_test()
 {
   TSCont contp = TSContCreate(TSContThreadAffinity_handler, TSMutexCreate());


Mime
View raw message