Merge lp:~patrick-crews/drizzle/qp-sysbench into lp:drizzle
- qp-sysbench
- Merge into 7.2
Proposed by
Patrick Crews
Status: Merged
Approved by: | Brian Aker |
Approved revision: | 2574 |
Merged at revision: | 2568 |
Proposed branch: | lp:~patrick-crews/drizzle/qp-sysbench |
Merge into: | lp:drizzle |
Diff against target: |
335 lines (+76/-123) 7 files modified
tests/lib/server_mgmt/drizzled.py (+4/-2) tests/lib/server_mgmt/server_management.py (+0/-22) tests/lib/test_mgmt/test_execution.py (+1/-1) tests/lib/util/crashme_methods.py (+1/-1) tests/lib/util/sqlbench_methods.py (+1/-1) tests/lib/util/sysbench_methods.py (+25/-70) tests/qp_tests/sysbench/sysbench_readonly_test.py (+44/-26) |
To merge this branch: | bzr merge lp:~patrick-crews/drizzle/qp-sysbench |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Drizzle Trunk | Pending | ||
Review via email: mp+111520@code.launchpad.net |
Commit message
Description of the change
Fixes to the sysbench suite + some code cleanups.
Fixed an issue where test server paths weren't correct.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'tests/lib/server_mgmt/drizzled.py' | |||
2 | --- tests/lib/server_mgmt/drizzled.py 2012-03-27 00:53:10 +0000 | |||
3 | +++ tests/lib/server_mgmt/drizzled.py 2012-06-21 23:35:23 +0000 | |||
4 | @@ -84,7 +84,7 @@ | |||
5 | 84 | } | 84 | } |
6 | 85 | } | 85 | } |
7 | 86 | } | 86 | } |
9 | 87 | self.workdir = self.system_manager.create_dirset( workdir_root | 87 | self.workdir = self.system_manager.create_dirset( self.test_executor.workdir |
10 | 88 | , self.dirset) | 88 | , self.dirset) |
11 | 89 | self.vardir = self.workdir | 89 | self.vardir = self.workdir |
12 | 90 | self.tmpdir = os.path.join(self.vardir,'tmp') | 90 | self.tmpdir = os.path.join(self.vardir,'tmp') |
13 | @@ -147,7 +147,9 @@ | |||
14 | 147 | 147 | ||
15 | 148 | """ | 148 | """ |
16 | 149 | 149 | ||
18 | 150 | server_args = [ self.process_server_options() | 150 | server_args = [ "--no-defaults" |
19 | 151 | , "--server-id=%d" %(int(self.name.split('s')[1])+1) | ||
20 | 152 | , self.process_server_options() | ||
21 | 151 | , "--mysql-protocol.port=%d" %(self.master_port) | 153 | , "--mysql-protocol.port=%d" %(self.master_port) |
22 | 152 | , "--mysql-protocol.connect-timeout=60" | 154 | , "--mysql-protocol.connect-timeout=60" |
23 | 153 | , "--innodb.data-file-path=ibdata1:20M:autoextend" | 155 | , "--innodb.data-file-path=ibdata1:20M:autoextend" |
24 | 154 | 156 | ||
25 | === modified file 'tests/lib/server_mgmt/server_management.py' | |||
26 | --- tests/lib/server_mgmt/server_management.py 2012-02-09 02:28:38 +0000 | |||
27 | +++ tests/lib/server_mgmt/server_management.py 2012-06-21 23:35:23 +0000 | |||
28 | @@ -64,28 +64,6 @@ | |||
29 | 64 | self.libeatmydata_path = variables['libeatmydatapath'] | 64 | self.libeatmydata_path = variables['libeatmydatapath'] |
30 | 65 | 65 | ||
31 | 66 | self.logging.info("Using default-storage-engine: %s" %(self.default_storage_engine)) | 66 | self.logging.info("Using default-storage-engine: %s" %(self.default_storage_engine)) |
32 | 67 | test_server = self.allocate_server( 'test_bot' | ||
33 | 68 | , None | ||
34 | 69 | , [] | ||
35 | 70 | , self.system_manager.workdir | ||
36 | 71 | ) | ||
37 | 72 | self.logging.info("Testing for Innodb / Xtradb version...") | ||
38 | 73 | test_server.start(working_environ=os.environ) | ||
39 | 74 | try: | ||
40 | 75 | innodb_ver, xtradb_ver = test_server.get_engine_info() | ||
41 | 76 | self.logging.info("Innodb version: %s" %innodb_ver) | ||
42 | 77 | self.logging.info("Xtradb version: %s" %xtradb_ver) | ||
43 | 78 | except Exception, e: | ||
44 | 79 | self.logging.error("Problem detecting innodb/xtradb version:") | ||
45 | 80 | self.logging.error(Exception) | ||
46 | 81 | self.logging.error(e) | ||
47 | 82 | self.logging.error("Dumping server error.log...") | ||
48 | 83 | test_server.dump_errlog() | ||
49 | 84 | test_server.stop() | ||
50 | 85 | test_server.cleanup() | ||
51 | 86 | shutil.rmtree(test_server.workdir) | ||
52 | 87 | del(test_server) | ||
53 | 88 | |||
54 | 89 | self.logging.debug_class(self) | 67 | self.logging.debug_class(self) |
55 | 90 | 68 | ||
56 | 91 | def request_servers( self | 69 | def request_servers( self |
57 | 92 | 70 | ||
58 | === modified file 'tests/lib/test_mgmt/test_execution.py' | |||
59 | --- tests/lib/test_mgmt/test_execution.py 2012-05-25 18:41:11 +0000 | |||
60 | +++ tests/lib/test_mgmt/test_execution.py 2012-06-21 23:35:23 +0000 | |||
61 | @@ -296,7 +296,7 @@ | |||
62 | 296 | #, 'MYSQL_BASEDIR' : self.system_manager.code_manager.code_trees['mysql'][0].basedir | 296 | #, 'MYSQL_BASEDIR' : self.system_manager.code_manager.code_trees['mysql'][0].basedir |
63 | 297 | , 'MYSQL_TEST_WORKDIR' : self.system_manager.workdir | 297 | , 'MYSQL_TEST_WORKDIR' : self.system_manager.workdir |
64 | 298 | , 'SQLBENCH_DIR' : os.path.join( self.system_manager.testdir | 298 | , 'SQLBENCH_DIR' : os.path.join( self.system_manager.testdir |
66 | 299 | , 'sql-bench') | 299 | , 'test_tools/sql-bench') |
67 | 300 | } | 300 | } |
68 | 301 | 301 | ||
69 | 302 | self.working_environment = self.system_manager.env_manager.create_working_environment(env_reqs) | 302 | self.working_environment = self.system_manager.env_manager.create_working_environment(env_reqs) |
70 | 303 | 303 | ||
71 | === modified file 'tests/lib/util/crashme_methods.py' | |||
72 | --- tests/lib/util/crashme_methods.py 2011-10-20 02:44:33 +0000 | |||
73 | +++ tests/lib/util/crashme_methods.py 2012-06-21 23:35:23 +0000 | |||
74 | @@ -70,7 +70,7 @@ | |||
75 | 70 | 70 | ||
76 | 71 | crashme_subproc = subprocess.Popen( crashme_cmd | 71 | crashme_subproc = subprocess.Popen( crashme_cmd |
77 | 72 | , shell=True | 72 | , shell=True |
79 | 73 | , cwd=os.path.join(bot.system_manager.testdir, 'sql-bench') | 73 | , cwd=os.path.join(bot.system_manager.testdir, 'test_tools/sql-bench') |
80 | 74 | , env=bot.working_environment | 74 | , env=bot.working_environment |
81 | 75 | , stdout = crashme_output | 75 | , stdout = crashme_output |
82 | 76 | , stderr = subprocess.STDOUT | 76 | , stderr = subprocess.STDOUT |
83 | 77 | 77 | ||
84 | === modified file 'tests/lib/util/sqlbench_methods.py' | |||
85 | --- tests/lib/util/sqlbench_methods.py 2011-10-20 02:44:33 +0000 | |||
86 | +++ tests/lib/util/sqlbench_methods.py 2012-06-21 23:35:23 +0000 | |||
87 | @@ -35,7 +35,7 @@ | |||
88 | 35 | bot.logging.info("This may take some time...") | 35 | bot.logging.info("This may take some time...") |
89 | 36 | sqlbench_subproc = subprocess.Popen( test_cmd | 36 | sqlbench_subproc = subprocess.Popen( test_cmd |
90 | 37 | , shell=True | 37 | , shell=True |
92 | 38 | , cwd=os.path.join(bot.system_manager.testdir, 'sql-bench') | 38 | , cwd=os.path.join(bot.system_manager.testdir, 'test_tools/sql-bench') |
93 | 39 | , env=bot.working_environment | 39 | , env=bot.working_environment |
94 | 40 | , stdout = sqlbench_output | 40 | , stdout = sqlbench_output |
95 | 41 | , stderr = subprocess.STDOUT | 41 | , stderr = subprocess.STDOUT |
96 | 42 | 42 | ||
97 | === modified file 'tests/lib/util/sysbench_methods.py' | |||
98 | --- tests/lib/util/sysbench_methods.py 2012-05-29 21:27:11 +0000 | |||
99 | +++ tests/lib/util/sysbench_methods.py 2012-06-21 23:35:23 +0000 | |||
100 | @@ -20,80 +20,35 @@ | |||
101 | 20 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA | 20 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA |
102 | 21 | 21 | ||
103 | 22 | import os | 22 | import os |
104 | 23 | import re | ||
105 | 23 | import subprocess | 24 | import subprocess |
106 | 24 | 25 | ||
110 | 25 | def execute_sqlbench(test_cmd, test_executor, servers): | 26 | def prepare_sysbench(test_executor, test_cmd): |
111 | 26 | """ Execute the commandline and return the result. | 27 | """ Prepare the server for a sysbench run |
109 | 27 | We use subprocess as we can pass os.environ dicts and whatnot | ||
112 | 28 | 28 | ||
113 | 29 | """ | 29 | """ |
114 | 30 | |||
115 | 31 | bot = test_executor | 30 | bot = test_executor |
125 | 32 | sqlbench_outfile = os.path.join(bot.logdir,'sqlbench.out') | 31 | sysbench_outfile = os.path.join(bot.logdir,'sysbench.out') |
126 | 33 | sqlbench_output = open(sqlbench_outfile,'w') | 32 | sysbench_output = open(sysbench_outfile,'w') |
127 | 34 | bot.logging.info("Executing sqlbench: %s" %(test_cmd)) | 33 | sysbench_prep_cmd = ' '.join([test_cmd,'prepare']) |
128 | 35 | bot.logging.info("This may take some time...") | 34 | bot.logging.info("Preparing database for sysbench run...") |
129 | 36 | sqlbench_subproc = subprocess.Popen( test_cmd | 35 | bot.logging.verbose(sysbench_prep_cmd) |
130 | 37 | , shell=True | 36 | sysbench_subproc = subprocess.Popen( sysbench_prep_cmd |
131 | 38 | , cwd=os.path.join(bot.system_manager.testdir, 'sql-bench') | 37 | , shell = True |
132 | 39 | , env=bot.working_environment | 38 | , env = bot.working_environment |
133 | 40 | , stdout = sqlbench_output | 39 | , stdout = sysbench_output |
134 | 41 | , stderr = subprocess.STDOUT | 40 | , stderr = subprocess.STDOUT |
135 | 42 | ) | 41 | ) |
190 | 43 | sqlbench_subproc.wait() | 42 | sysbench_subproc.wait() |
191 | 44 | retcode = sqlbench_subproc.returncode | 43 | retcode = sysbench_subproc.returncode |
192 | 45 | 44 | sysbench_output.close() | |
193 | 46 | sqlbench_output.close() | 45 | with open(sysbench_outfile,'r') as sysbench_file: |
194 | 47 | sqlbench_file = open(sqlbench_outfile,'r') | 46 | output = ''.join(sysbench_file.readlines()) |
195 | 48 | output = ''.join(sqlbench_file.readlines()) | 47 | sysbench_file.close() |
196 | 49 | sqlbench_file.close() | 48 | bot.logging.verbose("sysbench_retcode: %d" %(retcode)) |
197 | 50 | 49 | return retcode, output | |
198 | 51 | bot.current_test_retcode = retcode | 50 | |
199 | 52 | bot.current_test_output = output | 51 | def execute_sysbench(test_executor, test_cmd): |
146 | 53 | test_status = process_sqlbench_output(bot) | ||
147 | 54 | return test_status, retcode, output | ||
148 | 55 | |||
149 | 56 | def process_sqlbench_output(bot): | ||
150 | 57 | |||
151 | 58 | # Check for 'Failed' in sql-bench output | ||
152 | 59 | # The tests don't die on a failed test and | ||
153 | 60 | # require some checking of the output file | ||
154 | 61 | error_flag = False | ||
155 | 62 | for inline in bot.current_test_output: | ||
156 | 63 | if 'Failed' in inline: | ||
157 | 64 | error_flag= True | ||
158 | 65 | logging.info(inline.strip()) | ||
159 | 66 | if bot.current_test_retcode == 0 and not error_flag: | ||
160 | 67 | return 'pass' | ||
161 | 68 | else: | ||
162 | 69 | return 'fail' | ||
163 | 70 | |||
164 | 71 | def prepare_sysbench(test_executor, test_cmd): | ||
165 | 72 | """ Prepare the server for a sysbench run | ||
166 | 73 | |||
167 | 74 | """ | ||
168 | 75 | bot = test_executor | ||
169 | 76 | sysbench_outfile = os.path.join(bot.logdir,'sysbench.out') | ||
170 | 77 | sysbench_output = open(sysbench_outfile,'w') | ||
171 | 78 | sysbench_prep_cmd = ' '.join([test_cmd,'prepare']) | ||
172 | 79 | bot.logging.info("Preparing database for sysbench run...") | ||
173 | 80 | bot.logging.verbose(sysbench_prep_cmd) | ||
174 | 81 | sysbench_subproc = subprocess.Popen( sysbench_prep_cmd | ||
175 | 82 | , shell = True | ||
176 | 83 | , env = bot.working_environment | ||
177 | 84 | , stdout = sysbench_output | ||
178 | 85 | , stderr = subprocess.STDOUT | ||
179 | 86 | ) | ||
180 | 87 | sysbench_subproc.wait() | ||
181 | 88 | retcode = sysbench_subproc.returncode | ||
182 | 89 | sysbench_output.close() | ||
183 | 90 | with open(sysbench_outfile,'r') as sysbench_file: | ||
184 | 91 | output = ''.join(sysbench_file.readlines()) | ||
185 | 92 | sysbench_file.close() | ||
186 | 93 | bot.logging.verbose("sysbench_retcode: %d" %(retcode)) | ||
187 | 94 | return retcode, output | ||
188 | 95 | |||
189 | 96 | def execute_sysbench(test_executor, test_cmd): | ||
200 | 97 | """ Execute the commandline and return the result. | 52 | """ Execute the commandline and return the result. |
201 | 98 | We use subprocess as we can pass os.environ dicts and whatnot | 53 | We use subprocess as we can pass os.environ dicts and whatnot |
202 | 99 | 54 | ||
203 | @@ -101,7 +56,7 @@ | |||
204 | 101 | 56 | ||
205 | 102 | bot = test_executor | 57 | bot = test_executor |
206 | 103 | sysbench_cmd = ' '.join([test_cmd, 'run']) | 58 | sysbench_cmd = ' '.join([test_cmd, 'run']) |
208 | 104 | self.logging.info("Executing sysbench: %s" %(sysbench_cmd)) | 59 | bot.logging.info("Executing sysbench: %s" %(sysbench_cmd)) |
209 | 105 | sysbench_outfile = os.path.join(bot.logdir,'sysbench.out') | 60 | sysbench_outfile = os.path.join(bot.logdir,'sysbench.out') |
210 | 106 | with open(sysbench_outfile,'w') as sysbench_output: | 61 | with open(sysbench_outfile,'w') as sysbench_output: |
211 | 107 | sysbench_subproc = subprocess.Popen( sysbench_cmd | 62 | sysbench_subproc = subprocess.Popen( sysbench_cmd |
212 | @@ -116,11 +71,11 @@ | |||
213 | 116 | 71 | ||
214 | 117 | sysbench_file = open(sysbench_outfile,'r') | 72 | sysbench_file = open(sysbench_outfile,'r') |
215 | 118 | output = ''.join(sysbench_file.readlines()) | 73 | output = ''.join(sysbench_file.readlines()) |
217 | 119 | self.logging.debug(output) | 74 | bot.logging.debug(output) |
218 | 120 | sysbench_file.close() | 75 | sysbench_file.close() |
219 | 121 | return retcode, output | 76 | return retcode, output |
220 | 122 | 77 | ||
222 | 123 | def process_sysbench_output(test_output): | 78 | def process_sysbench_output(test_output): |
223 | 124 | """ sysbench has run, we now check out what we have | 79 | """ sysbench has run, we now check out what we have |
224 | 125 | We also output the data from the run | 80 | We also output the data from the run |
225 | 126 | 81 | ||
226 | 127 | 82 | ||
227 | === modified file 'tests/qp_tests/sysbench/sysbench_readonly_test.py' | |||
228 | --- tests/qp_tests/sysbench/sysbench_readonly_test.py 2012-05-29 21:27:11 +0000 | |||
229 | +++ tests/qp_tests/sysbench/sysbench_readonly_test.py 2012-06-21 23:35:23 +0000 | |||
230 | @@ -21,19 +21,24 @@ | |||
231 | 21 | 21 | ||
232 | 22 | import unittest | 22 | import unittest |
233 | 23 | import subprocess | 23 | import subprocess |
234 | 24 | import time | ||
235 | 24 | 25 | ||
236 | 25 | from lib.util.sysbench_methods import prepare_sysbench | 26 | from lib.util.sysbench_methods import prepare_sysbench |
237 | 26 | from lib.util.sysbench_methods import execute_sysbench | 27 | from lib.util.sysbench_methods import execute_sysbench |
238 | 28 | from lib.util.sysbench_methods import process_sysbench_output | ||
239 | 27 | from lib.util.mysqlBaseTestCase import mysqlBaseTestCase | 29 | from lib.util.mysqlBaseTestCase import mysqlBaseTestCase |
240 | 28 | 30 | ||
242 | 29 | server_requirements = [['innodb.buffer-pool-size=256M innodb.log-file-size=64M innodb.log-buffer-size=8M innodb.thread-concurrency=0 innodb.additional-mem-pool-size=16M table-open-cache=4096 table-definition-cache=4096 mysql-protocol.max-connections=2048']] | 31 | # TODO: make server_options vary depending on the type of server being used here |
243 | 32 | # drizzle options | ||
244 | 33 | #server_requirements = [['innodb.buffer-pool-size=256M innodb.log-file-size=64M innodb.log-buffer-size=8M innodb.thread-concurrency=0 innodb.additional-mem-pool-size=16M table-open-cache=4096 table-definition-cache=4096 mysql-protocol.max-connections=2048']] | ||
245 | 34 | # mysql options | ||
246 | 35 | #server_requirements = [['innodb_buffer_pool_size=256M innodb_log_file_size=64M innodb_log_buffer_size=8M innodb_thread_concurrency=0 innodb_additional_mem_pool_size=16M table_open_cache=4096 table_definition_cache=4096 max_connections=2048']] | ||
247 | 36 | server_requirements = [[]] | ||
248 | 30 | servers = [] | 37 | servers = [] |
249 | 31 | server_manager = None | 38 | server_manager = None |
250 | 32 | test_executor = None | 39 | test_executor = None |
251 | 33 | 40 | ||
252 | 34 | class basicTest(mysqlBaseTestCase): | 41 | class basicTest(mysqlBaseTestCase): |
253 | 35 | def setUp(self): | ||
254 | 36 | retcode, output = sysbench_methods.prepare_sysbench(test_executor, test_cmd) | ||
255 | 37 | 42 | ||
256 | 38 | def test_sysbench_readonly(self): | 43 | def test_sysbench_readonly(self): |
257 | 39 | self.logging = test_executor.logging | 44 | self.logging = test_executor.logging |
258 | @@ -44,41 +49,54 @@ | |||
259 | 44 | , "--max-requests=0" | 49 | , "--max-requests=0" |
260 | 45 | , "--test=oltp" | 50 | , "--test=oltp" |
261 | 46 | , "--db-ps-mode=disable" | 51 | , "--db-ps-mode=disable" |
263 | 47 | , "--drizzle-table-engine=innodb" | 52 | , "--%s-table-engine=innodb" %master_server.type |
264 | 48 | , "--oltp-read-only=on" | 53 | , "--oltp-read-only=on" |
265 | 49 | , "--oltp-table-size=1000000" | 54 | , "--oltp-table-size=1000000" |
272 | 50 | , "--drizzle-mysql=on" | 55 | , "--%s-user=root" %master_server.type |
273 | 51 | , "--drizzle-user=root" | 56 | , "--%s-db=test" %master_server.type |
274 | 52 | , "--drizzle-db=test" | 57 | , "--%s-port=%d" %(master_server.type, master_server.master_port) |
275 | 53 | , "--drizzle-port=$MASTER_MYPORT" | 58 | , "--%s-host=localhost" %master_server.type |
276 | 54 | , "--drizzle-host=localhost" | 59 | , "--db-driver=%s" %master_server.type |
271 | 55 | , "--db-driver=drizzle" | ||
277 | 56 | ] | 60 | ] |
279 | 57 | 61 | ||
280 | 62 | if master_server.type == 'drizzle': | ||
281 | 63 | test_cmd.append("--drizzle-mysql=on") | ||
282 | 64 | if master_server.type == 'mysql': | ||
283 | 65 | test_cmd.append("--mysql-socket=%s" %master_server.socket_file) | ||
284 | 66 | |||
285 | 67 | # We sleep for a minute to wait | ||
286 | 68 | time.sleep(10) | ||
287 | 58 | # how many times to run sysbench at each concurrency | 69 | # how many times to run sysbench at each concurrency |
288 | 59 | iterations = 1 | 70 | iterations = 1 |
289 | 60 | 71 | ||
290 | 61 | # various concurrencies to use with sysbench | 72 | # various concurrencies to use with sysbench |
292 | 62 | concurrencies = [16, 32, 64, 128, 256, 512, 1024] | 73 | #concurrencies = [16, 32, 64, 128, 256, 512, 1024] |
293 | 74 | concurrencies = [1, 4, 8 ] | ||
294 | 63 | 75 | ||
295 | 64 | # start the test! | 76 | # start the test! |
296 | 65 | for concurrency in concurrencies: | 77 | for concurrency in concurrencies: |
297 | 78 | self.logging.info("Resetting test server...") | ||
298 | 79 | for query in ["DROP SCHEMA IF EXISTS test" | ||
299 | 80 | ,"CREATE SCHEMA test" | ||
300 | 81 | ]: | ||
301 | 82 | retcode, result = self.execute_query(query, master_server, schema="INFORMATION_SCHEMA") | ||
302 | 66 | test_cmd.append("--num-threads=%d" %concurrency) | 83 | test_cmd.append("--num-threads=%d" %concurrency) |
314 | 67 | # we setup once per concurrency, copying drizzle-automation | 84 | # we setup once per concurrency, copying drizzle-automation |
315 | 68 | # this should likely change and if not for readonly, then definitely | 85 | # this should likely change and if not for readonly, then definitely |
316 | 69 | # for readwrite | 86 | # for readwrite |
317 | 70 | retcode, output = sysbench_methods.prepare_sysbench(test_executor, test_cmd) | 87 | |
318 | 71 | self.assertEqual(retcode, 0, msg = ("sysbench 'prepare' phase failed.\n" | 88 | exec_cmd = " ".join(test_cmd) |
319 | 72 | "retcode: %d" | 89 | retcode, output = prepare_sysbench(test_executor, exec_cmd) |
320 | 73 | "output: %s" | 90 | err_msg = ("sysbench 'prepare' phase failed.\n" |
321 | 74 | %(retcode, output)) | 91 | "retcode: %d" |
322 | 75 | for iteration in iterations: | 92 | "output: %s" %(retcode,output)) |
323 | 76 | test_cmd = " ".join(test_cmd) | 93 | self.assertEqual(retcode, 0, msg = err_msg) |
324 | 77 | retcode, output = execute_sysbench(test_cmd, test_executor, servers) | 94 | |
325 | 95 | for test_iteration in range(iterations): | ||
326 | 96 | retcode, output = execute_sysbench(test_executor, exec_cmd) | ||
327 | 78 | self.assertEqual(retcode, 0, msg = output) | 97 | self.assertEqual(retcode, 0, msg = output) |
331 | 79 | parsed_output = sysbench_methods.process_output(output) | 98 | parsed_output = process_sysbench_output(output) |
332 | 80 | for line in parsed_output: | 99 | self.logging.info(parsed_output) |
330 | 81 | self.logging.info(line) | ||
333 | 82 | 100 | ||
334 | 83 | def tearDown(self): | 101 | def tearDown(self): |
335 | 84 | server_manager.reset_servers(test_executor.name) | 102 | server_manager.reset_servers(test_executor.name) |