Merge lp:~facundo/ubuntuone-client/stable--query-no-content-file into lp:ubuntuone-client/stable-1-2

Proposed by Facundo Batista
Status: Merged
Approved by: Facundo Batista
Approved revision: 511
Merged at revision: 512
Proposed branch: lp:~facundo/ubuntuone-client/stable--query-no-content-file
Merge into: lp:ubuntuone-client/stable-1-2
Diff against target: 210 lines (+93/-20)
3 files modified
tests/syncdaemon/test_sync.py (+74/-13)
ubuntuone/syncdaemon/sync.py (+16/-4)
ubuntuone/syncdaemon/u1fsfsm.py (+3/-3)
To merge this branch: bzr merge lp:~facundo/ubuntuone-client/stable--query-no-content-file
Reviewer            Review Type    Date Requested    Status
Guillermo Gonzalez                                   Approve
Tim Cole            community                        Approve
Review via email: mp+25005@code.launchpad.net

Commit message

Queue an upload again on SV_HASH_NEW when the node is LOCAL and its hashes are ok.

Description of the change

Queue an upload again on SV_HASH_NEW when the node is LOCAL and its hashes are ok.

Refactored the tests a little to include more cases.

This is a backport from trunk.
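
For reference, a minimal standalone sketch of the behaviour this branch adds: when SV_HASH_NEW arrives for a node that is LOCAL and whose metadata is already complete, the upload is simply queued again from the stored local state. FakeActionQueue and the plain-dict "key" below are illustrative stand-ins, not the real syncdaemon classes; the actual change is SyncStateMachineRunner.reput_file_from_local in the preview diff below.

    # Illustrative sketch only; the real code lives in
    # ubuntuone/syncdaemon/sync.py (reput_file_from_local).

    class FakeActionQueue(object):
        """Collects upload requests instead of talking to the server."""

        def __init__(self):
            self.uploads = []

        def upload(self, share_id, node_id, previous_hash, local_hash,
                   crc32, size, fileobj_factory):
            self.uploads.append((share_id, node_id, previous_hash, local_hash,
                                 crc32, size, fileobj_factory))


    def reput_file_from_local(action_q, key, open_file):
        """Queue the upload again using the node's stored local state."""
        action_q.upload(key['share_id'], key['node_id'],
                        key['server_hash'], key['local_hash'],
                        key['crc32'], key['size'], open_file)


    if __name__ == '__main__':
        # A node left in LOCAL after an interrupted upload: metadata is
        # complete, so SV_HASH_NEW just triggers the upload again.
        key = dict(share_id='share', node_id='node', server_hash='',
                   local_hash='somehash', crc32='crc32', size='size')
        aq = FakeActionQueue()
        reput_file_from_local(aq, key, open_file=lambda: None)
        assert len(aq.uploads) == 1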

Tim Cole (tcole):
review: Approve
Guillermo Gonzalez (verterok) wrote:

Looks good! Tests pass.

review: Approve
dobey (dobey) wrote:

Attempt to merge lp:~facundo/ubuntuone-client/stable--query-no-content-file into lp:ubuntuone-client/stable-1-2 failed due to merge conflicts:

text conflict in tests/syncdaemon/test_sync.py

511. By Facundo Batista

Merged stable-1-2 in

Facundo Batista (facundo) wrote:

Merged branch back

Preview Diff

=== modified file 'tests/syncdaemon/test_sync.py'
--- tests/syncdaemon/test_sync.py 2010-05-10 19:15:09 +0000
+++ tests/syncdaemon/test_sync.py 2010-05-26 18:06:30 +0000
@@ -30,7 +30,7 @@
 from contrib.testing.testcase import (
     FakeVolumeManager,
     BaseTwistedTestCase,
-    MementoHandler, DummyClass
+    MementoHandler
 )
 
 from contrib.testing import testcase
@@ -182,8 +182,8 @@
                          self.fsm.get_by_path(path).server_hash)
 
 
-class TestSync(BaseTwistedTestCase):
-    """Test for Sync."""
+class BaseSync(BaseTwistedTestCase):
+    """Base test infrastructure for Sync."""
 
     def setUp(self):
         """Init."""
@@ -220,6 +220,10 @@
             raise exc_info[0], exc_info[1], exc_info[2]
         BaseTwistedTestCase.tearDown(self)
 
+
+class TestSync(BaseSync):
+    """Test for Sync."""
+
     def test_deleting_open_files_is_no_cause_for_despair(self):
         """test_deleting_open_files_is_no_cause_for_despair."""
         def cb(_):
@@ -258,8 +262,8 @@
         def faked_nothing(ssmr, event, params, *args):
             """Wrap SSMR.nothing to test."""
             self.called = True
+        self.patch(SyncStateMachineRunner, 'nothing', faked_nothing)
 
-        SyncStateMachineRunner.nothing = faked_nothing
         kwargs = dict(share_id='share_id', node_id='node_id')
         sync.handle_AQ_DOWNLOAD_DOES_NOT_EXIST(**kwargs)
         self.assertTrue(self.called, 'nothing was called')
@@ -272,7 +276,7 @@
         def faked_nothing(ssmr, event, params, *args):
             """Wrap SSMR.nothing to test."""
             self.called = True
-        SyncStateMachineRunner.nothing = faked_nothing
+        self.patch(SyncStateMachineRunner, 'nothing', faked_nothing)
 
         # create a file and put it in local
         fsm = self.main.fs
@@ -284,28 +288,85 @@
         sync.handle_FS_FILE_CREATE(somepath)
         self.assertTrue(self.called)
 
-
-class SyncStateMachineRunnerTestCase(BaseTwistedTestCase):
+    def test_SV_HASH_NEW_with_file_uploadinterrupted(self):
+        """A SV_HASH_NEW is received after upload interrupted."""
+        sync = Sync(main=self.main)
+        self.called = False
+
+        def fake_meth(_, event, params, hash):
+            """Wrap SSMR.reput_file_from_local to test."""
+            self.assertEqual(event, 'SV_HASH_NEW')
+            self.assertEqual(hash, '')
+            self.called = True
+        self.patch(SyncStateMachineRunner, 'reput_file_from_local', fake_meth)
+
+        # create a file and put it in local, without server_hash, as
+        # if the upload was cut in the middle after the make file
+        fsm = self.main.fs
+        somepath = os.path.join(self.root, 'somepath')
+        mdid = fsm.create(somepath, '', node_id='node_id')
+        fsm.set_by_mdid(mdid, local_hash='somehash', crc32='crc32',
+                        stat='stat', size='size')
+
+        # send the event and check
+        mdobj = fsm.get_by_mdid(mdid)
+        sync.handle_SV_HASH_NEW(mdobj.share_id, mdobj.node_id, '')  # no content
+        self.assertTrue(self.called)
+
+
+class SyncStateMachineRunnerTestCase(BaseSync):
     """Tests for the SyncStateMachineRunner."""
 
     def setUp(self):
         """Init."""
-        BaseTwistedTestCase.setUp(self)
-        self.ssmr = SyncStateMachineRunner(fsm=None, main=None,
-                                           key=DummyClass(), logger=None)
+        BaseSync.setUp(self)
+
+        # create a file
+        self.fsm = self.main.fs
+        somepath = os.path.join(self.root, 'somepath')
+        self.mdid = self.fsm.create(somepath, '', node_id='node_id')
+
+        key = FSKey(self.main.fs, share_id='', node_id='node_id')
+        self.ssmr = SyncStateMachineRunner(fsm=self.main.fs, main=self.main,
+                                           key=key, logger=None)
 
     def tearDown(self):
         """Clean up."""
         self.ssmr = None
-        BaseTwistedTestCase.tearDown(self)
+        BaseSync.tearDown(self)
 
-    def test_delete_file(self):
-        """delete_file can be called with or without the server hash."""
+    def test_delete_file_without_hash(self):
+        """Delete_file can be called without the server hash."""
         self.ssmr.delete_file(event='AQ_DOWNLOAD_ERROR', params=None)
 
+    def test_delete_file_with_hash(self):
+        """Delete_file can be called with the server hash."""
         self.ssmr.delete_file(event='AQ_DOWNLOAD_ERROR', params=None,
                               server_hash='')
 
+    def test_put_file_stores_info(self):
+        """The put_file method should store the info in FSM."""
+        self.ssmr.put_file('HQ_HASH_NEW', None, 'hash', 'crc', 'size', 'stt')
+
+        # check the info is stored
+        mdobj = self.fsm.get_by_mdid(self.mdid)
+        self.assertEqual(mdobj.local_hash, 'hash')
+        self.assertEqual(mdobj.crc32, 'crc')
+        self.assertEqual(mdobj.size, 'size')
+        self.assertEqual(mdobj.stat, 'stt')
+
+    def test_reput_file_stores_info(self):
+        """The reput_file method should store the info in FSM."""
+        self.ssmr.reput_file('HQ_HASH_NEW', None, 'hash', 'crc', 'size', 'stt')
+
+        # check the info is stored
+        mdobj = self.fsm.get_by_mdid(self.mdid)
+        self.assertEqual(mdobj.local_hash, 'hash')
+        self.assertEqual(mdobj.crc32, 'crc')
+        self.assertEqual(mdobj.size, 'size')
+        self.assertEqual(mdobj.stat, 'stt')
+
+
 class FakedState(object):
     """A faked state."""
 
=== modified file 'ubuntuone/syncdaemon/sync.py'
--- ubuntuone/syncdaemon/sync.py 2010-04-08 14:39:27 +0000
+++ ubuntuone/syncdaemon/sync.py 2010-05-26 18:06:30 +0000
@@ -669,10 +669,22 @@
669 """Starts the scan again on a dir."""669 """Starts the scan again on a dir."""
670 self.m.lr.scan_dir(self.key['mdid'], self.key['path'], udfmode)670 self.m.lr.scan_dir(self.key['mdid'], self.key['path'], udfmode)
671671
672 def reput_file_from_local(self, event, params, hash):
673 """Re put the file from its local state."""
674 local_hash = self.key['local_hash']
675 previous_hash = self.key['server_hash']
676 crc32 = self.key['crc32']
677 size = self.key['size']
678 share_id = self.key['share_id']
679 node_id = self.key['node_id']
680
681 self.m.action_q.upload(share_id, node_id, previous_hash, local_hash,
682 crc32, size, self.key.open_file)
683
672 def put_file(self, event, params, hash, crc32, size, stat):684 def put_file(self, event, params, hash, crc32, size, stat):
673 """upload the file to the server."""685 """Upload the file to the server."""
674 previous_hash = self.key['server_hash']686 previous_hash = self.key['server_hash']
675 self.key.set(local_hash=hash, stat=stat)687 self.key.set(local_hash=hash, stat=stat, crc32=crc32, size=size)
676 self.key.sync()688 self.key.sync()
677689
678 self.m.action_q.upload(share_id=self.key['share_id'],690 self.m.action_q.upload(share_id=self.key['share_id'],
@@ -699,12 +711,12 @@
 
 
     def reput_file(self, event, param, hash, crc32, size, stat):
-        """put the file again."""
+        """Put the file again."""
         self.m.action_q.cancel_upload(share_id=self.key['share_id'],
                                       node_id=self.key['node_id'])
         previous_hash = self.key['server_hash']
 
-        self.key.set(local_hash=hash, stat=stat)
+        self.key.set(local_hash=hash, stat=stat, crc32=crc32, size=size)
         self.key.sync()
         self.m.action_q.upload(share_id=self.key['share_id'],
             node_id=self.key['node_id'], previous_hash=previous_hash,
=== modified file 'ubuntuone/syncdaemon/u1fsfsm.ods'
Binary files ubuntuone/syncdaemon/u1fsfsm.ods 2010-04-23 15:23:09 +0000 and ubuntuone/syncdaemon/u1fsfsm.ods 2010-05-26 18:06:30 +0000 differ
=== modified file 'ubuntuone/syncdaemon/u1fsfsm.py'
--- ubuntuone/syncdaemon/u1fsfsm.py 2010-04-23 15:23:09 +0000
+++ ubuntuone/syncdaemon/u1fsfsm.py 2010-05-26 18:06:30 +0000
@@ -2445,9 +2445,9 @@
      'STATE_OUT': {u'changed': u'*',
                    u'has_metadata': u'*',
                    u'is_directory': u'*'}},
-    {'ACTION': u'CONFLICT',
-     'ACTION_FUNC': u'nothing',
-     'COMMENTS': u'',
+    {'ACTION': u'aq.upload()',
+     'ACTION_FUNC': u'reput_file_from_local',
+     'COMMENTS': u'The upload was interrupted, just try it again. ',
      'PARAMETERS': {u'hash_eq_local_hash': u'F',
                     u'hash_eq_server_hash': u'T',
                     u'not_authorized': u'NA',
