Merge lp:~dobey/ubuntuone-client/better-noconnect into lp:ubuntuone-client

Proposed by dobey
Status: Merged
Approved by: dobey
Approved revision: 1408
Merged at revision: 1405
Proposed branch: lp:~dobey/ubuntuone-client/better-noconnect
Merge into: lp:ubuntuone-client
Diff against target: 139 lines (+51/-53)
2 files modified
ubuntuone/syncdaemon/interaction_interfaces.py (+6/-0)
ubuntuone/syncdaemon/main.py (+45/-53)
To merge this branch: bzr merge lp:~dobey/ubuntuone-client/better-noconnect
Reviewer: Facundo Batista (community)
Review status: Approve
Review via email: mp+220698@code.launchpad.net

Commit message

Move the date check into the connect method.

Description of the change

This reverts the changes to main.py from the previous revision and moves the date check into the connect() method. The previous change did not actually make the process exit; it left any code using the DBus API waiting a very long time for its request to time out. Instead, we now simply always block connections and otherwise let syncdaemon start normally, so that ubuntuone-control-panel, u1sdtool, and Nautilus keep working without problems (beyond the intended behaviour of never connecting to the server and never synchronizing files).
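In short, the guard now sits at the top of connect() and just returns early, so nothing below it runs once the end date has passed. A minimal sketch of the shape of the change (the constant name END_DATE exists only in this sketch; the actual diff below uses a local end_date variable inside the method):

    import datetime

    END_DATE = datetime.date(2014, 6, 1)  # name used only in this sketch

    def connect(self, autoconnecting=False):
        # Past the end-of-service date: never connect, but leave the rest
        # of syncdaemon (DBus API, local state) running as usual.
        if datetime.date.today() >= END_DATE:
            return
        # ...credential lookup and the actual connection follow here...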

1407. By dobey

Need to import datetime here.

1408. By dobey

Fix the logic in the date check.
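
(Presumably the logic fix is the comparison direction: the guard must return when today is on or after the end date, i.e. datetime.date.today() >= end_date, which is what the final diff below shows.)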

Revision history for this message
Facundo Batista (facundo) wrote :

Nice!

review: Approve

Preview Diff

=== modified file 'ubuntuone/syncdaemon/interaction_interfaces.py'
--- ubuntuone/syncdaemon/interaction_interfaces.py 2013-01-22 20:03:12 +0000
+++ ubuntuone/syncdaemon/interaction_interfaces.py 2014-05-22 19:23:08 +0000
@@ -38,6 +38,7 @@
38"""38"""
3939
40import collections40import collections
41import datetime
41import logging42import logging
42import os43import os
43import uuid44import uuid
@@ -1247,6 +1248,11 @@
         for login/registration, only already existent credentials will be used.
 
         """
+        # Avoid connecting after June 1.
+        end_date = datetime.date(2014, 6, 1)
+        if datetime.date.today() >= end_date:
+            return
+
         if self.oauth_credentials is not None:
             logger.debug('connect: oauth credentials were given by parameter.')
             ckey = csecret = key = secret = None
 
=== modified file 'ubuntuone/syncdaemon/main.py'
--- ubuntuone/syncdaemon/main.py 2014-04-10 19:58:12 +0000
+++ ubuntuone/syncdaemon/main.py 2014-05-22 19:23:08 +0000
@@ -31,7 +31,6 @@
 import logging
 import os
 import sys
-import datetime
 
 from dirspec.utils import user_home
 from twisted.internet import defer, reactor, task
@@ -107,58 +106,51 @@
         if not throttling_enabled:
             throttling_enabled = user_config.get_throttling()
 
-        end_date = datetime.date(2014, 6, 1)
-        if datetime.date.today() < end_date:
-            self.logger.info("Starting Ubuntu One client version %s",
-                             clientdefs.VERSION)
-            self.logger.info("Using %r as root dir", self.root_dir)
-            self.logger.info("Using %r as data dir", self.data_dir)
-            self.logger.info("Using %r as shares root dir", self.shares_dir)
-            self.db = tritcask.Tritcask(tritcask_dir)
-            self.vm = volume_manager.VolumeManager(self)
-            self.fs = filesystem_manager.FileSystemManager(
-                data_dir, partials_dir, self.vm, self.db)
-            self.event_q = event_queue.EventQueue(
-                self.fs, ignore_files, monitor_class=monitor_class)
-            self.fs.register_eq(self.event_q)
-
-            # subscribe VM to EQ, to be unsubscribed in shutdown
-            self.event_q.subscribe(self.vm)
-            self.vm.init_root()
-
-            # we don't have the oauth tokens yet, we 'll get them later
-            self.action_q = action_queue.ActionQueue(self.event_q, self,
-                                                     host, port,
-                                                     dns_srv, ssl,
-                                                     disable_ssl_verify,
-                                                     read_limit, write_limit,
-                                                     throttling_enabled)
-            self.hash_q = hash_queue.HashQueue(self.event_q)
-            events_nanny.DownloadFinishedNanny(self.fs, self.event_q,
-                                               self.hash_q)
-
-            # call StateManager after having AQ
-            self.state_manager = StateManager(self, handshake_timeout)
-
-            self.sync = sync.Sync(self)
-            self.lr = local_rescan.LocalRescan(self.vm, self.fs,
-                                               self.event_q, self.action_q)
-
-            self.external = SyncdaemonService(main=self,
-                                              send_events=broadcast_events)
-            self.external.oauth_credentials = oauth_credentials
-            if user_config.get_autoconnect():
-                self.external.connect(autoconnecting=True)
-
-            self.status_listener = None
-            self.start_status_listener()
-
-            self.mark = task.LoopingCall(self.log_mark)
-            self.mark.start(mark_interval)
-        else:
-            if reactor.running:
-                reactor.stop()
-            sys.exit(0)
+        self.logger.info("Starting Ubuntu One client version %s",
+                         clientdefs.VERSION)
+        self.logger.info("Using %r as root dir", self.root_dir)
+        self.logger.info("Using %r as data dir", self.data_dir)
+        self.logger.info("Using %r as shares root dir", self.shares_dir)
+        self.db = tritcask.Tritcask(tritcask_dir)
+        self.vm = volume_manager.VolumeManager(self)
+        self.fs = filesystem_manager.FileSystemManager(
+            data_dir, partials_dir, self.vm, self.db)
+        self.event_q = event_queue.EventQueue(self.fs, ignore_files,
+                                              monitor_class=monitor_class)
+        self.fs.register_eq(self.event_q)
+
+        # subscribe VM to EQ, to be unsubscribed in shutdown
+        self.event_q.subscribe(self.vm)
+        self.vm.init_root()
+
+        # we don't have the oauth tokens yet, we 'll get them later
+        self.action_q = action_queue.ActionQueue(self.event_q, self,
+                                                 host, port,
+                                                 dns_srv, ssl,
+                                                 disable_ssl_verify,
+                                                 read_limit, write_limit,
+                                                 throttling_enabled)
+        self.hash_q = hash_queue.HashQueue(self.event_q)
+        events_nanny.DownloadFinishedNanny(self.fs, self.event_q, self.hash_q)
+
+        # call StateManager after having AQ
+        self.state_manager = StateManager(self, handshake_timeout)
+
+        self.sync = sync.Sync(self)
+        self.lr = local_rescan.LocalRescan(self.vm, self.fs,
+                                           self.event_q, self.action_q)
+
+        self.external = SyncdaemonService(main=self,
+                                          send_events=broadcast_events)
+        self.external.oauth_credentials = oauth_credentials
+        if user_config.get_autoconnect():
+            self.external.connect(autoconnecting=True)
+
+        self.status_listener = None
+        self.start_status_listener()
+
+        self.mark = task.LoopingCall(self.log_mark)
+        self.mark.start(mark_interval)
 
     def start_status_listener(self):
         """Start the status listener if it is configured to start."""
