Merge lp:~stefanor/ibid/exchange-336443 into lp:~ibid-core/ibid/old-trunk-pack-0.92

Proposed by Stefano Rivera
Status: Merged
Approved by: Michael Gorven
Approved revision: 575
Merged at revision: 566
Proposed branch: lp:~stefanor/ibid/exchange-336443
Merge into: lp:~ibid-core/ibid/old-trunk-pack-0.92
Diff against target: None lines
To merge this branch: bzr merge lp:~stefanor/ibid/exchange-336443
Reviewer             Review Type    Date Requested    Status
Michael Gorven                                        Approve
Jonathan Hitchcock                                    Approve
Review via email: mp+4267@code.launchpad.net
Revision history for this message
Stefano Rivera (stefanor) wrote:

Added a dependency. This should probably be tested on hardy.
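
The new dependency is python-html5lib, which the branch uses to build BeautifulSoup parse trees through html5lib's "beautifulsoup" treebuilder (see get_soup() in the diff below). A minimal sketch of that usage, assuming the hardy package exposes the same API; the URL is only an example:

    # Sketch only: mirrors what get_soup() in this branch does, minus the
    # charset handling. Python 2, html5lib with the BeautifulSoup treebuilder.
    from urllib2 import urlopen
    from html5lib import HTMLParser, treebuilders

    f = urlopen('http://bash.org/?random')
    parser = HTMLParser(tree=treebuilders.getTreeBuilder("beautifulsoup"))
    soup = parser.parse(f.read())
    f.close()
    print soup.find('p', 'quote')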

lp:~stefanor/ibid/exchange-336443 updated
564. By Stefano Rivera

Move get_soup to utils

565. By Stefano Rivera

Merge from trunk

Revision history for this message
Jonathan Hitchcock (vhata):
review: Approve
Revision history for this message
Michael Gorven (mgorven) wrote:

Looks good. If this works on hardy I'm happy to merge.

lp:~stefanor/ibid/exchange-336443 updated
566. By Stefano Rivera

Further INSTALL work

567. By Stefano Rivera

Use elementtree for XE.com

568. By Stefano Rivera

While python-html5lib suggests celementtree, we should probably ensure it gets installed

569. By Stefano Rivera

Wrong DNS library

570. By Stefano Rivera

Other niceties

571. By Stefano Rivera

Yet more niceties

572. By Stefano Rivera

Use pure BS, if BS is requested

573. By Stefano Rivera

Don't stomp all over the other 'convert' functions

574. By Stefano Rivera

Unicode fix

575. By Stefano Rivera

Merge from trunk

Revision history for this message
Michael Gorven (mgorven):
review: Approve

Preview Diff

=== modified file 'INSTALL'
--- INSTALL 2009-02-24 16:03:17 +0000
+++ INSTALL 2009-03-07 13:40:14 +0000
@@ -1,32 +1,26 @@
 Installation Instructions for Debian/Ubuntu Systems:
 
 Add the Ibid PPA to your APT sources:
-# echo deb http://ppa.launchpad.net/ibid-dev/ppa/ubuntu `lsb_release -c | cut -f2` main > /etc/apt/sources.list.d/ibid
+# echo deb http://ppa.launchpad.net/ibid-core/ppa/ubuntu `lsb_release -c | cut -f2` main > /etc/apt/sources.list.d/ibid.list
+# apt-key adv --recv-keys --keyserver keyserver.ubuntu.com c2d0f8531bba37930c0d85e3d59f9e8dfd1c44ba
+# aptitude update
 
 Install required modules:
-# apt-get install python-virtualenv python-soappy python-twisted \
+# aptitude install python-virtualenv python-soappy python-twisted \
     python-configobj python-sqllite2 python-feedparser \
-    python-httplib2 python-beautifulsoup python-dictclient \
+    python-httplib2 python-html5lib python-dictclient \
     python-imdbpy python-dns python-simplejson \
-    python-jinja pysilc python-pinder
+    python-jinja python-pysilc python-pinder
 
 Switch to the user ibid will be running as.
-Set up a virtual Python environment
-(this will create a directory called ibid):
-$ virtualenv ibid
-$ source ibid/bin/activate
-
-Change directory to where you extracted Ibid.
-
-Set up any dependancies:
-$ ./setup.py install
+$ export PYTHONPATH=.
 
 Set up your bot:
-$ ibid-setup
+$ scripts/ibid-setup
 $ mkdir logs
 
 Run your bot:
-$ ibid
+$ scripts/ibid
 
 Other things we recommend:
 * Install a dictd on localhost for the dict plugin (debian/ubuntu package "dictd")
 
=== modified file 'ibid/plugins/lookup.py'
--- ibid/plugins/lookup.py 2009-03-02 09:21:35 +0000
+++ ibid/plugins/lookup.py 2009-03-07 13:40:04 +0000
@@ -3,10 +3,11 @@
 from time import time
 from datetime import datetime
 from simplejson import loads
+import cgi
 import re
 
 import feedparser
-from BeautifulSoup import BeautifulSoup
+from html5lib import HTMLParser, treebuilders
 
 from ibid.plugins import Processor, match, handler
 from ibid.config import Option
@@ -14,6 +15,25 @@
 
 help = {}
 
+def get_soup(url, data=None, headers={}):
+    "Request a URL and create a BeautifulSoup parse tree from it"
+
+    req = Request(url, data, headers)
+    f = urlopen(req)
+    data = f.read()
+    f.close()
+
+    encoding = None
+    contentType = f.headers.get('content-type')
+    if contentType:
+        (mediaType, params) = cgi.parse_header(contentType)
+        encoding = params.get('charset')
+
+    treebuilder = treebuilders.getTreeBuilder("beautifulsoup")
+    parser = HTMLParser(tree=treebuilder)
+
+    return parser.parse(data, encoding = encoding)
+
 help['bash'] = u'Retrieve quotes from bash.org.'
 class Bash(Processor):
     u"bash[.org] (random|<number>)"
@@ -22,15 +42,13 @@
 
     @match(r'^bash(?:\.org)?\s+(random|\d+)$')
     def bash(self, event, quote):
-        f = urlopen('http://bash.org/?%s' % quote.lower())
-        soup = BeautifulSoup(f.read(), convertEntities=BeautifulSoup.HTML_ENTITIES)
-        f.close()
+        soup = get_soup('http://bash.org/?%s' % quote.lower())
 
         if quote.lower() == "random":
-            number = u"".join(soup.find('p', attrs={'class': 'quote'}).find('b').contents)
+            number = u"".join(soup.find('p', 'quote').find('b').contents)
             event.addresponse(u"%s:" % number)
 
-        quote = soup.find('p', attrs={'class': 'qt'})
+        quote = soup.find('p', 'qt')
         if not quote:
             event.addresponse(u"There's no such quote, but if you keep talking like that maybe there will be.")
         else:
@@ -100,12 +118,14 @@
     feature = "fml"
 
     def remote_get(self, id):
-        f = urlopen('http://www.fmylife.com/' + str(id))
-        soup = BeautifulSoup(f.read())
-        f.close()
+        soup = get_soup('http://www.fmylife.com/' + str(id))
 
         quote = soup.find('div', id='wrapper').div.p
-        return quote and u'"%s"' % (quote.contents[0],) or None
+        if quote:
+            url = u"http://www.fmylife.com" + quote.find('a', 'fmllink')['href']
+            quote = u"".join(tag.contents[0] for tag in quote.findAll(True))
+
+            return u'%s: "%s"' % (url, quote)
 
     @match(r'^(?:fml\s+|http://www\.fmylife\.com/\S+/)(\d+|random)$')
     def fml(self, event, id):
@@ -170,14 +190,12 @@
     currencies = []
 
     def _load_currencies(self):
-        request = Request('http://www.xe.com/iso4217.php', '', self.headers)
-        f = urlopen(request)
-        soup = BeautifulSoup(f.read())
-        f.close()
+        soup = get_soup('http://www.xe.com/iso4217.php', headers=self.headers)
 
         self.currencies = []
-        for tr in soup.find('table', attrs={'class': 'tbl_main'}).table.findAll('tr'):
+        for tr in soup.find('table', 'tbl_main').table.findAll('tr'):
             code, place = tr.findAll('td')
+            code = code.contents[0]
             place = ''.join(place.findAll(text=True))
             place, name = place.find(',') != -1 and place.split(',', 1) or place.split(' ', 1)
             self.currencies.append((code.string, place.strip(), name.strip()))
@@ -185,12 +203,9 @@
     @match(r'^(?:exchange|convert)\s+([0-9.]+)\s+(\S+)\s+(?:for|to|into)\s+(\S+)$')
     def exchange(self, event, amount, frm, to):
         data = {'Amount': amount, 'From': frm, 'To': to}
-        request = Request('http://www.xe.com/ucc/convert.cgi', urlencode(data), self.headers)
-        f = urlopen(request)
-        soup = BeautifulSoup(f.read())
-        f.close()
+        soup = get_soup('http://www.xe.com/ucc/convert.cgi', urlencode(data), self.headers)
 
-        event.addresponse(soup.findAll('span', attrs={'class': 'XEsmall'})[1].contents[0])
+        event.addresponse(u" ".join(tag.contents[0] for tag in soup.findAll('h2', 'XE')))
 
     @match(r'^(?:currency|currencies)\s+for\s+(?:the\s+)?(.+)$')
     def currency(self, event, place):
@@ -238,9 +253,7 @@
         if place.lower() in self.places:
             place = self.places[place.lower()]
 
-        f = urlopen('http://m.wund.com/cgi-bin/findweather/getForecast?brand=mobile_metric&query=' + quote(place))
-        soup = BeautifulSoup(f.read(), convertEntities=BeautifulSoup.HTML_ENTITIES)
-        f.close()
+        soup = get_soup('http://m.wund.com/cgi-bin/findweather/getForecast?brand=mobile_metric&query=' + quote(place))
 
         if soup.body.center and soup.body.center.b.string == 'Search not found:':
             raise Weather.WeatherException(u'City not found')
@@ -257,7 +270,7 @@
         soup = self._get_page(place)
         tds = soup.table.table.findAll('td')
 
-        values = {'place': tds[0].findAll('b')[1].string, 'time': tds[0].findAll('b')[0].string}
+        values = {'place': tds[0].findAll('b')[1].contents[0], 'time': tds[0].findAll('b')[0].contents[0]}
         for index, td in enumerate(tds[2::2]):
             values[self.labels[index]] = self._text(td)
 
@@ -268,7 +281,7 @@
         forecasts = []
 
         for td in soup.findAll('table')[2].findAll('td', align='left'):
-            day = td.b.string
+            day = td.b.contents[0]
             forecast = td.contents[2]
             forecasts.append('%s: %s' % (day, self._text(forecast)))
 
