# Source Generated with Decompyle++
# File: in.pyo (Python 2.6)

import hooks
import random
from util.net import UrlQuery
from util import Storage as S
import common.asynchttp as asynchttp
import common
import lxml.etree as ET
from logging import getLogger

log = getLogger('geo_trends')

active_geoip = None
did_receive_geoip = False
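
# The most recent geoip lookup is cached at module level; on_geoip is a hook
# callback (presumably registered elsewhere in Digsby's hooks plugin system)
# that records whatever location dict it is handed so CityGridAdSource can
# consult it later.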
def on_geoip(geoip, *a, **k):
    global did_receive_geoip, active_geoip
    did_receive_geoip = True
    active_geoip = geoip


class PlacementMethodReprMeta(type):

    def __repr__(cls):
        return '<PlacementMethod %r>' % cls.__name__


class PlacementMethod(object):
    __metaclass__ = PlacementMethodReprMeta

    @classmethod
    def supports_location(cls, location):
        # Decompyle++ emitted a broken "(all,)((lambda .0: ...))" construct
        # here; this is the plain generator-expression reconstruction.
        return all(location.get(a, None) for a in cls.location_attrs)


class citystate(PlacementMethod):
    location_attrs = ('city', 'state')

    @staticmethod
    def urlargs(location):
        return dict(where='%s, %s' % (location['city'], location['state']))


class ipaddress(PlacementMethod):
    location_attrs = ('ip',)

    @staticmethod
    def urlargs(location):
        return dict(client_ip='%s' % location['ip'])


class zipcode(PlacementMethod):
    location_attrs = ('postal',)

    @staticmethod
    def urlargs(location):
        return dict(where='%s' % location['postal'])


location_methods = [
    citystate,
    ipaddress,
    zipcode]


def _get_possible_methods(location):
    if not location:
        return []
    # Decompyle++ left only "return _[1]" here; reconstructed as the obvious
    # filter over location_methods using supports_location().
    return [method for method in location_methods if method.supports_location(location)]

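# Illustrative sketch only (the location values below are hypothetical, not
# taken from the original module): each PlacementMethod subclass declares the
# location attributes it needs and how to turn them into CityGrid query args.
#
#   location = dict(city='Rochester', state='NY', ip='192.0.2.1')
#   _get_possible_methods(location)        # -> [citystate, ipaddress]
#   citystate.urlargs(location)            # -> {'where': 'Rochester, NY'}
#   zipcode.supports_location(location)    # -> False (no 'postal' key)
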
from feed_trends import NewsItem


class CityGridNewsItem(NewsItem):
    __slots__ = ()

    def notify_click(self):
        ppe = self.content['net_ppe']
        if ppe is not None:
            cents = int(ppe * 100)
            hooks.notify('digsby.statistics.feed_ads.citygrid.click_cents', cents)

        hooks.notify('digsby.statistics.feed_ads.citygrid.click')

    def _notify_impression_hook(self):
        hooks.notify('digsby.statistics.feed_ads.citygrid.impression')


ad_keywords = '''
    food
    lunch
    restaurant
    cuisine
    bar
    pub
    clothing
    shopping
    beauty
    spa
    automotive
    jewelry
    furniture
    fitness
    education
'''.strip().split()


def _get_ad_url(location, method, keyword):
    url_kwargs = dict(placement=method.__name__, what=keyword)
    publisher = common.pref('social.feed_ads_publisher', 'digsby')
    if publisher and publisher.lower() != 'none':
        url_kwargs['publisher'] = publisher

    url_kwargs.update(method.urlargs(location))
    # The decompiled "(lambda .0: ...)" construct was a generator expression
    # feeding dict(): UTF-8 encode every value before building the query.
    url_kwargs = dict((k, to_utf8(v)) for k, v in url_kwargs.iteritems())
    url = UrlQuery('http://api.citygridmedia.com/ads/custom/v2/where', **url_kwargs)
    return url

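# For reference (illustrative values, not captured output): a zipcode placement
# for the keyword 'food' with the default publisher yields a query roughly like
#
#   http://api.citygridmedia.com/ads/custom/v2/where?placement=zipcode&what=food&publisher=digsby&where=14623
#
# with the exact parameter order left to UrlQuery.
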

def to_utf8(s):
    if isinstance(s, unicode):
        return s.encode('utf8')
    return s


httpopen = asynchttp.httpopen


class CityGridAdSource(object):

    def __init__(self, campaign):
        self.set_location(active_geoip)

    @classmethod
    def enabled(cls):
        # Decompiled as "if active_geoip: pass" followed by the lookup; the
        # original short-circuit only enables CityGrid ads for US users.
        return active_geoip and active_geoip.get('country', '').upper() == 'US'

    def set_location(self, location):
        self.location = location

    def request_ads(self, success, error=None):
        from feed_trends import NewsItemList

        if error is None:

            def error(*a):
                # Fallback error handler; the decompiled print statements were
                # garbled, so just dump whatever arguments arrive.
                print 'ERROR:', a
                print

        location = self.location
        possible_methods = _get_possible_methods(location)
        if not possible_methods:
            return success([])

        placement_method = random.choice(possible_methods)

        # Pick a couple of random ad keywords for this request.
        NUM_KEYWORDS = 2
        keywords_copy = ad_keywords[:]
        random.shuffle(keywords_copy)
        chosen_keywords = keywords_copy[:NUM_KEYWORDS]

        used_urls = []
        ctx = dict(count=0)
        all_ads = []
        all_errors = []

        def maybe_done():
            # Called once per finished keyword request; only fire the caller's
            # callback after every outstanding request has succeeded or failed.
            ctx['count'] += 1
            if ctx['count'] < NUM_KEYWORDS:
                return

            if all_ads:
                self.endpoints = []
                ad_objects = []
                for url, data, ads, kwd in all_ads:
                    ad_objects.extend(ads)
                    from datetime import datetime
                    self.endpoints.append(S(last_received_xml=prettyxml(data),
                                            last_keyword=kwd,
                                            last_url=url,
                                            last_update_time=datetime.now().isoformat()))

                return success(NewsItemList(1, 0, 0, items=ad_objects))

            # Decompiled as the uncallable "all_ads(error if all_errors else
            # None)"; reconstructed as reporting the collected errors.
            error(all_errors if all_errors else None)

        def _url_success(url, data, ads, keyword):
            all_ads.append((url, data, ads, keyword))
            maybe_done()

        def _url_error(url, e, keyword):
            all_errors.append(e)
            maybe_done()

        for kwd in chosen_keywords:
            url = _get_ad_url(location, placement_method, kwd)

            # Default arguments bind this iteration's url/keyword (the
            # decompiled defaults were nonsense tuples).
            def on_success(req, resp, url=url, keyword=kwd):
                try:
                    data = resp.read()
                    ads = newsitems_from_citygrid_xml(data)
                except Exception, e:
                    _url_error(url, e, keyword)
                else:
                    _url_success(url, data, ads, keyword)

            httpopen(url, success=on_success, error=_url_error)


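# Illustrative usage sketch (not part of the original module; the campaign
# value and callbacks here are made up): the feed-ad machinery presumably
# drives this source roughly like
#
#   if CityGridAdSource.enabled():
#       source = CityGridAdSource(campaign=None)
#       source.request_ads(
#           success=lambda items: log.info('got %d citygrid ads', len(items)),
#           error=lambda *a: log.error('citygrid ad request failed: %r', a))

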
_default_image_url_path = None


def _default_image_url():
    global _default_image_url_path
    from path import path
    if _default_image_url_path is None:
        _default_image_url_path = (path(__file__).parent / 'res' / 'information.png').url()

    return _default_image_url_path


def newsitems_from_citygrid_xml(xmlstring):
    # strip_html was imported in the original; its use was lost along with the
    # snippet logic noted below.
    from feed_trends import NewsItemList, strip_html

    xml = ET.fromstring(xmlstring)
    items = []

    # Decompyle++ scrambled the loop header into "for findtext in xml: ad =
    # None" and named the text helper "find"; reconstructed so the helper is
    # the findtext() the call sites below expect.
    for ad in xml:
        if ad.tag != 'ad':
            continue

        def findtext(s):
            elem = ad.find(s)
            if elem is not None:
                return elem.text
            return ''

        def findurl(s):
            url = findtext(s)
            if url and not url.startswith('http'):
                url = 'http://' + url

            return url

        def findfloat(s):
            f = findtext(s)
            try:
                return float(f)
            except Exception:
                return None

        name = findtext('name')
        tagline = findtext('tagline')

        # The decompiler reduced the original checks on name, description, and
        # ad_image_url to empty "if ...: pass" branches, so their effect is
        # lost; the unconditional empty snippet is all that survived.
        snippet = ''

        item = CityGridNewsItem(title=tagline,
                                display_url=findurl('ad_display_url'),
                                snippet=snippet,
                                source='',
                                source_logo_url=_default_image_url(),
                                redirect_url=findurl('ad_destination_url'),
                                tracking_url='',
                                shortened_url=None,
                                keyword=_('Featured Content'),
                                content=dict(street=findtext('street'),
                                             phone=findtext('phone'),
                                             reviews=findtext('reviews'),
                                             net_ppe=findfloat('net_ppe')),
                                show_snippet=True)
        items.append(item)

    return NewsItemList(version='1', time=0, max_age=1080000, items=items)

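# For reference, the parser above only relies on the element names it passes
# to findtext()/findurl()/findfloat(); the envelope shown here is an assumption
# about the CityGrid custom-ads response, not a captured document:
#
#   <ads>
#     <ad>
#       <name>...</name>
#       <tagline>...</tagline>
#       <description>...</description>
#       <ad_display_url>example.com</ad_display_url>
#       <ad_destination_url>http://example.com/offer</ad_destination_url>
#       <ad_image_url>...</ad_image_url>
#       <street>...</street>
#       <phone>...</phone>
#       <reviews>...</reviews>
#       <net_ppe>0.42</net_ppe>
#     </ad>
#   </ads>
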

def prettyxml(xml):
    if isinstance(xml, basestring):
        xml = ET.fromstring(xml)

    return ET.tostring(xml, pretty_print=True)