# Source Generated with Decompyle++
# File: in.pyc (Python 2.6)

from __future__ import with_statement

__license__   = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

import re
import itertools
import functools

from itertools import repeat
from datetime import timedelta
from threading import Thread, RLock
from Queue import Queue, Empty

from PyQt4.Qt import QImage, Qt

from calibre.utils.config import tweaks
from calibre.utils.date import parse_date, now, UNDEFINED_DATE
from calibre.utils.search_query_parser import SearchQueryParser
from calibre.utils.pyparsing import ParseException
from calibre.ebooks.metadata import title_sort
from calibre import fit_image

class CoverCache(Thread):

    def __init__(self, db):
        Thread.__init__(self)
        self.daemon = True
        self.db = db
        self.load_queue = Queue()
        self.keep_running = True
        self.cache = {}
        self.lock = RLock()
        self.null_image = QImage()

    def stop(self):
        self.keep_running = False

    def _image_for_id(self, id_):
        img = self.db.cover(id_, index_is_id=True, as_image=True)
        if img is None:
            img = QImage()
        if not img.isNull():
            scaled, nwidth, nheight = fit_image(img.width(), img.height(), 600, 800)
            if scaled:
                img = img.scaled(nwidth, nheight, Qt.KeepAspectRatio,
                        Qt.SmoothTransformation)
        return img

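    # Illustrative note: fit_image() returns a (scaled, new_width, new_height)
    # tuple fitting the cover into the 600x800 bounding box while preserving
    # aspect ratio, e.g. a 1200x1600 cover should come back as (True, 600, 800)
    # and is then smooth-scaled before being cached.
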
    def run(self):
        while self.keep_running:
            try:
                id_ = self.load_queue.get(True, 1)
            except Empty:
                continue
            try:
                img = self._image_for_id(id_)
            except:
                import traceback
                traceback.print_exc()
                continue
            with self.lock:
                self.cache[id_] = img

    def set_cache(self, ids):
        with self.lock:
            already_loaded = set([])
            for id in self.cache.keys():
                if id in ids:
                    already_loaded.add(id)
                else:
                    self.cache.pop(id)
        for id_ in set(ids) - already_loaded:
            self.load_queue.put(id_)

    def cover(self, id_):
        with self.lock:
            return self.cache.get(id_, self.null_image)

    def clear_cache(self):
        with self.lock:
            self.cache = {}

    def refresh(self, ids):
        with self.lock:
            for id_ in ids:
                self.cache.pop(id_, None)
                self.load_queue.put(id_)

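# A minimal usage sketch for CoverCache, assuming `db` is the library database
# whose .cover(id, index_is_id=True, as_image=True) method is called above
# (names outside this file, such as all_ids() and book_id, are assumptions):
#
#   cache = CoverCache(db)
#   cache.start()                    # Thread.start() runs the loader loop
#   cache.set_cache(db.all_ids())    # queue covers for the visible books
#   img = cache.cover(book_id)       # QImage; null until the cover is loaded
#   cache.stop()
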
CONTAINS_MATCH = 0
EQUALS_MATCH   = 1
REGEXP_MATCH   = 2

def _match(query, value, matchkind):
    for t in value:
        t = t.lower()
        try:
            if ((matchkind == EQUALS_MATCH and query == t) or
                (matchkind == REGEXP_MATCH and re.search(query, t, re.I)) or
                (matchkind == CONTAINS_MATCH and query in t)):
                return True
        except re.error:
            pass
    return False

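# Example (illustrative): _match(u'tolkien', [u'J. R. R. Tolkien'], CONTAINS_MATCH)
# returns True; with EQUALS_MATCH the whole value must equal the query, and with
# REGEXP_MATCH the query is applied as a case-insensitive regular expression.
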
class ResultCache(SearchQueryParser):

    def __init__(self, FIELD_MAP, field_metadata):
        self.FIELD_MAP = FIELD_MAP
        self._map = self._map_filtered = self._data = []
        self.first_sort = True
        self.search_restriction = ''
        self.field_metadata = field_metadata
        self.all_search_locations = field_metadata.get_search_terms()
        SearchQueryParser.__init__(self, self.all_search_locations)
        self.build_date_relop_dict()
        self.build_numeric_relop_dict()

    def __getitem__(self, row):
        return self._data[self._map_filtered[row]]

    def __len__(self):
        return len(self._map_filtered)

    def __iter__(self):
        for id in self._map_filtered:
            yield self._data[id]

    def iterall(self):
        for x in self._data:
            if x is not None:
                yield x

    def iterallids(self):
        idx = self.FIELD_MAP['id']
        for x in self.iterall():
            yield x[idx]

    def universal_set(self):
        # All book ids currently in the cache; self._data has None holes for
        # ids that no longer exist in the library.
        return set([i[0] for i in self._data if i is not None])

    def build_date_relop_dict(self):

        def relop_eq(db, query, field_count):
            # Mirror of relop_gt/relop_lt: compare year, then month, then day,
            # depending on how many fields the query supplied.
            if db.year == query.year:
                if field_count == 1:
                    return True
                if db.month == query.month:
                    if field_count == 2:
                        return True
                    return db.day == query.day
            return False

        def relop_gt(db, query, field_count):
            if db.year > query.year:
                return True
            if field_count > 1 and db.year == query.year:
                if db.month > query.month:
                    return True
                return field_count == 3 and db.month == query.month and \
                        db.day > query.day
            return False

        def relop_lt(db, query, field_count):
            if db.year < query.year:
                return True
            if field_count > 1 and db.year == query.year:
                if db.month < query.month:
                    return True
                return field_count == 3 and db.month == query.month and \
                        db.day < query.day
            return False

        def relop_ne(db, query, field_count):
            return not relop_eq(db, query, field_count)

        def relop_ge(db, query, field_count):
            return not relop_lt(db, query, field_count)

        def relop_le(db, query, field_count):
            return not relop_gt(db, query, field_count)

        # Each entry maps an operator to (prefix length, comparison function).
        self.date_search_relops = {
                    '=' : [1, relop_eq],
                    '>' : [1, relop_gt],
                    '<' : [1, relop_lt],
                    '!=': [2, relop_ne],
                    '>=': [2, relop_ge],
                    '<=': [2, relop_le],
                }

    def get_dates_matches(self, location, query):
        matches = set([])
        if len(query) < 2:
            return matches
        if location == 'date':
            location = 'timestamp'
        loc = self.field_metadata[location]['rec_index']

        if query == 'false':
            for item in self._data:
                if item is None:
                    continue
                if item[loc] is None or item[loc] <= UNDEFINED_DATE:
                    matches.add(item[0])
            return matches
        if query == 'true':
            for item in self._data:
                if item is None:
                    continue
                if item[loc] is not None and item[loc] > UNDEFINED_DATE:
                    matches.add(item[0])
            return matches

        relop = None
        for k in self.date_search_relops.keys():
            if query.startswith(k):
                (p, relop) = self.date_search_relops[k]
                query = query[p:]
        if relop is None:
            (p, relop) = self.date_search_relops['=']

        if query == _('today'):
            qd = now()
            field_count = 3
        elif query == _('yesterday'):
            qd = now() - timedelta(1)
            field_count = 3
        elif query == _('thismonth'):
            qd = now()
            field_count = 2
        elif query.endswith(_('daysago')):
            num = query[0:-len(_('daysago'))]
            try:
                qd = now() - timedelta(int(num))
            except:
                raise ParseException(query, len(query), 'Number conversion error', self)
            field_count = 3
        else:
            try:
                qd = parse_date(query)
            except:
                raise ParseException(query, len(query), 'Date conversion error', self)
            if '-' in query:
                field_count = query.count('-') + 1
            else:
                field_count = query.count('/') + 1

        for item in self._data:
            if item is None or item[loc] is None:
                continue
            if relop(item[loc], qd, field_count):
                matches.add(item[0])
        return matches

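    # Illustrative examples of date queries handled above (as typed after a
    # "date:" or other datetime-field prefix in a calibre search):
    #   '>2010-01-01'  -> relop_gt, field_count=3
    #   '2010/05'      -> relop_eq, field_count=2 (year and month only)
    #   'yesterday', '10daysago', 'false' (books with an undefined date)
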
    def build_numeric_relop_dict(self):
        self.numeric_search_relops = {
                    '=' : [1, lambda r, q: r == q],
                    '>' : [1, lambda r, q: r > q],
                    '<' : [1, lambda r, q: r < q],
                    '!=': [2, lambda r, q: r != q],
                    '>=': [2, lambda r, q: r >= q],
                    '<=': [2, lambda r, q: r <= q],
                }

    def get_numeric_matches(self, location, query):
        matches = set([])
        if len(query) == 0:
            return matches
        if query == 'false':
            query = '0'
        elif query == 'true':
            query = '!=0'

        relop = None
        for k in self.numeric_search_relops.keys():
            if query.startswith(k):
                (p, relop) = self.numeric_search_relops[k]
                query = query[p:]
        if relop is None:
            (p, relop) = self.numeric_search_relops['=']

        loc = self.field_metadata[location]['rec_index']
        dt = self.field_metadata[location]['datatype']
        if dt == 'int':
            cast = lambda x: int(x)
            adjust = lambda x: x
        elif dt == 'rating':
            cast = lambda x: int(x)
            adjust = lambda x: x / 2
        elif dt == 'float':
            cast = lambda x: float(x)
            adjust = lambda x: x

        if len(query) > 1:
            mult = query[-1:].lower()
            mult = {'k': 1024, 'm': 1024**2, 'g': 1024**3}.get(mult, 1)
            if mult != 1:
                query = query[:-1]
        else:
            mult = 1
        try:
            q = cast(query) * mult
        except:
            return matches

        for item in self._data:
            if item is None:
                continue
            if not item[loc]:
                i = 0
            else:
                i = adjust(item[loc])
            if relop(i, q):
                matches.add(item[0])
        return matches

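    # Illustrative examples of numeric queries handled above:
    #   'size:>1m'       -> larger than 1 MiB (the k/m/g suffix multiplies by 1024^n)
    #   'rating:4'       -> 4 stars (ratings are stored doubled, so item values
    #                       are halved by adjust() before comparing)
    #   'true' / 'false' -> rewritten to '!=0' / '0' before parsing
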
    def get_matches(self, location, query):
        matches = set([])
        if query and query.strip():
            location = self.field_metadata.search_term_to_key(location.lower().strip())

            # Dates and numbers have their own relational syntax
            if location in self.field_metadata and \
                    self.field_metadata[location]['datatype'] == 'datetime':
                return self.get_dates_matches(location, query.lower())
            if location in self.field_metadata and \
                    self.field_metadata[location]['datatype'] in ('rating', 'int', 'float'):
                return self.get_numeric_matches(location, query.lower())

            # Everything else is text matching. A leading '=' forces an exact
            # match, '~' a regexp match and '\' escapes those prefixes.
            matchkind = CONTAINS_MATCH
            if len(query) > 1:
                if query.startswith('\\'):
                    query = query[1:]
                elif query.startswith('='):
                    matchkind = EQUALS_MATCH
                    query = query[1:]
                elif query.startswith('~'):
                    matchkind = REGEXP_MATCH
                    query = query[1:]
            if matchkind != REGEXP_MATCH:
                # Leave case alone in regexps, it can be significant (e.g. \S, \W)
                query = query.lower()

            if not isinstance(query, unicode):
                query = query.decode('utf-8')

            db_col = {}
            exclude_fields = []  # fields to not check when matching against text
            col_datatype = []
            is_multiple_cols = {}
            for x in range(len(self.FIELD_MAP)):
                col_datatype.append('')
            for x in self.field_metadata:
                if len(self.field_metadata[x]['search_terms']):
                    db_col[x] = self.field_metadata[x]['rec_index']
                    if self.field_metadata[x]['datatype'] not in ('text', 'comments', 'series'):
                        exclude_fields.append(db_col[x])
                    col_datatype[db_col[x]] = self.field_metadata[x]['datatype']
                    is_multiple_cols[db_col[x]] = self.field_metadata[x]['is_multiple']

            try:
                rating_query = int(query) * 2
            except:
                rating_query = None

            location = [location] if location != 'all' else list(db_col.keys())
            for i, loc in enumerate(location):
                location[i] = db_col[loc]

            # Look up the tweak here so the string compare is not in the loop
            bools_are_tristate = tweaks['bool_custom_columns_are_tristate'] == 'yes'

            for loc in location:
                if loc == db_col['authors']:
                    # The db stores authors with commas changed to bars
                    q = query.replace(',', '|')
                else:
                    q = query
                for item in self._data:
                    if item is None:
                        continue

                    if col_datatype[loc] == 'bool':
                        v = item[loc]
                        if not bools_are_tristate:
                            if v is None or not v:  # unset or False
                                if q in [_('no'), _('unchecked'), 'false']:
                                    matches.add(item[0])
                            else:  # explicitly True
                                if q in [_('yes'), _('checked'), 'true']:
                                    matches.add(item[0])
                        else:
                            if v is None:
                                if q in [_('empty'), _('blank'), 'false']:
                                    matches.add(item[0])
                            elif not v:  # not None and False
                                if q in [_('no'), _('unchecked'), 'true']:
                                    matches.add(item[0])
                            else:  # not None and True
                                if q in [_('yes'), _('checked'), 'true']:
                                    matches.add(item[0])
                        continue

                    if not item[loc]:
                        if q == 'false':
                            matches.add(item[0])
                        continue  # the field is empty, no further match possible

                    if q == 'false':  # the field is non-empty, so 'false' cannot match
                        continue

                    if q == 'true':
                        if isinstance(item[loc], basestring):
                            if item[loc].strip() == '':
                                continue
                        matches.add(item[0])
                        continue

                    if col_datatype[loc] == 'rating':  # reached only by 'all' queries
                        if rating_query and rating_query == int(item[loc]):
                            matches.add(item[0])
                        continue

                    try:  # the conversions below can fail
                        if col_datatype[loc] == 'float':
                            if float(query) == item[loc]:
                                matches.add(item[0])
                            continue
                        if col_datatype[loc] == 'int':
                            if int(query) == item[loc]:
                                matches.add(item[0])
                            continue
                    except:
                        # Conversion failed; no match is possible for this datatype
                        continue

                    if loc not in exclude_fields:  # time for text matching
                        if is_multiple_cols[loc] is not None:
                            vals = item[loc].split(is_multiple_cols[loc])
                        else:
                            vals = [item[loc]]
                        if _match(q, vals, matchkind):
                            matches.add(item[0])
        return matches

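    # Illustrative examples of the text matching above:
    #   'authors:tolkien' -> CONTAINS_MATCH against each author name (commas in
    #                        the query become bars, matching how names are stored)
    #   'tags:=fiction'   -> EQUALS_MATCH (the leading '=' selects exact match)
    #   'series:true'     -> any book with a non-empty series
    # Boolean custom columns honour the bool_custom_columns_are_tristate tweak.
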
    def remove(self, id):
        self._data[id] = None
        if id in self._map:
            self._map.remove(id)
        if id in self._map_filtered:
            self._map_filtered.remove(id)

    def set(self, row, col, val, row_is_id=False):
        id = row if row_is_id else self._map_filtered[row]
        self._data[id][col] = val

    def get(self, row, col, row_is_id=False):
        id = row if row_is_id else self._map_filtered[row]
        return self._data[id][col]

    def index(self, id, cache=False):
        x = self._map if cache else self._map_filtered
        return x.index(id)

    def row(self, id):
        return self.index(id)

    def has_id(self, id):
        try:
            return self._data[id] is not None
        except IndexError:
            pass
        return False

    def refresh_ids(self, db, ids):
        for id in ids:
            try:
                self._data[id] = db.conn.get('SELECT * from meta2 WHERE id=?',
                        (id,))[0]
                self._data[id].append(db.has_cover(id, index_is_id=True))
                self._data[id].append(db.book_on_device_string(id))
            except IndexError:
                return None
        try:
            return map(self.row, ids)
        except ValueError:
            pass
        return None

    def books_added(self, ids, db):
        if not ids:
            return
        self._data.extend(repeat(None, max(ids) - len(self._data) + 2))
        for id in ids:
            self._data[id] = db.conn.get('SELECT * from meta2 WHERE id=?', (id,))[0]
            self._data[id].append(db.has_cover(id, index_is_id=True))
            self._data[id].append(db.book_on_device_string(id))
        self._map[0:0] = ids
        self._map_filtered[0:0] = ids

    def books_deleted(self, ids):
        for id in ids:
            self._data[id] = None
            if id in self._map:
                self._map.remove(id)
            if id in self._map_filtered:
                self._map_filtered.remove(id)

    def count(self):
        return len(self._map)

    def refresh_ondevice(self, db):
        ondevice_col = self.FIELD_MAP['ondevice']
        for item in self._data:
            if item is not None:
                item[ondevice_col] = db.book_on_device_string(item[0])

    def refresh(self, db, field=None, ascending=True):
        temp = db.conn.get('SELECT * FROM meta2')
        # Size the cache so it can be indexed directly by book id
        self._data = list(itertools.repeat(None, temp[-1][0] + 2)) if temp else []
        for r in temp:
            self._data[r[0]] = r
        for item in self._data:
            if item is not None:
                item.append(db.has_cover(item[0], index_is_id=True))
                item.append(db.book_on_device_string(item[0]))
        self._map = [i[0] for i in self._data if i is not None]
        if field is not None:
            self.sort(field, ascending)
        self._map_filtered = list(self._map)

    def seriescmp(self, sidx, siidx, x, y, library_order=None):
        try:
            if library_order:
                ans = cmp(title_sort(self._data[x][sidx].lower()),
                          title_sort(self._data[y][sidx].lower()))
            else:
                ans = cmp(self._data[x][sidx].lower(),
                          self._data[y][sidx].lower())
        except AttributeError:  # one of the series values is None
            ans = cmp(self._data[x][sidx], self._data[y][sidx])
        if ans != 0:
            return ans
        return cmp(self._data[x][siidx], self._data[y][siidx])

    def cmp(self, loc, x, y, asstr=True, subsort=False):
        try:
            ans = cmp(self._data[x][loc].lower(), self._data[y][loc].lower()) if asstr \
                    else cmp(self._data[x][loc], self._data[y][loc])
        except AttributeError:  # some entries may be None
            ans = cmp(self._data[x][loc], self._data[y][loc])
        except TypeError:  # dates can be None
            x = self._data[x][loc]
            if x is None:
                x = UNDEFINED_DATE
            y = self._data[y][loc]
            if y is None:
                y = UNDEFINED_DATE
            return cmp(x, y)
        if subsort and ans == 0:
            return cmp(self._data[x][11].lower(), self._data[y][11].lower())
        return ans

    def sort(self, field, ascending, subsort=False):
        field = field.lower().strip()
        if field in ('author', 'tag', 'comment'):
            field += 's'
        if field == 'date':
            field = 'timestamp'
        elif field == 'title':
            field = 'sort'
        elif field == 'authors':
            field = 'author_sort'
        as_string = field not in ('size', 'rating', 'timestamp')

        if self.first_sort:
            subsort = True
            self.first_sort = False

        if self.field_metadata[field]['is_custom']:
            if self.field_metadata[field]['datatype'] == 'series':
                fcmp = functools.partial(self.seriescmp,
                        self.field_metadata[field]['rec_index'],
                        self.field_metadata.cc_series_index_column_for(field),
                        library_order=tweaks['title_series_sorting'] == 'library_order')
            else:
                as_string = self.field_metadata[field]['datatype'] in ('comments', 'text')
                field = self.field_metadata[field]['colnum']
                fcmp = functools.partial(self.cmp, self.FIELD_MAP[field],
                                         subsort=subsort, asstr=as_string)
        elif field == 'series':
            fcmp = functools.partial(self.seriescmp, self.FIELD_MAP['series'],
                        self.FIELD_MAP['series_index'],
                        library_order=tweaks['title_series_sorting'] == 'library_order')
        else:
            fcmp = functools.partial(self.cmp, self.FIELD_MAP[field],
                                     subsort=subsort, asstr=as_string)
        self._map.sort(cmp=fcmp, reverse=not ascending)
        self._map_filtered = [id for id in self._map if id in self._map_filtered]

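    # Illustrative examples of the field mapping done by sort():
    #   sort('title', True)   -> sorts on the 'sort' column (title sort key)
    #   sort('authors', True) -> sorts on 'author_sort'
    #   sort('date', False)   -> sorts on 'timestamp', newest first
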
    def search(self, query, return_matches=False,
               ignore_search_restriction=False):
        q = ''
        if not query or not query.strip():
            if not ignore_search_restriction:
                q = self.search_restriction
        else:
            q = query
            if not ignore_search_restriction and self.search_restriction:
                q = u'%s (%s)' % (self.search_restriction, query)
        if not q:
            if return_matches:
                return list(self._map)  # when return_matches, do not update the maps
            self._map_filtered = list(self._map)
            return
        matches = sorted(self.parse(q))
        ans = [id for id in self._map if id in matches]
        if return_matches:
            return ans
        self._map_filtered = ans

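    # Illustrative example of how the search restriction composes with a query:
    # with search_restriction = 'tags:unread', search('author:austen') parses
    # u'tags:unread (author:austen)', so results always stay inside the
    # restriction unless ignore_search_restriction is passed.
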
    def set_search_restriction(self, s):
        self.search_restriction = s
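
# A minimal sketch of how a ResultCache is typically driven, assuming `db` is
# the library database and FIELD_MAP/field_metadata come from it (names outside
# this file are assumptions):
#
#   cache = ResultCache(db.FIELD_MAP, db.field_metadata)
#   cache.refresh(db)                 # load all rows from the meta2 view
#   cache.search('tags:fiction')      # sets the filtered map
#   cache.sort('authors', True)
#   for record in cache:              # iterates filtered, sorted records
#       print record[cache.FIELD_MAP['title']]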