author    lufte <javier@lufte.net>  2021-07-02 22:18:12 -0300
committer lufte <javier@lufte.net>  2021-07-02 22:18:12 -0300
commit    03b8a266455b2cfd0484e1d16ba7e6370ae49655 (patch)
tree      0ac496bdc7e285ddb9d327056f01f6b66793b8cf
parent    b75d23e0b1ad305a681847107bdaff5b870e1fef (diff)
The database of a table shouldn't be private
-rw-r--r--  qutebrowser/browser/history.py               | 41
-rw-r--r--  qutebrowser/misc/sql.py                      | 28
-rw-r--r--  tests/unit/browser/test_history.py           |  2
-rw-r--r--  tests/unit/completion/test_histcategory.py   | 18
4 files changed, 45 insertions(+), 44 deletions(-)
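
The patch drops the leading underscore from SqlTable's `_database` attribute: the Database a table belongs to is part of its public interface, so callers (WebHistory, HistoryCategory, the tests) no longer have to reach into a private member. The following is a minimal stand-alone sketch of that pattern, using plain sqlite3 in place of qutebrowser's Database/Query wrappers; the class bodies below are illustrative stand-ins, not code from the patch.

    import sqlite3


    class Database:
        """Stand-in for qutebrowser's misc.sql.Database."""

        def __init__(self, path=':memory:'):
            self._conn = sqlite3.connect(path)

        def query(self, statement, params=()):
            return self._conn.execute(statement, params)


    class SqlTable:
        """Stand-in for misc.sql.SqlTable after this patch."""

        database: Database  # public: the Database this table belongs to

        def __init__(self, database, name):
            self.database = database  # was: self._database = database
            self._name = name
            self.database.query(
                f'CREATE TABLE IF NOT EXISTS {name} (url TEXT, title TEXT)')

        def insert(self, url, title):
            self.database.query(
                f'INSERT INTO {self._name} values(?, ?)', (url, title))


    hist = SqlTable(Database(), 'History')
    hist.insert('https://example.com', 'Example')
    # External code can now run queries through the table's public database
    # instead of touching a private attribute:
    count = hist.database.query('SELECT count(*) FROM History').fetchone()[0]
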
diff --git a/qutebrowser/browser/history.py b/qutebrowser/browser/history.py
index ada8bf0a6..4163f1957 100644
--- a/qutebrowser/browser/history.py
+++ b/qutebrowser/browser/history.py
@@ -126,8 +126,8 @@ class CompletionMetaInfo(sql.SqlTable):
def __getitem__(self, key):
self._check_key(key)
- query = self._database.query('SELECT value FROM CompletionMetaInfo '
- 'WHERE key = :key')
+ query = self.database.query('SELECT value FROM CompletionMetaInfo '
+ 'WHERE key = :key')
return query.run(key=key).value()
def __setitem__(self, key, value):
@@ -172,7 +172,6 @@ class WebHistory(sql.SqlTable):
'atime': 'NOT NULL',
'redirect': 'NOT NULL'},
parent=parent)
- self.database = database
self._progress = progress
# Store the last saved url to avoid duplicate immediate saves.
self._last_url = None
@@ -188,8 +187,8 @@ class WebHistory(sql.SqlTable):
self.metainfo.try_recover()
rebuild_completion = self.metainfo['force_rebuild']
- if self._database.user_version_changed():
- with self._database.transaction():
+ if self.database.user_version_changed():
+ with self.database.transaction():
# If the DB user version changed, run a full cleanup and rebuild the
# completion history.
#
@@ -199,7 +198,7 @@ class WebHistory(sql.SqlTable):
# VACUUM to make things smaller.
self._cleanup_history()
rebuild_completion = True
- self._database.upgrade_user_version()
+ self.database.upgrade_user_version()
# Get a string of all patterns
patterns = config.instance.get_str('completion.web_history.exclude')
@@ -217,19 +216,19 @@ class WebHistory(sql.SqlTable):
self.create_index('HistoryIndex', 'url')
self.create_index('HistoryAtimeIndex', 'atime')
self._contains_query = self.contains_query('url')
- self._between_query = self._database.query('SELECT * FROM History '
- 'where not redirect '
- 'and not url like "qute://%" '
- 'and atime > :earliest '
- 'and atime <= :latest '
- 'ORDER BY atime desc')
-
- self._before_query = self._database.query('SELECT * FROM History '
+ self._between_query = self.database.query('SELECT * FROM History '
'where not redirect '
'and not url like "qute://%" '
+ 'and atime > :earliest '
'and atime <= :latest '
- 'ORDER BY atime desc '
- 'limit :limit offset :offset')
+ 'ORDER BY atime desc')
+
+ self._before_query = self.database.query('SELECT * FROM History '
+ 'where not redirect '
+ 'and not url like "qute://%" '
+ 'and atime <= :latest '
+ 'ORDER BY atime desc '
+ 'limit :limit offset :offset')
def __repr__(self):
return utils.get_repr(self, length=len(self))
@@ -277,7 +276,7 @@ class WebHistory(sql.SqlTable):
'qute://pdfjs%',
]
where_clause = ' OR '.join(f"url LIKE '{term}'" for term in terms)
- q = self._database.query(f'DELETE FROM History WHERE {where_clause}')
+ q = self.database.query(f'DELETE FROM History WHERE {where_clause}')
entries = q.run()
log.sql.debug(f"Cleanup removed {entries.rows_affected()} items")
@@ -303,9 +302,9 @@ class WebHistory(sql.SqlTable):
QApplication.processEvents()
# Select the latest entry for each url
- q = self._database.query('SELECT url, title, max(atime) AS atime FROM History '
- 'WHERE NOT redirect '
- 'GROUP BY url ORDER BY atime asc')
+ q = self.database.query('SELECT url, title, max(atime) AS atime FROM History '
+ 'WHERE NOT redirect '
+ 'GROUP BY url ORDER BY atime asc')
result = q.run()
QApplication.processEvents()
entries = list(result)
@@ -325,7 +324,7 @@ class WebHistory(sql.SqlTable):
self._progress.set_maximum(0)
# We might have caused fragmentation - let's clean up.
- self._database.query('VACUUM').run()
+ self.database.query('VACUUM').run()
QApplication.processEvents()
self.completion.insert_batch(data, replace=True)
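
The `self.database = database` line removed from WebHistory.__init__ above is redundant rather than lost: SqlTable.__init__ (changed in sql.py below) already stores the database on the instance under the public name, so the subclass gets the attribute from the base constructor. Schematically, assuming nothing beyond what the diff shows (Base and Child are placeholder names):

    class Base:
        def __init__(self, database):
            self.database = database  # set once, publicly, in the base class


    class Child(Base):
        def __init__(self, database):
            super().__init__(database)
            # No `self.database = database` needed here any more;
            # the base constructor has already set it.


    assert Child('db').database == 'db'
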
diff --git a/qutebrowser/misc/sql.py b/qutebrowser/misc/sql.py
index d8755f0b4..460d88ce9 100644
--- a/qutebrowser/misc/sql.py
+++ b/qutebrowser/misc/sql.py
@@ -387,12 +387,14 @@ class SqlTable(QObject):
Attributes:
_name: Name of the SQL table this wraps.
+ database: The Database to which this table belongs.
Signals:
changed: Emitted when the table is modified.
"""
changed = pyqtSignal()
+ database: Database
def __init__(self, database: Database, name: str, fields: List[str],
constraints: Optional[Dict[str, str]] = None,
@@ -407,7 +409,7 @@ class SqlTable(QObject):
"""
super().__init__(parent)
self._name = name
- self._database = database
+ self.database = database
self._create_table(fields, constraints)
def _create_table(self, fields: List[str], constraints: Optional[Dict[str, str]],
@@ -417,13 +419,13 @@ class SqlTable(QObject):
If the table already exists, this does nothing (except with force=True), so it
can e.g. be called on every user_version change.
"""
- if not self._database.user_version_changed() and not force:
+ if not self.database.user_version_changed() and not force:
return
constraints = constraints or {}
column_defs = [f'{field} {constraints.get(field, "")}'
for field in fields]
- q = self._database.query(
+ q = self.database.query(
f"CREATE TABLE IF NOT EXISTS {self._name} ({', '.join(column_defs)})"
)
q.run()
@@ -435,17 +437,17 @@ class SqlTable(QObject):
name: Name of the index, should be unique.
field: Name of the field to index.
"""
- if not self._database.user_version_changed():
+ if not self.database.user_version_changed():
return
- q = self._database.query(
+ q = self.database.query(
f"CREATE INDEX IF NOT EXISTS {name} ON {self._name} ({field})"
)
q.run()
def __iter__(self) -> Iterator[Any]:
"""Iterate rows in the table."""
- q = self._database.query(f"SELECT * FROM {self._name}")
+ q = self.database.query(f"SELECT * FROM {self._name}")
q.run()
return iter(q)
@@ -455,19 +457,19 @@ class SqlTable(QObject):
Args:
field: Field to match.
"""
- return self._database.query(
+ return self.database.query(
f"SELECT EXISTS(SELECT * FROM {self._name} WHERE {field} = :val)"
)
def __len__(self) -> int:
"""Return the count of rows in the table."""
- q = self._database.query(f"SELECT count(*) FROM {self._name}")
+ q = self.database.query(f"SELECT count(*) FROM {self._name}")
q.run()
return q.value()
def __bool__(self) -> bool:
"""Check whether there's any data in the table."""
- q = self._database.query(f"SELECT 1 FROM {self._name} LIMIT 1")
+ q = self.database.query(f"SELECT 1 FROM {self._name} LIMIT 1")
q.run()
return q.query.next()
@@ -481,7 +483,7 @@ class SqlTable(QObject):
Return:
The number of rows deleted.
"""
- q = self._database.query(f"DELETE FROM {self._name} where {field} = :val")
+ q = self.database.query(f"DELETE FROM {self._name} where {field} = :val")
q.run(val=value)
if not q.rows_affected():
raise KeyError('No row with {field} = "{value}"')
@@ -491,7 +493,7 @@ class SqlTable(QObject):
params = ', '.join(f':{key}' for key in values)
columns = ', '.join(values)
verb = "REPLACE" if replace else "INSERT"
- return self._database.query(
+ return self.database.query(
f"{verb} INTO {self._name} ({columns}) values({params})"
)
@@ -520,7 +522,7 @@ class SqlTable(QObject):
def delete_all(self) -> None:
"""Remove all rows from the table."""
- self._database.query(f"DELETE FROM {self._name}").run()
+ self.database.query(f"DELETE FROM {self._name}").run()
self.changed.emit()
def select(self, sort_by: str, sort_order: str, limit: int = -1) -> Query:
@@ -533,7 +535,7 @@ class SqlTable(QObject):
Return: A prepared and executed select query.
"""
- q = self._database.query(
+ q = self.database.query(
f"SELECT * FROM {self._name} ORDER BY {sort_by} {sort_order} LIMIT :limit"
)
q.run(limit=limit)
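
One detail in the sql.py hunks above: the `database: Database` line added at class level is an annotation only. It documents the public attribute for readers and type checkers, while the value itself is still assigned in __init__, and the bare annotation creates no class attribute at runtime. A small self-contained check of that behaviour (the Table class is illustrative, not from the patch):

    from typing import get_type_hints


    class Table:
        database: str  # annotation only, no value assigned at class level

        def __init__(self, database: str) -> None:
            self.database = database  # the attribute is created here, per instance


    print(get_type_hints(Table))       # {'database': <class 'str'>}
    print(hasattr(Table, 'database'))  # False: the annotation alone adds no attribute
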
diff --git a/tests/unit/browser/test_history.py b/tests/unit/browser/test_history.py
index 904a86a5f..0454fb722 100644
--- a/tests/unit/browser/test_history.py
+++ b/tests/unit/browser/test_history.py
@@ -528,7 +528,7 @@ class TestCompletionMetaInfo:
assert not metainfo['force_rebuild']
def test_recovery_no_table(self, metainfo):
- metainfo._database.query("DROP TABLE CompletionMetaInfo").run()
+ metainfo.database.query("DROP TABLE CompletionMetaInfo").run()
with pytest.raises(sql.BugError, match='no such table: CompletionMetaInfo'):
metainfo['force_rebuild']
diff --git a/tests/unit/completion/test_histcategory.py b/tests/unit/completion/test_histcategory.py
index 4e3b657e2..cb37fb784 100644
--- a/tests/unit/completion/test_histcategory.py
+++ b/tests/unit/completion/test_histcategory.py
@@ -100,7 +100,7 @@ def test_set_pattern(pattern, before, after, model_validator, hist):
"""Validate the filtering and sorting results of set_pattern."""
for row in before:
hist.insert({'url': row[0], 'title': row[1], 'last_atime': 1})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern(pattern)
model_validator.validate(after)
@@ -111,7 +111,7 @@ def test_set_pattern_repeated(model_validator, hist):
hist.insert({'url': 'example.com/foo', 'title': 'title1', 'last_atime': 1})
hist.insert({'url': 'example.com/bar', 'title': 'title2', 'last_atime': 1})
hist.insert({'url': 'example.com/baz', 'title': 'title3', 'last_atime': 1})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern('b')
@@ -144,7 +144,7 @@ def test_set_pattern_repeated(model_validator, hist):
], ids=['numbers', 'characters'])
def test_set_pattern_long(hist, message_mock, caplog, pattern):
hist.insert({'url': 'example.com/foo', 'title': 'title1', 'last_atime': 1})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
with caplog.at_level(logging.ERROR):
cat.set_pattern(pattern)
msg = message_mock.getmsg(usertypes.MessageLevel.error)
@@ -154,7 +154,7 @@ def test_set_pattern_long(hist, message_mock, caplog, pattern):
@hypothesis.given(pat=strategies.text())
def test_set_pattern_hypothesis(hist, pat, caplog):
hist.insert({'url': 'example.com/foo', 'title': 'title1', 'last_atime': 1})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
with caplog.at_level(logging.ERROR):
cat.set_pattern(pat)
@@ -203,7 +203,7 @@ def test_sorting(max_items, before, after, model_validator, hist, config_stub):
for url, title, atime in before:
timestamp = datetime.datetime.strptime(atime, '%Y-%m-%d').timestamp()
hist.insert({'url': url, 'title': title, 'last_atime': timestamp})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern('')
model_validator.validate(after)
@@ -212,7 +212,7 @@ def test_sorting(max_items, before, after, model_validator, hist, config_stub):
def test_remove_rows(hist, model_validator):
hist.insert({'url': 'foo', 'title': 'Foo', 'last_atime': 0})
hist.insert({'url': 'bar', 'title': 'Bar', 'last_atime': 0})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern('')
hist.delete('url', 'foo')
@@ -228,7 +228,7 @@ def test_remove_rows_fetch(hist):
'title': [str(i) for i in range(300)],
'last_atime': [0] * 300,
})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
cat.set_pattern('')
# sanity check that we didn't fetch everything up front
@@ -253,14 +253,14 @@ def test_timestamp_fmt(fmt, expected, model_validator, config_stub, data_tmpdir)
hist = sql.SqlTable(db, 'CompletionHistory', ['url', 'title', 'last_atime'])
atime = datetime.datetime(2018, 2, 27, 8, 30)
hist.insert({'url': 'foo', 'title': '', 'last_atime': atime.timestamp()})
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern('')
model_validator.validate([('foo', '', expected)])
def test_skip_duplicate_set(message_mock, caplog, hist):
- cat = histcategory.HistoryCategory(database=hist._database)
+ cat = histcategory.HistoryCategory(database=hist.database)
cat.set_pattern('foo')
cat.set_pattern('foobarbaz')
msg = caplog.messages[-1]