Move all remaining stores to SQLCipher

This commit is contained in:
Scott Nonnenberg 2018-10-17 18:01:21 -07:00
parent 7aa9031c7f
commit 1755e0adfd
25 changed files with 2464 additions and 2047 deletions

View File

@ -798,11 +798,6 @@
"message":
"Are you sure? Clicking 'delete' will permanently remove this message from this device only."
},
"unidentifiedDelivery": {
"message": "Unidentified Delivery",
"description":
"Label shown on the message detail screen for messages sent or received with Unidentified Delivery enabled"
},
"deleteThisMessage": {
"message": "Delete this message"
},

View File

@ -15,6 +15,47 @@ module.exports = {
close,
removeDB,
createOrUpdateGroup,
getGroupById,
getAllGroupIds,
bulkAddGroups,
removeGroupById,
removeAllGroups,
createOrUpdateIdentityKey,
getIdentityKeyById,
bulkAddIdentityKeys,
removeIdentityKeyById,
removeAllIdentityKeys,
createOrUpdatePreKey,
getPreKeyById,
bulkAddPreKeys,
removePreKeyById,
removeAllPreKeys,
createOrUpdateSignedPreKey,
getSignedPreKeyById,
getAllSignedPreKeys,
bulkAddSignedPreKeys,
removeSignedPreKeyById,
removeAllSignedPreKeys,
createOrUpdateItem,
getItemById,
getAllItems,
bulkAddItems,
removeItemById,
removeAllItems,
createOrUpdateSession,
getSessionById,
getSessionsByNumber,
bulkAddSessions,
removeSessionById,
removeSessionsByNumber,
removeAllSessions,
getConversationCount,
saveConversation,
saveConversations,
@ -51,6 +92,7 @@ module.exports = {
removeAllUnprocessed,
removeAll,
removeAllConfiguration,
getMessagesNeedingUpgrade,
getMessagesWithVisualMediaAttachments,
@ -320,12 +362,72 @@ async function updateToSchemaVersion4(currentVersion, instance) {
console.log('updateToSchemaVersion4: success!');
}
async function updateToSchemaVersion6(currentVersion, instance) {
if (currentVersion >= 6) {
return;
}
console.log('updateToSchemaVersion6: starting...');
await instance.run('BEGIN TRANSACTION;');
// key-value, ids are strings, one extra column
await instance.run(
`CREATE TABLE sessions(
id STRING PRIMARY KEY ASC,
number STRING,
json TEXT
);`
);
await instance.run(`CREATE INDEX sessions_number ON sessions (
number
) WHERE number IS NOT NULL;`);
// key-value, ids are strings
await instance.run(
`CREATE TABLE groups(
id STRING PRIMARY KEY ASC,
json TEXT
);`
);
await instance.run(
`CREATE TABLE identityKeys(
id STRING PRIMARY KEY ASC,
json TEXT
);`
);
await instance.run(
`CREATE TABLE items(
id STRING PRIMARY KEY ASC,
json TEXT
);`
);
// key-value, ids are integers
await instance.run(
`CREATE TABLE preKeys(
id INTEGER PRIMARY KEY ASC,
json TEXT
);`
);
await instance.run(
`CREATE TABLE signedPreKeys(
id INTEGER PRIMARY KEY ASC,
json TEXT
);`
);
await instance.run('PRAGMA schema_version = 6;');
await instance.run('COMMIT TRANSACTION;');
console.log('updateToSchemaVersion6: success!');
}
const SCHEMA_VERSIONS = [
updateToSchemaVersion1,
updateToSchemaVersion2,
updateToSchemaVersion3,
updateToSchemaVersion4,
// version 5 was dropped
updateToSchemaVersion6,
];
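// Illustrative sketch (not part of this diff): each entry in SCHEMA_VERSIONS is an
// idempotent migration. A hypothetical version 7 would follow the same shape as
// updateToSchemaVersion6 above and be appended to the list:
//   async function updateToSchemaVersion7(currentVersion, instance) {
//     if (currentVersion >= 7) {
//       return;
//     }
//     await instance.run('BEGIN TRANSACTION;');
//     // ...DDL for the new version goes here...
//     await instance.run('PRAGMA schema_version = 7;');
//     await instance.run('COMMIT TRANSACTION;');
//   }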
async function updateSchema(instance) {
@ -400,6 +502,228 @@ async function removeDB() {
rimraf.sync(filePath);
}
const GROUPS_TABLE = 'groups';
async function createOrUpdateGroup(data) {
return createOrUpdate(GROUPS_TABLE, data);
}
async function getGroupById(id) {
return getById(GROUPS_TABLE, id);
}
async function getAllGroupIds() {
const rows = await db.all('SELECT id FROM groups ORDER BY id ASC;');
return map(rows, row => row.id);
}
async function bulkAddGroups(array) {
return bulkAdd(GROUPS_TABLE, array);
}
async function removeGroupById(id) {
return removeById(GROUPS_TABLE, id);
}
async function removeAllGroups() {
return removeAllFromTable(GROUPS_TABLE);
}
const IDENTITY_KEYS_TABLE = 'identityKeys';
async function createOrUpdateIdentityKey(data) {
return createOrUpdate(IDENTITY_KEYS_TABLE, data);
}
async function getIdentityKeyById(id) {
return getById(IDENTITY_KEYS_TABLE, id);
}
async function bulkAddIdentityKeys(array) {
return bulkAdd(IDENTITY_KEYS_TABLE, array);
}
async function removeIdentityKeyById(id) {
return removeById(IDENTITY_KEYS_TABLE, id);
}
async function removeAllIdentityKeys() {
return removeAllFromTable(IDENTITY_KEYS_TABLE);
}
const PRE_KEYS_TABLE = 'preKeys';
async function createOrUpdatePreKey(data) {
return createOrUpdate(PRE_KEYS_TABLE, data);
}
async function getPreKeyById(id) {
return getById(PRE_KEYS_TABLE, id);
}
async function bulkAddPreKeys(array) {
return bulkAdd(PRE_KEYS_TABLE, array);
}
async function removePreKeyById(id) {
return removeById(PRE_KEYS_TABLE, id);
}
async function removeAllPreKeys() {
return removeAllFromTable(PRE_KEYS_TABLE);
}
const SIGNED_PRE_KEYS_TABLE = 'signedPreKeys';
async function createOrUpdateSignedPreKey(data) {
return createOrUpdate(SIGNED_PRE_KEYS_TABLE, data);
}
async function getSignedPreKeyById(id) {
return getById(SIGNED_PRE_KEYS_TABLE, id);
}
async function getAllSignedPreKeys() {
const rows = await db.all('SELECT json FROM signedPreKeys ORDER BY id ASC;');
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddSignedPreKeys(array) {
return bulkAdd(SIGNED_PRE_KEYS_TABLE, array);
}
async function removeSignedPreKeyById(id) {
return removeById(SIGNED_PRE_KEYS_TABLE, id);
}
async function removeAllSignedPreKeys() {
return removeAllFromTable(SIGNED_PRE_KEYS_TABLE);
}
const ITEMS_TABLE = 'items';
async function createOrUpdateItem(data) {
return createOrUpdate(ITEMS_TABLE, data);
}
async function getItemById(id) {
return getById(ITEMS_TABLE, id);
}
async function getAllItems() {
const rows = await db.all('SELECT json FROM items ORDER BY id ASC;');
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddItems(array) {
return bulkAdd(ITEMS_TABLE, array);
}
async function removeItemById(id) {
return removeById(ITEMS_TABLE, id);
}
async function removeAllItems() {
return removeAllFromTable(ITEMS_TABLE);
}
const SESSIONS_TABLE = 'sessions';
async function createOrUpdateSession(data) {
const { id, number } = data;
if (!id) {
throw new Error(
'createOrUpdateSession: Provided data did not have a truthy id'
);
}
if (!number) {
throw new Error(
'createOrUpdateSession: Provided data did not have a truthy number'
);
}
await db.run(
`INSERT OR REPLACE INTO sessions (
id,
number,
json
) values (
$id,
$number,
$json
)`,
{
$id: id,
$number: number,
$json: objectToJSON(data),
}
);
}
async function getSessionById(id) {
return getById(SESSIONS_TABLE, id);
}
async function getSessionsByNumber(number) {
const rows = await db.all('SELECT * FROM sessions WHERE number = $number;', {
$number: number,
});
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddSessions(array) {
return bulkAdd(SESSIONS_TABLE, array);
}
async function removeSessionById(id) {
return removeById(SESSIONS_TABLE, id);
}
async function removeSessionsByNumber(number) {
await db.run('DELETE FROM sessions WHERE number = $number;', {
$number: number,
});
}
async function removeAllSessions() {
return removeAllFromTable(SESSIONS_TABLE);
}
async function createOrUpdate(table, data) {
const { id } = data;
if (!id) {
throw new Error('createOrUpdate: Provided data did not have a truthy id');
}
await db.run(
`INSERT OR REPLACE INTO ${table} (
id,
json
) values (
$id,
$json
)`,
{
$id: id,
$json: objectToJSON(data),
}
);
}
async function bulkAdd(table, array) {
let promise;
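// Note: serialize() keeps BEGIN, the per-row INSERT OR REPLACE calls, and COMMIT in
// issue order on this connection; Promise.all then waits for all of them to finish.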
db.serialize(() => {
promise = Promise.all([
db.run('BEGIN TRANSACTION;'),
...map(array, data => createOrUpdate(table, data)),
db.run('COMMIT TRANSACTION;'),
]);
});
await promise;
}
async function getById(table, id) {
const row = await db.get(`SELECT * FROM ${table} WHERE id = $id;`, {
$id: id,
});
if (!row) {
return null;
}
return jsonToObject(row.json);
}
async function removeById(table, id) {
if (!Array.isArray(id)) {
await db.run(`DELETE FROM ${table} WHERE id = $id;`, { $id: id });
return;
}
if (!id.length) {
throw new Error('removeById: No ids to delete!');
}
// Our node interface doesn't seem to allow you to replace one single ? with an array
await db.run(
`DELETE FROM ${table} WHERE id IN ( ${id.map(() => '?').join(', ')} );`,
id
);
}
async function removeAllFromTable(table) {
await db.run(`DELETE FROM ${table};`);
}
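// Illustrative usage (not part of this diff): the generic helpers above give every
// key-value table the same shape, so a hypothetical new store (assuming a matching
// table has been created by a schema migration) only needs thin wrappers:
//   const STICKERS_TABLE = 'stickers'; // hypothetical table
//   async function createOrUpdateSticker(data) {
//     return createOrUpdate(STICKERS_TABLE, data); // INSERT OR REPLACE on id, JSON blob
//   }
//   async function getStickerById(id) {
//     return getById(STICKERS_TABLE, id);
//   }
//   async function removeStickerById(id) {
//     // Accepts a single id or an array; arrays expand to `id IN (?, ?, ...)`.
//     return removeById(STICKERS_TABLE, id);
//   }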
// Conversations
async function getConversationCount() {
const row = await db.get('SELECT count(*) from conversations;');
@ -1007,15 +1331,42 @@ async function removeAllUnprocessed() {
await db.run('DELETE FROM unprocessed;');
}
// All data in database
async function removeAll() {
let promise;
db.serialize(() => {
promise = Promise.all([
db.run('BEGIN TRANSACTION;'),
db.run('DELETE FROM conversations;'),
db.run('DELETE FROM groups;'),
db.run('DELETE FROM identityKeys;'),
db.run('DELETE FROM items;'),
db.run('DELETE FROM messages;'),
db.run('DELETE FROM preKeys;'),
db.run('DELETE FROM sessions;'),
db.run('DELETE FROM signedPreKeys;'),
db.run('DELETE FROM unprocessed;'),
db.run('COMMIT TRANSACTION;'),
]);
});
await promise;
}
// Anything that isn't user-visible data
async function removeAllConfiguration() {
let promise;
db.serialize(() => {
promise = Promise.all([
db.run('BEGIN TRANSACTION;'),
db.run('DELETE FROM identityKeys;'),
db.run('DELETE FROM items;'),
db.run('DELETE FROM preKeys;'),
db.run('DELETE FROM sessions;'),
db.run('DELETE FROM signedPreKeys;'),
db.run('DELETE FROM unprocessed;'),
db.run('DELETE from conversations;'),
db.run('COMMIT TRANSACTION;'),
]);
});

View File

@ -590,6 +590,7 @@
<script type='text/javascript' src='js/reliable_trigger.js'></script>
<script type='text/javascript' src='js/database.js'></script>
<script type='text/javascript' src='js/storage.js'></script>
<script type='text/javascript' src='js/legacy_storage.js'></script>
<script type='text/javascript' src='js/signal_protocol_store.js'></script>
<script type='text/javascript' src='js/libtextsecure.js'></script>

View File

@ -125,16 +125,18 @@
window.setImmediate = window.nodeSetImmediate;
const { IdleDetector, MessageDataMigrator } = Signal.Workflow;
const {
mandatoryMessageUpgrade,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
doesDatabaseExist,
} = Signal.IndexedDB;
const { Errors, Message } = window.Signal.Types;
const {
upgradeMessageSchema,
writeNewAttachmentData,
deleteAttachmentData,
getCurrentVersion,
} = window.Signal.Migrations;
const {
Migrations0DatabaseWithAttachmentData,
Migrations1DatabaseWithoutAttachmentData,
} = window.Signal.Migrations;
const { Views } = window.Signal;
@ -184,16 +186,13 @@
};
const cancelInitializationMessage = Views.Initialization.setMessage();
window.log.info('Start IndexedDB migrations');
window.log.info('Run migrations on database with attachment data');
await Migrations0DatabaseWithAttachmentData.run({
Backbone,
logger: window.log,
});
const latestDBVersion2 = await getCurrentVersion();
Whisper.Database.migrations[0].version = latestDBVersion2;
const isIndexedDBPresent = await doesDatabaseExist();
if (isIndexedDBPresent) {
window.installStorage(window.legacyStorage);
window.log.info('Start IndexedDB migrations');
await runMigrations();
}
window.log.info('Storage fetch');
storage.fetch();
@ -294,121 +293,17 @@
);
}
const MINIMUM_VERSION = 7;
async function upgradeMessages() {
const NUM_MESSAGES_PER_BATCH = 10;
window.log.info(
'upgradeMessages: Mandatory message schema upgrade started.',
`Target version: ${MINIMUM_VERSION}`
);
if (isIndexedDBPresent) {
await mandatoryMessageUpgrade({ upgradeMessageSchema });
await migrateAllToSQLCipher({ writeNewAttachmentData, Views });
await removeDatabase();
let isMigrationWithoutIndexComplete = false;
while (!isMigrationWithoutIndexComplete) {
const database = Migrations0DatabaseWithAttachmentData.getDatabase();
// eslint-disable-next-line no-await-in-loop
const batchWithoutIndex = await MessageDataMigrator.processNextBatchWithoutIndex(
{
databaseName: database.name,
minDatabaseVersion: database.version,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
maxVersion: MINIMUM_VERSION,
BackboneMessage: Whisper.Message,
saveMessage: window.Signal.Data.saveLegacyMessage,
}
);
window.log.info(
'upgradeMessages: upgrade without index',
batchWithoutIndex
);
isMigrationWithoutIndexComplete = batchWithoutIndex.done;
}
window.log.info('upgradeMessages: upgrade without index complete!');
let isMigrationWithIndexComplete = false;
while (!isMigrationWithIndexComplete) {
// eslint-disable-next-line no-await-in-loop
const batchWithIndex = await MessageDataMigrator.processNext({
BackboneMessage: Whisper.Message,
BackboneMessageCollection: Whisper.MessageCollection,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
getMessagesNeedingUpgrade:
window.Signal.Data.getLegacyMessagesNeedingUpgrade,
saveMessage: window.Signal.Data.saveLegacyMessage,
maxVersion: MINIMUM_VERSION,
});
window.log.info('upgradeMessages: upgrade with index', batchWithIndex);
isMigrationWithIndexComplete = batchWithIndex.done;
}
window.log.info('upgradeMessages: upgrade with index complete!');
window.log.info('upgradeMessages: Message schema upgrade complete');
window.installStorage(window.newStorage);
await window.storage.fetch();
}
await upgradeMessages();
const db = await Whisper.Database.open();
let totalMessages;
try {
totalMessages = await MessageDataMigrator.getNumMessages({
connection: db,
});
} catch (error) {
window.log.error(
'background.getNumMessages error:',
error && error.stack ? error.stack : error
);
totalMessages = 0;
}
function showMigrationStatus(current) {
const status = `${current}/${totalMessages}`;
Views.Initialization.setMessage(
window.i18n('migratingToSQLCipher', [status])
);
}
if (totalMessages) {
window.log.info(`About to migrate ${totalMessages} messages`);
showMigrationStatus(0);
} else {
window.log.info('About to migrate non-messages');
}
await window.Signal.migrateToSQL({
db,
clearStores: Whisper.Database.clearStores,
handleDOMException: Whisper.Database.handleDOMException,
arrayBufferToString: textsecure.MessageReceiver.arrayBufferToStringBase64,
countCallback: count => {
window.log.info(`Migration: ${count} messages complete`);
showMigrationStatus(count);
},
writeNewAttachmentData,
});
db.close();
Views.Initialization.setMessage(window.i18n('optimizingApplication'));
window.log.info('Running cleanup IndexedDB migrations...');
// Close all previous connections to the database first
await Whisper.Database.close();
// Now we clean up IndexedDB database after extracting data from it
await Migrations1DatabaseWithoutAttachmentData.run({
Backbone,
logger: window.log,
});
await Whisper.Database.close();
const latestDBVersion = _.last(
Migrations1DatabaseWithoutAttachmentData.migrations
).version;
Whisper.Database.migrations[0].version = latestDBVersion;
window.log.info('Cleanup: starting...');
const messagesForCleanup = await window.Signal.Data.getOutgoingWithoutExpiresAt(
{

92
js/legacy_storage.js Normal file
View File

@ -0,0 +1,92 @@
/* global Backbone, Whisper */
/* eslint-disable more/no-then */
// eslint-disable-next-line func-names
(function() {
'use strict';
window.Whisper = window.Whisper || {};
const Item = Backbone.Model.extend({
database: Whisper.Database,
storeName: 'items',
});
const ItemCollection = Backbone.Collection.extend({
model: Item,
storeName: 'items',
database: Whisper.Database,
});
let ready = false;
const items = new ItemCollection();
items.on('reset', () => {
ready = true;
});
window.legacyStorage = {
/** ***************************
*** Base Storage Routines ***
**************************** */
put(key, value) {
if (value === undefined) {
throw new Error('Tried to store undefined');
}
if (!ready) {
window.log.warn(
'Called storage.put before storage is ready. key:',
key
);
}
const item = items.add({ id: key, value }, { merge: true });
return new Promise((resolve, reject) => {
item.save().then(resolve, reject);
});
},
get(key, defaultValue) {
const item = items.get(`${key}`);
if (!item) {
return defaultValue;
}
return item.get('value');
},
remove(key) {
const item = items.get(`${key}`);
if (item) {
items.remove(item);
return new Promise((resolve, reject) => {
item.destroy().then(resolve, reject);
});
}
return Promise.resolve();
},
onready(callback) {
if (ready) {
callback();
} else {
items.on('reset', callback);
}
},
fetch() {
return new Promise((resolve, reject) => {
items
.fetch({ reset: true })
.fail(() =>
reject(
new Error(
'Failed to fetch from storage.' +
' This may be due to an unexpected database version.'
)
)
)
.always(resolve);
});
},
reset() {
items.reset();
},
};
})();
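// Illustrative usage (not part of this diff): background.js installs this legacy,
// IndexedDB-backed implementation while migrating, so existing code can still read
// configuration. Key name below is for illustration only:
//   window.installStorage(window.legacyStorage);
//   window.legacyStorage.onready(() => {
//     const number = window.legacyStorage.get('number_id');
//     window.log.info('read legacy number_id:', Boolean(number));
//   });
//   window.legacyStorage.fetch();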

View File

@ -47,7 +47,6 @@
];
Whisper.Conversation = Backbone.Model.extend({
database: Whisper.Database,
storeName: 'conversations',
defaults() {
return {
@ -1665,8 +1664,6 @@
});
Whisper.ConversationCollection = Backbone.Collection.extend({
database: Whisper.Database,
storeName: 'conversations',
model: Whisper.Conversation,
comparator(m) {

View File

@ -65,9 +65,6 @@
window.hasSignalAccount = number => window.AccountCache[number];
window.Whisper.Message = Backbone.Model.extend({
// Keeping this for legacy upgrade pre-migrate to SQLCipher
database: Whisper.Database,
storeName: 'messages',
initialize(attributes) {
if (_.isObject(attributes)) {
this.set(
@ -1418,9 +1415,6 @@
Whisper.MessageCollection = Backbone.Collection.extend({
model: Whisper.Message,
// Keeping this for legacy upgrade pre-migrate to SQLCipher
database: Whisper.Database,
storeName: 'messages',
comparator(left, right) {
if (left.get('received_at') === right.get('received_at')) {
return (left.get('sent_at') || 0) - (right.get('sent_at') || 0);

View File

@ -207,10 +207,10 @@ function exportContactsAndGroups(db, fileWriter) {
});
}
async function importNonMessages(db, parent, options) {
async function importNonMessages(parent, options) {
const file = 'db.json';
const string = await readFileAsText(parent, file);
return importFromJsonString(db, string, path.join(parent, file), options);
return importFromJsonString(string, path.join(parent, file), options);
}
function eliminateClientConfigInBackup(data, targetPath) {
@ -265,7 +265,7 @@ async function importConversationsFromJSON(conversations, options) {
);
}
async function importFromJsonString(db, jsonString, targetPath, options) {
async function importFromJsonString(jsonString, targetPath, options) {
options = options || {};
_.defaults(options, {
forceLightImport: false,
@ -278,136 +278,96 @@ async function importFromJsonString(db, jsonString, targetPath, options) {
fullImport: true,
};
return new Promise(async (resolve, reject) => {
const importObject = JSON.parse(jsonString);
delete importObject.debug;
const importObject = JSON.parse(jsonString);
delete importObject.debug;
if (!importObject.sessions || options.forceLightImport) {
result.fullImport = false;
if (!importObject.sessions || options.forceLightImport) {
result.fullImport = false;
delete importObject.items;
delete importObject.signedPreKeys;
delete importObject.preKeys;
delete importObject.identityKeys;
delete importObject.sessions;
delete importObject.unprocessed;
window.log.info(
'This is a light import; contacts, groups and messages only'
);
}
// We mutate the on-disk backup to prevent the user from importing client
// configuration more than once - that causes lots of encryption errors.
// This of course preserves the true data: conversations and groups.
eliminateClientConfigInBackup(importObject, targetPath);
const storeNames = _.keys(importObject);
window.log.info('Importing to these stores:', storeNames.join(', '));
let finished = false;
const finish = via => {
window.log.info('non-messages import done via', via);
if (finished) {
resolve(result);
}
finished = true;
};
// Special-case conversations key here, going to SQLCipher
const { conversations } = importObject;
const remainingStoreNames = _.without(
storeNames,
'conversations',
'unprocessed'
);
try {
await importConversationsFromJSON(conversations, options);
} catch (error) {
reject(error);
}
// Because the 'are we done?' check below looks at the keys remaining in importObject
delete importObject.conversations;
delete importObject.items;
delete importObject.signedPreKeys;
delete importObject.preKeys;
delete importObject.identityKeys;
delete importObject.sessions;
delete importObject.unprocessed;
// The rest go to IndexedDB
const transaction = db.transaction(remainingStoreNames, 'readwrite');
transaction.onerror = () => {
Whisper.Database.handleDOMException(
'importFromJsonString transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = finish.bind(null, 'transaction complete');
window.log.info(
'This is a light import; contacts, groups and messages only'
);
}
_.each(remainingStoreNames, storeName => {
const items = importObject[storeName];
// We mutate the on-disk backup to prevent the user from importing client
// configuration more than once - that causes lots of encryption errors.
// This of course preserves the true data: conversations and groups.
eliminateClientConfigInBackup(importObject, targetPath);
window.log.info('Importing items for store', storeName);
const storeNames = _.keys(importObject);
window.log.info('Importing to these stores:', storeNames.join(', '));
let count = 0;
let skipCount = 0;
// Special-case conversations key here, going to SQLCipher
const { conversations } = importObject;
const remainingStoreNames = _.without(
storeNames,
'conversations',
'unprocessed'
);
await importConversationsFromJSON(conversations, options);
const finishStore = () => {
// added all objects for this store
delete importObject[storeName];
window.log.info(
'Done importing to store',
storeName,
'Total count:',
count,
'Skipped:',
skipCount
const SAVE_FUNCTIONS = {
groups: window.Signal.Data.createOrUpdateGroup,
identityKeys: window.Signal.Data.createOrUpdateIdentityKey,
items: window.Signal.Data.createOrUpdateItem,
preKeys: window.Signal.Data.createOrUpdatePreKey,
sessions: window.Signal.Data.createOrUpdateSession,
signedPreKeys: window.Signal.Data.createOrUpdateSignedPreKey,
};
await Promise.all(
_.map(remainingStoreNames, async storeName => {
const save = SAVE_FUNCTIONS[storeName];
if (!_.isFunction(save)) {
throw new Error(
`importFromJsonString: Didn't have save function for store ${storeName}`
);
if (_.keys(importObject).length === 0) {
// added all object stores
window.log.info('DB import complete');
finish('puts scheduled');
}
};
}
if (!items || !items.length) {
finishStore();
window.log.info(`Importing items for store ${storeName}`);
const toImport = importObject[storeName];
if (!toImport || !toImport.length) {
window.log.info(`No items in ${storeName} store`);
return;
}
_.each(items, toAdd => {
toAdd = unstringify(toAdd);
let skipCount = 0;
for (let i = 0, max = toImport.length; i < max; i += 1) {
const toAdd = unstringify(toImport[i]);
const haveGroupAlready =
storeName === 'groups' && groupLookup[getGroupKey(toAdd)];
if (haveGroupAlready) {
skipCount += 1;
count += 1;
return;
} else {
// eslint-disable-next-line no-await-in-loop
await save(toAdd);
}
const request = transaction.objectStore(storeName).put(toAdd, toAdd.id);
request.onsuccess = () => {
count += 1;
if (count + skipCount >= items.length) {
finishStore();
}
};
request.onerror = () => {
Whisper.Database.handleDOMException(
`importFromJsonString request error (store: ${storeName})`,
request.error,
reject
);
};
});
// We have to check here, because we may have skipped every item, resulting
// in no onsuccess callback at all.
if (skipCount === count) {
finishStore();
}
});
});
window.log.info(
'Done importing to store',
storeName,
'Total count:',
toImport.length,
'Skipped:',
skipCount
);
})
);
window.log.info('DB import complete');
return result;
}
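// Design note (not part of this diff): non-message stores from db.json are now written
// straight to SQLCipher through the SAVE_FUNCTIONS dispatch table instead of a single
// IndexedDB transaction. A hypothetical extra backup store would only need one more entry:
//   const SAVE_FUNCTIONS = {
//     // ...existing entries...
//     stickers: window.Signal.Data.createOrUpdateSticker, // hypothetical store + save function
//   };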
function createDirectory(parent, name) {
@ -1043,11 +1003,11 @@ async function loadAttachments(dir, getName, options) {
// TODO: Handle video screenshots, and image/video thumbnails
}
function saveMessage(db, message) {
return saveAllMessages(db, [message]);
function saveMessage(message) {
return saveAllMessages([message]);
}
async function saveAllMessages(db, rawMessages) {
async function saveAllMessages(rawMessages) {
if (rawMessages.length === 0) {
return;
}
@ -1085,7 +1045,7 @@ async function saveAllMessages(db, rawMessages) {
// message, save it, and only then do we move on to the next message. Thus, every
// message with attachments needs to be removed from our overall message save with the
// filter() call.
async function importConversation(db, dir, options) {
async function importConversation(dir, options) {
options = options || {};
_.defaults(options, { messageLookup: {} });
@ -1141,7 +1101,7 @@ async function importConversation(db, dir, options) {
message,
key,
});
return saveMessage(db, message);
return saveMessage(message);
};
// eslint-disable-next-line more/no-then
@ -1153,7 +1113,7 @@ async function importConversation(db, dir, options) {
return true;
});
await saveAllMessages(db, messages);
await saveAllMessages(messages);
await promiseChain;
window.log.info(
@ -1166,7 +1126,7 @@ async function importConversation(db, dir, options) {
);
}
async function importConversations(db, dir, options) {
async function importConversations(dir, options) {
const contents = await getDirContents(dir);
let promiseChain = Promise.resolve();
@ -1175,8 +1135,7 @@ async function importConversations(db, dir, options) {
return;
}
const loadConversation = () =>
importConversation(db, conversationDir, options);
const loadConversation = () => importConversation(conversationDir, options);
// eslint-disable-next-line more/no-then
promiseChain = promiseChain.then(loadConversation);
@ -1211,46 +1170,9 @@ async function loadConversationLookup() {
function getGroupKey(group) {
return group.id;
}
function loadGroupsLookup(db) {
return assembleLookup(db, 'groups', getGroupKey);
}
function assembleLookup(db, storeName, keyFunction) {
const lookup = Object.create(null);
return new Promise((resolve, reject) => {
const transaction = db.transaction(storeName, 'readwrite');
transaction.onerror = () => {
Whisper.Database.handleDOMException(
`assembleLookup(${storeName}) transaction error`,
transaction.error,
reject
);
};
transaction.oncomplete = () => {
// not really very useful - fires at unexpected times
};
const store = transaction.objectStore(storeName);
const request = store.openCursor();
request.onerror = () => {
Whisper.Database.handleDOMException(
`assembleLookup(${storeName}) request error`,
request.error,
reject
);
};
request.onsuccess = event => {
const cursor = event.target.result;
if (cursor && cursor.value) {
lookup[keyFunction(cursor.value)] = true;
cursor.continue();
} else {
window.log.info(`Done creating ${storeName} lookup`);
resolve(lookup);
}
};
});
async function loadGroupsLookup() {
const array = await window.Signal.Data.getAllGroupIds();
return fromPairs(map(array, item => [getGroupKey(item), true]));
}
function getDirectoryForExport() {
@ -1383,11 +1305,10 @@ async function importFromDirectory(directory, options) {
options = options || {};
try {
const db = await Whisper.Database.open();
const lookups = await Promise.all([
loadMessagesLookup(db),
loadConversationLookup(db),
loadGroupsLookup(db),
loadMessagesLookup(),
loadConversationLookup(),
loadGroupsLookup(),
]);
const [messageLookup, conversationLookup, groupLookup] = lookups;
options = Object.assign({}, options, {
@ -1422,8 +1343,8 @@ async function importFromDirectory(directory, options) {
options = Object.assign({}, options, {
attachmentsDir,
});
const result = await importNonMessages(db, stagingDir, options);
await importConversations(db, stagingDir, Object.assign({}, options));
const result = await importNonMessages(stagingDir, options);
await importConversations(stagingDir, Object.assign({}, options));
window.log.info('Done importing from backup!');
return result;
@ -1437,8 +1358,8 @@ async function importFromDirectory(directory, options) {
}
}
const result = await importNonMessages(db, directory, options);
await importConversations(db, directory, options);
const result = await importNonMessages(directory, options);
await importConversations(directory, options);
window.log.info('Done importing!');
return result;

View File

@ -1,10 +1,19 @@
/* global window, setTimeout */
/* global window, setTimeout, IDBKeyRange */
const electron = require('electron');
const { forEach, isFunction, isObject, merge } = require('lodash');
const {
cloneDeep,
forEach,
get,
isFunction,
isObject,
map,
merge,
set,
} = require('lodash');
const { deferredToPromise } = require('./deferred_to_promise');
const { base64ToArrayBuffer, arrayBufferToBase64 } = require('./crypto');
const MessageType = require('./types/message');
const { ipcRenderer } = electron;
@ -13,11 +22,6 @@ const { ipcRenderer } = electron;
// any warnings that might be sent to the console in that case.
ipcRenderer.setMaxListeners(0);
// calls to search for when finding functions to convert:
// .fetch(
// .save(
// .destroy(
const DATABASE_UPDATE_TIMEOUT = 2 * 60 * 1000; // two minutes
const SQL_CHANNEL_KEY = 'sql-channel';
@ -38,6 +42,47 @@ module.exports = {
close,
removeDB,
createOrUpdateGroup,
getGroupById,
getAllGroupIds,
bulkAddGroups,
removeGroupById,
removeAllGroups,
createOrUpdateIdentityKey,
getIdentityKeyById,
bulkAddIdentityKeys,
removeIdentityKeyById,
removeAllIdentityKeys,
createOrUpdatePreKey,
getPreKeyById,
bulkAddPreKeys,
removePreKeyById,
removeAllPreKeys,
createOrUpdateSignedPreKey,
getSignedPreKeyById,
getAllSignedPreKeys,
bulkAddSignedPreKeys,
removeSignedPreKeyById,
removeAllSignedPreKeys,
createOrUpdateItem,
getItemById,
getAllItems,
bulkAddItems,
removeItemById,
removeAllItems,
createOrUpdateSession,
getSessionById,
getSessionsByNumber,
bulkAddSessions,
removeSessionById,
removeSessionsByNumber,
removeAllSessions,
getConversationCount,
saveConversation,
saveConversations,
@ -81,6 +126,8 @@ module.exports = {
removeAllUnprocessed,
removeAll,
removeAllConfiguration,
removeOtherData,
cleanupOrphanedAttachments,
@ -229,6 +276,36 @@ forEach(module.exports, fn => {
}
});
function keysToArrayBuffer(keys, data) {
const updated = cloneDeep(data);
for (let i = 0, max = keys.length; i < max; i += 1) {
const key = keys[i];
const value = get(data, key);
if (value) {
set(updated, key, base64ToArrayBuffer(value));
}
}
return updated;
}
function keysFromArrayBuffer(keys, data) {
const updated = cloneDeep(data);
for (let i = 0, max = keys.length; i < max; i += 1) {
const key = keys[i];
const value = get(data, key);
if (value) {
set(updated, key, arrayBufferToBase64(value));
}
}
return updated;
}
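// Illustrative round trip (not part of this diff): presumably because ArrayBuffers don't
// survive the JSON-based IPC serialization, the listed key paths (lodash get/set notation)
// are converted to base64 strings before crossing the channel and back on the way out.
//   const record = { id: 'key1', publicKey: new ArrayBuffer(33) }; // hypothetical record
//   const wire = keysFromArrayBuffer(['publicKey'], record);       // publicKey -> base64 string
//   const restored = keysToArrayBuffer(['publicKey'], wire);       // base64 string -> ArrayBuffer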
// Top-level calls
// Note: will need to restart the app after calling this, to set up afresh
async function close() {
await channels.close();
@ -239,6 +316,182 @@ async function removeDB() {
await channels.removeDB();
}
// Groups
async function createOrUpdateGroup(data) {
await channels.createOrUpdateGroup(data);
}
async function getGroupById(id) {
const group = await channels.getGroupById(id);
return group;
}
async function getAllGroupIds() {
const ids = await channels.getAllGroupIds();
return ids;
}
async function bulkAddGroups(array) {
await channels.bulkAddGroups(array);
}
async function removeGroupById(id) {
await channels.removeGroupById(id);
}
async function removeAllGroups() {
await channels.removeAllGroups();
}
// Identity Keys
const IDENTITY_KEY_KEYS = ['publicKey'];
async function createOrUpdateIdentityKey(data) {
const updated = keysFromArrayBuffer(IDENTITY_KEY_KEYS, data);
await channels.createOrUpdateIdentityKey(updated);
}
async function getIdentityKeyById(id) {
const data = await channels.getIdentityKeyById(id);
return keysToArrayBuffer(IDENTITY_KEY_KEYS, data);
}
async function bulkAddIdentityKeys(array) {
const updated = map(array, data =>
keysFromArrayBuffer(IDENTITY_KEY_KEYS, data)
);
await channels.bulkAddIdentityKeys(updated);
}
async function removeIdentityKeyById(id) {
await channels.removeIdentityKeyById(id);
}
async function removeAllIdentityKeys() {
await channels.removeAllIdentityKeys();
}
// Pre Keys
async function createOrUpdatePreKey(data) {
const updated = keysFromArrayBuffer(PRE_KEY_KEYS, data);
await channels.createOrUpdatePreKey(updated);
}
async function getPreKeyById(id) {
const data = await channels.getPreKeyById(id);
return keysToArrayBuffer(PRE_KEY_KEYS, data);
}
async function bulkAddPreKeys(array) {
const updated = map(array, data => keysFromArrayBuffer(PRE_KEY_KEYS, data));
await channels.bulkAddPreKeys(updated);
}
async function removePreKeyById(id) {
await channels.removePreKeyById(id);
}
async function removeAllPreKeys() {
await channels.removeAllPreKeys();
}
// Signed Pre Keys
const PRE_KEY_KEYS = ['privateKey', 'publicKey'];
async function createOrUpdateSignedPreKey(data) {
const updated = keysFromArrayBuffer(PRE_KEY_KEYS, data);
await channels.createOrUpdateSignedPreKey(updated);
}
async function getSignedPreKeyById(id) {
const data = await channels.getSignedPreKeyById(id);
return keysToArrayBuffer(PRE_KEY_KEYS, data);
}
async function getAllSignedPreKeys() {
const keys = await channels.getAllSignedPreKeys();
return keys;
}
async function bulkAddSignedPreKeys(array) {
const updated = map(array, data => keysFromArrayBuffer(PRE_KEY_KEYS, data));
await channels.bulkAddSignedPreKeys(updated);
}
async function removeSignedPreKeyById(id) {
await channels.removeSignedPreKeyById(id);
}
async function removeAllSignedPreKeys() {
await channels.removeAllSignedPreKeys();
}
// Items
const ITEM_KEYS = {
identityKey: ['value.pubKey', 'value.privKey'],
senderCertificate: [
'value.certificate',
'value.signature',
'value.serialized',
],
signaling_key: ['value'],
profileKey: ['value'],
};
async function createOrUpdateItem(data) {
const { id } = data;
if (!id) {
throw new Error(
'createOrUpdateItem: Provided data did not have a truthy id'
);
}
const keys = ITEM_KEYS[id];
const updated = Array.isArray(keys) ? keysFromArrayBuffer(keys, data) : data;
await channels.createOrUpdateItem(updated);
}
async function getItemById(id) {
const keys = ITEM_KEYS[id];
const data = await channels.getItemById(id);
return Array.isArray(keys) ? keysToArrayBuffer(keys, data) : data;
}
async function getAllItems() {
const items = await channels.getAllItems();
return map(items, item => {
const { id } = item;
const keys = ITEM_KEYS[id];
return Array.isArray(keys) ? keysToArrayBuffer(keys, item) : item;
});
}
async function bulkAddItems(array) {
const updated = map(array, data => {
const { id } = data;
const keys = ITEM_KEYS[id];
return Array.isArray(keys) ? keysFromArrayBuffer(keys, data) : data;
});
await channels.bulkAddItems(updated);
}
async function removeItemById(id) {
await channels.removeItemById(id);
}
async function removeAllItems() {
await channels.removeAllItems();
}
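// Illustrative example (not part of this diff): only items whose ids appear in ITEM_KEYS
// carry binary fields; the listed paths are run through keysFromArrayBuffer/keysToArrayBuffer.
//   await createOrUpdateItem({
//     id: 'identityKey',
//     value: { pubKey: somePubKeyArrayBuffer, privKey: somePrivKeyArrayBuffer }, // hypothetical buffers
//   });
//   const identity = await getItemById('identityKey'); // value.pubKey / value.privKey return as ArrayBuffers
// Items with ids not listed in ITEM_KEYS pass through unchanged in both directions.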
// Sessions
async function createOrUpdateSession(data) {
await channels.createOrUpdateSession(data);
}
async function getSessionById(id) {
const session = await channels.getSessionById(id);
return session;
}
async function getSessionsByNumber(number) {
const sessions = await channels.getSessionsByNumber(number);
return sessions;
}
async function bulkAddSessions(array) {
await channels.bulkAddSessions(array);
}
async function removeSessionById(id) {
await channels.removeSessionById(id);
}
async function removeSessionsByNumber(number) {
await channels.removeSessionsByNumber(number);
}
async function removeAllSessions(id) {
await channels.removeAllSessions(id);
}
// Conversation
async function getConversationCount() {
return channels.getConversationCount();
}
@ -319,6 +572,8 @@ async function searchConversations(query, { ConversationCollection }) {
return collection;
}
// Message
async function getMessageCount() {
return channels.getMessageCount();
}
@ -329,10 +584,41 @@ async function saveMessage(data, { forceSave, Message } = {}) {
return id;
}
async function saveLegacyMessage(data, { Message }) {
const message = new Message(data);
await deferredToPromise(message.save());
return message.id;
async function saveLegacyMessage(data) {
const db = await window.Whisper.Database.open();
try {
await new Promise((resolve, reject) => {
const transaction = db.transaction('messages', 'readwrite');
transaction.onerror = () => {
window.Whisper.Database.handleDOMException(
'saveLegacyMessage transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = resolve;
const store = transaction.objectStore('messages');
if (!data.id) {
// eslint-disable-next-line no-param-reassign
data.id = window.getGuid();
}
const request = store.put(data, data.id);
request.onsuccess = resolve;
request.onerror = () => {
window.Whisper.Database.handleDOMException(
'saveLegacyMessage request error',
request.error,
reject
);
};
});
} finally {
db.close();
}
}
async function saveMessages(arrayOfMessages, { forceSave } = {}) {
@ -459,6 +745,8 @@ async function getNextExpiringMessage({ MessageCollection }) {
return new MessageCollection(messages);
}
// Unprocessed
async function getUnprocessedCount() {
return channels.getUnprocessedCount();
}
@ -495,10 +783,16 @@ async function removeAllUnprocessed() {
await channels.removeAllUnprocessed();
}
// Other
async function removeAll() {
await channels.removeAll();
}
async function removeAllConfiguration() {
await channels.removeAllConfiguration();
}
async function cleanupOrphanedAttachments() {
await callChannel(CLEANUP_ORPHANED_ATTACHMENTS_KEY);
}
@ -529,28 +823,61 @@ async function callChannel(name) {
});
}
// Functions below here return JSON
// Functions below here return plain JSON instead of Backbone Models
async function getLegacyMessagesNeedingUpgrade(
limit,
{ MessageCollection, maxVersion = MessageType.CURRENT_SCHEMA_VERSION }
{ maxVersion = MessageType.CURRENT_SCHEMA_VERSION }
) {
const messages = new MessageCollection();
const db = await window.Whisper.Database.open();
try {
await new Promise((resolve, reject) => {
const transaction = db.transaction('messages', 'readonly');
const messages = [];
await deferredToPromise(
messages.fetch({
limit,
index: {
name: 'schemaVersion',
upper: maxVersion,
excludeUpper: true,
order: 'desc',
},
})
);
transaction.onerror = () => {
window.Whisper.Database.handleDOMException(
'getLegacyMessagesNeedingUpgrade transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = () => {
resolve(messages);
};
const models = messages.models || [];
return models.map(model => model.toJSON());
const store = transaction.objectStore('messages');
const index = store.index('schemaVersion');
const range = IDBKeyRange.upperBound(maxVersion, true);
const request = index.openCursor(range);
let count = 0;
request.onsuccess = event => {
const cursor = event.target.result;
if (cursor) {
count += 1;
messages.push(cursor.value);
if (count >= limit) {
return;
}
cursor.continue();
}
};
request.onerror = () => {
window.Whisper.Database.handleDOMException(
'getLegacyMessagesNeedingUpgrade request error',
request.error,
reject
);
};
});
} finally {
db.close();
}
}
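// Illustrative note (not part of this diff): IDBKeyRange.upperBound(maxVersion, true) is an
// exclusive bound, so the cursor only visits messages with schemaVersion < maxVersion, and the
// `count >= limit` check stops the walk after one batch. A hypothetical caller:
//   await getLegacyMessagesNeedingUpgrade(10, { maxVersion: 7 });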
async function getMessagesNeedingUpgrade(

168
js/modules/indexeddb.js Normal file
View File

@ -0,0 +1,168 @@
/* global window, Whisper, textsecure */
const { isFunction } = require('lodash');
const MessageDataMigrator = require('./messages_data_migrator');
const {
run,
getLatestVersion,
getDatabase,
} = require('./migrations/migrations');
const MESSAGE_MINIMUM_VERSION = 7;
module.exports = {
doesDatabaseExist,
mandatoryMessageUpgrade,
MESSAGE_MINIMUM_VERSION,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
};
async function runMigrations() {
window.log.info('Run migrations on database with attachment data');
await run({
Backbone: window.Backbone,
logger: window.log,
});
Whisper.Database.migrations[0].version = getLatestVersion();
}
async function mandatoryMessageUpgrade({ upgradeMessageSchema } = {}) {
if (!isFunction(upgradeMessageSchema)) {
throw new Error(
'mandatoryMessageUpgrade: upgradeMessageSchema must be a function!'
);
}
const NUM_MESSAGES_PER_BATCH = 10;
window.log.info(
'upgradeMessages: Mandatory message schema upgrade started.',
`Target version: ${MESSAGE_MINIMUM_VERSION}`
);
let isMigrationWithoutIndexComplete = false;
while (!isMigrationWithoutIndexComplete) {
const database = getDatabase();
// eslint-disable-next-line no-await-in-loop
const batchWithoutIndex = await MessageDataMigrator.processNextBatchWithoutIndex(
{
databaseName: database.name,
minDatabaseVersion: database.version,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
maxVersion: MESSAGE_MINIMUM_VERSION,
BackboneMessage: Whisper.Message,
saveMessage: window.Signal.Data.saveLegacyMessage,
}
);
window.log.info(
'upgradeMessages: upgrade without index',
batchWithoutIndex
);
isMigrationWithoutIndexComplete = batchWithoutIndex.done;
}
window.log.info('upgradeMessages: upgrade without index complete!');
let isMigrationWithIndexComplete = false;
while (!isMigrationWithIndexComplete) {
// eslint-disable-next-line no-await-in-loop
const batchWithIndex = await MessageDataMigrator.processNext({
BackboneMessage: Whisper.Message,
BackboneMessageCollection: Whisper.MessageCollection,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
getMessagesNeedingUpgrade:
window.Signal.Data.getLegacyMessagesNeedingUpgrade,
saveMessage: window.Signal.Data.saveLegacyMessage,
maxVersion: MESSAGE_MINIMUM_VERSION,
});
window.log.info('upgradeMessages: upgrade with index', batchWithIndex);
isMigrationWithIndexComplete = batchWithIndex.done;
}
window.log.info('upgradeMessages: upgrade with index complete!');
window.log.info('upgradeMessages: Message schema upgrade complete');
}
async function migrateAllToSQLCipher({ writeNewAttachmentData, Views } = {}) {
if (!isFunction(writeNewAttachmentData)) {
throw new Error(
'migrateAllToSQLCipher: writeNewAttachmentData must be a function'
);
}
if (!Views) {
throw new Error('migrateAllToSQLCipher: Views must be provided!');
}
let totalMessages;
const db = await Whisper.Database.open();
function showMigrationStatus(current) {
const status = `${current}/${totalMessages}`;
Views.Initialization.setMessage(
window.i18n('migratingToSQLCipher', [status])
);
}
try {
totalMessages = await MessageDataMigrator.getNumMessages({
connection: db,
});
} catch (error) {
window.log.error(
'background.getNumMessages error:',
error && error.stack ? error.stack : error
);
totalMessages = 0;
}
if (totalMessages) {
window.log.info(`About to migrate ${totalMessages} messages`);
showMigrationStatus(0);
} else {
window.log.info('About to migrate non-messages');
}
await window.Signal.migrateToSQL({
db,
clearStores: Whisper.Database.clearStores,
handleDOMException: Whisper.Database.handleDOMException,
arrayBufferToString: textsecure.MessageReceiver.arrayBufferToStringBase64,
countCallback: count => {
window.log.info(`Migration: ${count} messages complete`);
showMigrationStatus(count);
},
writeNewAttachmentData,
});
db.close();
}
async function doesDatabaseExist() {
return new Promise((resolve, reject) => {
const { id } = Whisper.Database;
const req = window.indexedDB.open(id);
let existed = true;
req.onerror = reject;
req.onsuccess = () => {
req.result.close();
resolve(existed);
};
req.onupgradeneeded = () => {
if (req.result.version === 1) {
existed = false;
window.indexedDB.deleteDatabase(id);
}
};
});
}
function removeDatabase() {
window.log.info(`Deleting IndexedDB database '${Whisper.Database.id}'`);
window.indexedDB.deleteDatabase(Whisper.Database.id);
}
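// Illustrative startup flow (not part of this diff), roughly mirroring the background.js
// changes in this commit: the legacy path only runs when the old IndexedDB database still
// exists, and that database is removed once its data has been migrated to SQLCipher.
//   const isIndexedDBPresent = await doesDatabaseExist();
//   if (isIndexedDBPresent) {
//     window.installStorage(window.legacyStorage);
//     await runMigrations();
//     await mandatoryMessageUpgrade({ upgradeMessageSchema });
//     await migrateAllToSQLCipher({ writeNewAttachmentData, Views });
//     await removeDatabase();
//     window.installStorage(window.newStorage);
//     await window.storage.fetch();
//   }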

View File

@ -2,10 +2,26 @@
const { includes, isFunction, isString, last, map } = require('lodash');
const {
bulkAddGroups,
bulkAddSessions,
bulkAddIdentityKeys,
bulkAddPreKeys,
bulkAddSignedPreKeys,
bulkAddItems,
removeGroupById,
removeSessionById,
removeIdentityKeyById,
removePreKeyById,
removeSignedPreKeyById,
removeItemById,
saveMessages,
_removeMessages,
saveUnprocesseds,
removeUnprocessed,
saveConversations,
_removeConversations,
} = require('./data');
@ -132,6 +148,8 @@ async function migrateToSQL({
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
@ -163,6 +181,153 @@ async function migrateToSQL({
window.log.warn('Failed to clear conversations store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddGroups,
remove: removeGroupById,
storeName: 'groups',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of groups complete');
try {
await clearStores(['groups']);
} catch (error) {
window.log.warn('Failed to clear groups store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSessions,
remove: removeSessionById,
storeName: 'sessions',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of sessions complete');
try {
await clearStores(['sessions']);
} catch (error) {
window.log.warn('Failed to clear sessions store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddIdentityKeys,
remove: removeIdentityKeyById,
storeName: 'identityKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of identityKeys complete');
try {
await clearStores(['identityKeys']);
} catch (error) {
window.log.warn('Failed to clear identityKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddPreKeys,
remove: removePreKeyById,
storeName: 'preKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of preKeys complete');
try {
await clearStores(['preKeys']);
} catch (error) {
window.log.warn('Failed to clear preKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSignedPreKeys,
remove: removeSignedPreKeyById,
storeName: 'signedPreKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of signedPreKeys complete');
try {
await clearStores(['signedPreKeys']);
} catch (error) {
window.log.warn('Failed to clear signedPreKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddItems,
remove: removeItemById,
storeName: 'items',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of items complete');
// Note: we don't clear the items store because it contains important metadata which,
// if this process fails, will be crucial to going through this process again.
window.log.info('migrateToSQL: complete');
}
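// Design note (not part of this diff): each store above repeats the same migrate-then-clear
// loop; a hypothetical helper could factor that pattern out:
//   async function migrateStore({ db, storeName, save, remove, handleDOMException, clearStores }) {
//     let complete = false;
//     let lastIndex = null;
//     while (!complete) {
//       // eslint-disable-next-line no-await-in-loop
//       const status = await migrateStoreToSQLite({
//         db,
//         save,
//         remove,
//         storeName,
//         handleDOMException,
//         lastIndex,
//         batchSize: 10,
//       });
//       ({ complete, lastIndex } = status);
//     }
//     window.log.info(`migrateToSQL: migrate of ${storeName} complete`);
//     try {
//       await clearStores([storeName]);
//     } catch (error) {
//       window.log.warn(`Failed to clear ${storeName} store`);
//     }
//   }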

View File

@ -1,13 +1,13 @@
/* global window, Whisper */
const Migrations0DatabaseWithAttachmentData = require('./migrations_0_database_with_attachment_data');
const Migrations = require('./migrations');
exports.getPlaceholderMigrations = () => {
const last0MigrationVersion = Migrations0DatabaseWithAttachmentData.getLatestVersion();
const version = Migrations.getLatestVersion();
return [
{
version: last0MigrationVersion,
version,
migrate() {
throw new Error(
'Unexpected invocation of placeholder migration!' +

View File

@ -170,8 +170,19 @@ const migrations = [
migrate(transaction, next) {
window.log.info('Migration 19');
// Empty because we don't want to cause incompatibility with beta users who have
// already run migration 19 when it was object store removal.
next();
},
},
{
version: 20,
migrate(transaction, next) {
window.log.info('Migration 20');
// Empty because we don't want to cause incompatibility with users who have already
// run migration 19 when it was the object store removal.
// run migration 20 when it was object store removal.
next();
},

View File

@ -1,84 +0,0 @@
/* global window */
const { last, includes } = require('lodash');
const { open } = require('../database');
const settings = require('../settings');
const { runMigrations } = require('./run_migrations');
// These are cleanup migrations, to be run after migration to SQLCipher
exports.migrations = [
{
version: 20,
migrate(transaction, next) {
window.log.info('Migration 20');
const { db } = transaction;
// This should be run after things are migrated to SQLCipher
// We check for existence first, because this removal was present in v1.17.0-beta.1,
// but reverted in v1.17.0-beta.3
if (includes(db.objectStoreNames, 'messages')) {
window.log.info('Removing messages store');
db.deleteObjectStore('messages');
}
if (includes(db.objectStoreNames, 'unprocessed')) {
window.log.info('Removing unprocessed store');
db.deleteObjectStore('unprocessed');
}
if (includes(db.objectStoreNames, 'conversations')) {
window.log.info('Removing conversations store');
db.deleteObjectStore('conversations');
}
next();
},
},
];
exports.run = async ({ Backbone, logger } = {}) => {
const database = {
id: 'signal',
nolog: true,
migrations: exports.migrations,
};
const { canRun } = await exports.getStatus({ database });
if (!canRun) {
throw new Error(
'Cannot run migrations on database without attachment data'
);
}
await runMigrations({
Backbone,
logger,
database,
});
};
exports.getStatus = async ({ database } = {}) => {
const connection = await open(database.id, database.version);
const isAttachmentMigrationComplete = await settings.isAttachmentMigrationComplete(
connection
);
const hasMigrations = exports.migrations.length > 0;
const canRun = isAttachmentMigrationComplete && hasMigrations;
return {
isAttachmentMigrationComplete,
hasMigrations,
canRun,
};
};
exports.getLatestVersion = () => {
const lastMigration = last(exports.migrations);
if (!lastMigration) {
return null;
}
return lastMigration.version;
};

View File

@ -52,7 +52,10 @@ exports.runMigrations = async ({ Backbone, database, logger } = {}) => {
storeName: 'items',
}))();
// Note: this legacy migration technique is required to bring old clients with
// data in IndexedDB forward into the new world of SQLCipher only.
await deferredToPromise(migrationCollection.fetch({ limit: 1 }));
logger.info('Close database connection');
await closeDatabaseConnection({ Backbone });
};

View File

@ -5,6 +5,7 @@ const Crypto = require('./crypto');
const Data = require('./data');
const Database = require('./database');
const Emoji = require('../../ts/util/emoji');
const IndexedDB = require('./indexeddb');
const Notifications = require('../../ts/notifications');
const OS = require('../../ts/OS');
const Settings = require('./settings');
@ -63,9 +64,7 @@ const {
getPlaceholderMigrations,
getCurrentVersion,
} = require('./migrations/get_placeholder_migrations');
const Migrations0DatabaseWithAttachmentData = require('./migrations/migrations_0_database_with_attachment_data');
const Migrations1DatabaseWithoutAttachmentData = require('./migrations/migrations_1_database_without_attachment_data');
const { run } = require('./migrations/migrations');
// Types
const AttachmentType = require('./types/attachment');
@ -132,8 +131,7 @@ function initializeMigrations({
loadAttachmentData,
loadQuoteData,
loadMessage: MessageType.createAttachmentLoader(loadAttachmentData),
Migrations0DatabaseWithAttachmentData,
Migrations1DatabaseWithoutAttachmentData,
run,
upgradeMessageSchema: (message, options = {}) => {
const { maxVersion } = options;
@ -225,6 +223,7 @@ exports.setup = (options = {}) => {
Data,
Database,
Emoji,
IndexedDB,
Migrations,
Notifications,
OS,

File diff suppressed because it is too large

View File

@ -1,5 +1,3 @@
/* global Backbone, Whisper */
/* eslint-disable more/no-then */
// eslint-disable-next-line func-names
@ -7,89 +5,103 @@
'use strict';
window.Whisper = window.Whisper || {};
const Item = Backbone.Model.extend({
database: Whisper.Database,
storeName: 'items',
});
const ItemCollection = Backbone.Collection.extend({
model: Item,
storeName: 'items',
database: Whisper.Database,
});
let ready = false;
const items = new ItemCollection();
items.on('reset', () => {
let items;
let callbacks = [];
reset();
async function put(key, value) {
if (value === undefined) {
throw new Error('Tried to store undefined');
}
if (!ready) {
window.log.warn('Called storage.put before storage is ready. key:', key);
}
const data = { id: key, value };
items[key] = data;
await window.Signal.Data.createOrUpdateItem(data);
}
function get(key, defaultValue) {
if (!ready) {
window.log.warn('Called storage.get before storage is ready. key:', key);
}
const item = items[key];
if (!item) {
return defaultValue;
}
return item.value;
}
async function remove(key) {
if (!ready) {
window.log.warn('Called storage.remove before storage is ready. key:', key);
}
delete items[key];
await window.Signal.Data.removeItemById(key);
}
function onready(callback) {
if (ready) {
callback();
} else {
callbacks.push(callback);
}
}
function callListeners() {
if (ready) {
callbacks.forEach(callback => callback());
callbacks = [];
}
}
async function fetch() {
this.reset();
const array = await window.Signal.Data.getAllItems();
for (let i = 0, max = array.length; i < max; i += 1) {
const item = array[i];
const { id } = item;
items[id] = item;
}
ready = true;
});
window.storage = {
/** ***************************
*** Base Storage Routines ***
**************************** */
put(key, value) {
if (value === undefined) {
throw new Error('Tried to store undefined');
}
if (!ready) {
window.log.warn(
'Called storage.put before storage is ready. key:',
key
);
}
const item = items.add({ id: key, value }, { merge: true });
return new Promise((resolve, reject) => {
item.save().then(resolve, reject);
});
},
callListeners();
}
get(key, defaultValue) {
const item = items.get(`${key}`);
if (!item) {
return defaultValue;
}
return item.get('value');
},
function reset() {
ready = false;
items = Object.create(null);
}
remove(key) {
const item = items.get(`${key}`);
if (item) {
items.remove(item);
return new Promise((resolve, reject) => {
item.destroy().then(resolve, reject);
});
}
return Promise.resolve();
},
onready(callback) {
if (ready) {
callback();
} else {
items.on('reset', callback);
}
},
fetch() {
return new Promise((resolve, reject) => {
items
.fetch({ reset: true })
.fail(() =>
reject(
new Error(
'Failed to fetch from storage.' +
' This may be due to an unexpected database version.'
)
)
)
.always(resolve);
});
},
reset() {
items.reset();
},
const storage = {
fetch,
put,
get,
remove,
onready,
reset,
};
// Keep a reference to this storage system, since there are scenarios where
// we need to replace it with the legacy storage system for a while.
window.newStorage = storage;
window.textsecure = window.textsecure || {};
window.textsecure.storage = window.textsecure.storage || {};
window.textsecure.storage.impl = window.storage;
window.installStorage = newStorage => {
window.storage = newStorage;
window.textsecure.storage.impl = newStorage;
};
window.installStorage(storage);
})();
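// Illustrative usage (not part of this diff): the new storage keeps an in-memory map and
// writes through to the SQLCipher `items` table via window.Signal.Data. Key name below is
// for illustration only:
//   await window.storage.fetch();                       // loads all items, fires onready callbacks
//   await window.storage.put('theme-setting', 'dark');
//   const theme = window.storage.get('theme-setting', 'light');
//   await window.storage.remove('theme-setting');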

View File

@ -8,7 +8,6 @@
'use strict';
window.Whisper = window.Whisper || {};
const { Database } = window.Whisper;
const { Logs } = window.Signal;
const CLEAR_DATA_STEPS = {
@ -33,26 +32,12 @@
this.step = CLEAR_DATA_STEPS.DELETING;
this.render();
try {
await Database.clear();
await Database.close();
window.log.info(
'All database connections closed. Starting database drop.'
);
await Database.drop();
} catch (error) {
window.log.error(
'Something went wrong deleting IndexedDB data then dropping database.'
);
}
this.clearAllData();
await this.clearAllData();
},
async clearAllData() {
try {
await Logs.deleteAll();
// SQLCipher
await window.Signal.Data.removeAll();
await window.Signal.Data.close();
await window.Signal.Data.removeDB();

View File

@ -38,10 +38,7 @@
return storage.put(IMPORT_LOCATION, location);
},
reset() {
return Promise.all([
Whisper.Database.clear(),
window.Signal.Data.removeAll(),
]);
return window.Signal.Data.removeAll();
},
};

View File

@ -13,8 +13,6 @@
'national_number',
'international_number',
],
database: Whisper.Database,
storeName: 'conversations',
model: Whisper.Conversation,
async fetchContacts() {
const models = window.Signal.Data.getAllPrivateConversations({

View File

@ -73,7 +73,9 @@
getNumbers(groupId) {
return textsecure.storage.protocol.getGroup(groupId).then(group => {
if (group === undefined) return undefined;
if (!group) {
return undefined;
}
return group.numbers;
});

View File

@ -72,16 +72,8 @@ function deleteDatabase() {
before(async () => {
await deleteDatabase();
await window.Signal.Data.removeAll();
await Signal.Migrations.Migrations0DatabaseWithAttachmentData.run({
Backbone,
databaseName: Whisper.Database.id,
logger: window.log,
});
});
async function clearDatabase() {
const db = await Whisper.Database.open();
await Whisper.Database.clear();
await window.Signal.Data.removeAll();
}

File diff suppressed because it is too large

View File

@ -164,7 +164,7 @@
"rule": "jQuery-$(",
"path": "js/background.js",
"line": " if ($('.dark-overlay').length) {",
"lineNumber": 265,
"lineNumber": 264,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -173,7 +173,7 @@
"rule": "jQuery-$(",
"path": "js/background.js",
"line": " $(document.body).prepend('<div class=\"dark-overlay\"></div>');",
"lineNumber": 268,
"lineNumber": 267,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -182,7 +182,7 @@
"rule": "jQuery-prepend(",
"path": "js/background.js",
"line": " $(document.body).prepend('<div class=\"dark-overlay\"></div>');",
"lineNumber": 268,
"lineNumber": 267,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T18:13:29.628Z",
"reasonDetail": "Hard-coded value"
@ -191,7 +191,7 @@
"rule": "jQuery-$(",
"path": "js/background.js",
"line": " $('.dark-overlay').on('click', () => $('.dark-overlay').remove());",
"lineNumber": 269,
"lineNumber": 268,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -200,7 +200,7 @@
"rule": "jQuery-$(",
"path": "js/background.js",
"line": " removeDarkOverlay: () => $('.dark-overlay').remove(),",
"lineNumber": 271,
"lineNumber": 270,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -209,7 +209,7 @@
"rule": "jQuery-$(",
"path": "js/background.js",
"line": " $('body').append(clearDataView.el);",
"lineNumber": 274,
"lineNumber": 273,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -218,7 +218,7 @@
"rule": "jQuery-append(",
"path": "js/background.js",
"line": " $('body').append(clearDataView.el);",
"lineNumber": 274,
"lineNumber": 273,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T18:13:29.628Z",
"reasonDetail": "Interacting with already-existing DOM nodes"
@ -227,7 +227,7 @@
"rule": "jQuery-load(",
"path": "js/background.js",
"line": " await ConversationController.load();",
"lineNumber": 509,
"lineNumber": 404,
"reasonCategory": "falseMatch",
"updated": "2018-10-02T21:00:44.007Z"
},
@ -235,7 +235,7 @@
"rule": "jQuery-$(",
"path": "js/background.js",
"line": " el: $('body'),",
"lineNumber": 572,
"lineNumber": 467,
"reasonCategory": "usageTrusted",
"updated": "2018-10-16T23:47:48.006Z",
"reasonDetail": "Protected from arbitrary input"
@ -244,7 +244,7 @@
"rule": "jQuery-wrap(",
"path": "js/background.js",
"line": " wrap(",
"lineNumber": 830,
"lineNumber": 725,
"reasonCategory": "falseMatch",
"updated": "2018-10-18T22:23:00.485Z"
},
@ -252,7 +252,7 @@
"rule": "jQuery-wrap(",
"path": "js/background.js",
"line": " await wrap(",
"lineNumber": 1320,
"lineNumber": 1215,
"reasonCategory": "falseMatch",
"updated": "2018-10-26T22:43:23.229Z"
},
@ -303,7 +303,7 @@
"rule": "jQuery-wrap(",
"path": "js/models/messages.js",
"line": " this.send(wrap(promise));",
"lineNumber": 794,
"lineNumber": 791,
"reasonCategory": "falseMatch",
"updated": "2018-10-05T23:12:28.961Z"
},
@ -311,7 +311,7 @@
"rule": "jQuery-wrap(",
"path": "js/models/messages.js",
"line": " return wrap(",
"lineNumber": 996,
"lineNumber": 993,
"reasonCategory": "falseMatch",
"updated": "2018-10-05T23:12:28.961Z"
},
@ -445,7 +445,7 @@
"rule": "jQuery-load(",
"path": "js/signal_protocol_store.js",
"line": " await ConversationController.load();",
"lineNumber": 972,
"lineNumber": 848,
"reasonCategory": "falseMatch",
"updated": "2018-09-15T00:38:04.183Z"
},
@ -1315,7 +1315,7 @@
"rule": "jQuery-load(",
"path": "js/views/import_view.js",
"line": " return ConversationController.load()",
"lineNumber": 179,
"lineNumber": 176,
"reasonCategory": "falseMatch",
"updated": "2018-09-15T00:38:04.183Z"
},
@ -1844,7 +1844,7 @@
"rule": "jQuery-$(",
"path": "js/views/recipients_input_view.js",
"line": " this.$input = this.$('input.search');",
"lineNumber": 71,
"lineNumber": 69,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -1853,7 +1853,7 @@
"rule": "jQuery-$(",
"path": "js/views/recipients_input_view.js",
"line": " this.$new_contact = this.$('.new-contact');",
"lineNumber": 72,
"lineNumber": 70,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -1862,7 +1862,7 @@
"rule": "jQuery-$(",
"path": "js/views/recipients_input_view.js",
"line": " el: this.$('.recipients'),",
"lineNumber": 82,
"lineNumber": 80,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -1871,7 +1871,7 @@
"rule": "jQuery-$(",
"path": "js/views/recipients_input_view.js",
"line": " this.$('.contacts').append(this.typeahead_view.el);",
"lineNumber": 97,
"lineNumber": 95,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T21:59:32.770Z",
"reasonDetail": "Protected from arbitrary input"
@ -1880,7 +1880,7 @@
"rule": "jQuery-append(",
"path": "js/views/recipients_input_view.js",
"line": " this.$('.contacts').append(this.typeahead_view.el);",
"lineNumber": 97,
"lineNumber": 95,
"reasonCategory": "usageTrusted",
"updated": "2018-09-19T18:13:29.628Z",
"reasonDetail": "Interacting with already-existing DOM nodes"