fixed extremely slow word filtering caused by v42.0
This commit is contained in:
parent f635e03b35
commit 7c077fdb31
3 changed files with 24 additions and 3 deletions
Migration.java

@@ -1,5 +1,7 @@
package io.github.sspanak.tt9.db.sqlite;

+import io.github.sspanak.tt9.preferences.settings.SettingsStore;
+
class Migration {
    static final Migration[] LIST = {
        new Migration(

@@ -13,11 +15,25 @@ class Migration
        ),
        new Migration(
            "ALTER TABLE " + Tables.LANGUAGES_META + " ADD COLUMN maxWordsPerSequence INTEGER NOT NULL DEFAULT -1"
        ),
        new Migration(
            "UPDATE " + Tables.LANGUAGES_META +
            " SET maxWordsPerSequence = " + SettingsStore.SUGGESTIONS_POSITIONS_LIMIT +
            ", fileHash = '0'",
            832
        )
    };

    final String query;
    final int oldVersion;

    private Migration(String query) {
        this.oldVersion = Integer.MAX_VALUE;
        this.query = query;
    }

    private Migration(String query, int oldVersion) {
        this.oldVersion = oldVersion;
        this.query = query;
    }
}
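Taken together with the skip check in the SQLiteOpener hunk below, the two constructors imply a simple rule: a migration created with only a query (oldVersion = Integer.MAX_VALUE) is applied on every upgrade, while one created with an explicit version, like the 832 entry above, is applied only when upgrading from a database at that version or older. A minimal standalone sketch of that rule; the class and method names here are illustrative and not part of the project:

class MigrationGateExample {
    // Mirrors the condition "oldVersion > migration.oldVersion" from onUpgrade():
    // a migration runs unless the database being upgraded is already newer
    // than the version the migration is pinned to.
    static boolean shouldRun(int migrationOldVersion, int dbOldVersion) {
        return dbOldVersion <= migrationOldVersion;
    }

    public static void main(String[] args) {
        System.out.println(shouldRun(Integer.MAX_VALUE, 900)); // true: single-argument constructor, never skipped
        System.out.println(shouldRun(832, 832));               // true: upgrading from version 832 or older
        System.out.println(shouldRun(832, 900));               // false: already past 832, the UPDATE is skipped
    }
}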
SQLiteOpener.java

@@ -57,6 +57,11 @@ public class SQLiteOpener extends SQLiteOpenHelper
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        onCreate(db);
        for (Migration migration : Migration.LIST) {
+           if (oldVersion > migration.oldVersion) {
+               Logger.d(LOG_TAG, "Skipping migration: '" + migration.query + "'. Required previous version: " + migration.oldVersion + " but we are at: " + oldVersion);
+               continue;
+           }
+
            try {
                db.execSQL(migration.query);
                Logger.d(LOG_TAG, "Migration succeeded: '" + migration.query);
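The hunk above ends right after the success log, so the rest of the try block is not visible on this page. Purely as an orientation sketch, and assuming the usual pattern of logging a failed statement and moving on, the loop could be closed out like this; android.util.Log stands in for the project's Logger, and the catch clause is an assumption, not the actual code:

import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;

class MigrationRunnerSketch {
    private static final String LOG_TAG = "MigrationRunnerSketch"; // stand-in tag for the sketch

    // Applies every migration that survives the version gate shown in the diff.
    static void runMigrations(SQLiteDatabase db, int oldVersion) {
        for (Migration migration : Migration.LIST) {
            if (oldVersion > migration.oldVersion) {
                continue; // same version gate as in onUpgrade() above
            }

            try {
                db.execSQL(migration.query);
            } catch (SQLException e) {
                // Assumed completion: log the failure and keep going, so one
                // broken statement does not abort the remaining migrations.
                Log.e(LOG_TAG, "Migration failed: '" + migration.query + "'. " + e.getMessage());
            }
        }
    }
}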
DictionaryLoader.java

@@ -263,7 +263,7 @@ public class DictionaryLoader
        WordBatch batch = new WordBatch(language, SettingsStore.DICTIONARY_IMPORT_BATCH_SIZE + 1);
        float progressRatio = (maxProgress - minProgress) / wordFile.getWords();
        int wordCount = 0;
-       int maxWordCount = 0;
+       int maxWordsPerSequence = 0;

        try (BufferedReader ignored = wordFile.getReader()) {
            while (wordFile.notEOF()) {

@@ -277,7 +277,7 @@ public class DictionaryLoader
                ArrayList<String> words = wordFile.getNextWords(digitSequence);
                batch.add(words, digitSequence, wordCount + positionShift);
                wordCount += words.size();
-               maxWordCount = Math.max(maxWordCount, wordCount);
+               maxWordsPerSequence = Math.max(maxWordsPerSequence, words.size());

                if (batch.getWords().size() > SettingsStore.DICTIONARY_IMPORT_BATCH_SIZE) {
                    saveWordBatch(batch);

@@ -292,7 +292,7 @@ public class DictionaryLoader
            }

            saveWordBatch(batch);
-           InsertOps.replaceLanguageMeta(sqlite.getDb(), language.getId(), wordFile.getHash(), maxWordCount);
+           InsertOps.replaceLanguageMeta(sqlite.getDb(), language.getId(), wordFile.getHash(), maxWordsPerSequence);
        }
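The three hunks above replace a maximum taken over the running word total (maxWordCount, which only ever grows as the file is read) with the largest number of words attached to a single digit sequence, and it is this smaller value that now ends up in the language metadata via replaceLanguageMeta(). The toy loop below, with made-up counts, shows how far apart the two figures can be for the same input; the inflated, dictionary-sized figure stored by the old code is presumably what made word filtering so slow in v42.0:

import java.util.List;

class WordsPerSequenceExample {
    public static void main(String[] args) {
        // Hypothetical word counts for three digit sequences; not real dictionary data.
        List<Integer> wordsPerDigitSequence = List.of(40, 25, 90);

        int wordCount = 0;           // running total, as in the loader
        int maxWordCount = 0;        // what the old code tracked
        int maxWordsPerSequence = 0; // what the new code tracks

        for (int words : wordsPerDigitSequence) {
            wordCount += words;
            maxWordCount = Math.max(maxWordCount, wordCount);           // follows the total: 40, 65, 155
            maxWordsPerSequence = Math.max(maxWordsPerSequence, words); // largest single group: 40, 40, 90
        }

        System.out.println(maxWordCount);        // 155, roughly the size of the whole word list
        System.out.println(maxWordsPerSequence); // 90, the biggest word list behind one sequence
    }
}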