wip
This commit is contained in:
parent
9d1a82bdf0
commit
488f0dab11
6 changed files with 666 additions and 82 deletions
|
|
@ -1,55 +0,0 @@
|
|||
import fs from "node:fs/promises";
|
||||
import { db } from "@glossa/db";
|
||||
import { translations } from "@glossa/db/schema";
|
||||
import { inArray } from "drizzle-orm";
|
||||
|
||||
// Input word list: one English noun per line.
const wordlistPath = "./src/data/wordlists/top1000englishnouns";
// Destination file for words that have no match in the translations table.
const unmatchedOutputPath =
  "./src/data/wordlists/top1000englishnouns-unmatched";
|
||||
|
||||
const main = async () => {
|
||||
// 1. Read and normalise the word list
|
||||
console.log("📖 Reading word list...");
|
||||
const raw = await fs.readFile(wordlistPath, "utf8");
|
||||
const words = raw
|
||||
.split("\n")
|
||||
.map((w) => w.trim().toLowerCase())
|
||||
.filter(Boolean);
|
||||
console.log(` ${words.length} words loaded\n`);
|
||||
|
||||
// 2. Query DB for matches
|
||||
console.log("🔍 Checking against database...");
|
||||
const rows = await db
|
||||
.select({ text: translations.text })
|
||||
.from(translations)
|
||||
.where(inArray(translations.text, words));
|
||||
|
||||
const matchedSet = new Set(rows.map((r) => r.text.toLowerCase()));
|
||||
|
||||
// 3. Split into matched / unmatched
|
||||
const matched = words.filter((w) => matchedSet.has(w));
|
||||
const unmatched = words.filter((w) => !matchedSet.has(w));
|
||||
|
||||
// 4. Terminal output
|
||||
console.log(`✅ Matched: ${matched.length}/${words.length}`);
|
||||
console.log(`❌ Unmatched: ${unmatched.length}/${words.length}`);
|
||||
console.log(
|
||||
`📊 Coverage: ${((matched.length / words.length) * 100).toFixed(1)}%\n`,
|
||||
);
|
||||
|
||||
if (unmatched.length > 0) {
|
||||
console.log("❌ Unmatched words:");
|
||||
for (const w of unmatched) {
|
||||
console.log(` ${w}`);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Write unmatched to file
|
||||
// await fs.writeFile(unmatchedOutputPath, unmatched.join("\n"), "utf8");
|
||||
console.log(`\n💾 Unmatched words written to ${unmatchedOutputPath}`);
|
||||
};
|
||||
|
||||
main().catch((error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
@ -1,46 +1,77 @@
|
|||
/*
|
||||
Parse CLI args → resolve the word list file path
|
||||
Connect to the database
|
||||
Read the word list file into an ordered array of strings
|
||||
Look up the en→it language pair ID from language_pairs
|
||||
Batch-fetch all matching rows from translations where language_code = 'en' and text IN (words)
|
||||
Build a word → termId map from the results
|
||||
Walk the ordered word list → split into hits (word found, capture position) and misses (skip)
|
||||
Check if a deck with this name already exists → if so, delete its deck_terms then the deck itself
|
||||
Insert the new decks row
|
||||
Insert all deck_terms rows in batches (deckId, termId, position)
|
||||
Log the skipped words
|
||||
Close the DB connection
|
||||
- [x] Setup — hardcoded path, name, description, source language, POS
|
||||
- [x] Read wordlist — load the 1000 nouns
|
||||
- [x] Query terms — match to database, find which ones have translations
|
||||
- [ ] Validation — determine validated languages from the data (e.g., ["en", "it"] if all matched terms have both)
|
||||
- [ ] Check idempotency — skip if deck exists
|
||||
- [ ] Create deck — insert with discovered validated_for_languages
|
||||
- [ ] Link terms — insert deck_terms
|
||||
- [ ] Report — summary
|
||||
*/
|
||||
|
||||
import fs from "node:fs/promises";
|
||||
import { db } from "@glossa/db";
|
||||
import { translations } from "@glossa/db/schema";
|
||||
import { inArray } from "drizzle-orm";
|
||||
import { translations, terms } from "@glossa/db/schema";
|
||||
import { inArray, and, eq } from "drizzle-orm";
|
||||
|
||||
const wordlistPath = "./src/data/wordlists/top1000englishnouns";
// NOTE(review): pathToWordlist duplicates wordlistPath above with the same
// value — looks like a rename in progress; confirm which name is live and
// remove the other.
const pathToWordlist = "./src/data/wordlists/top1000englishnouns";
// Metadata for the deck row to be inserted.
const nameOfDeck = "top english nouns";
const descriptionOfDeck =
  "Most frequently used English nouns for vocabulary practice";
// Match criteria: only English translations of noun terms count as hits.
const sourceLanguage = "en";
const sourcePOS = "noun";
||||
|
||||
const main = async () => {
|
||||
// Read and normalise the word list
|
||||
console.log("📖 Reading word list...");
|
||||
const raw = await fs.readFile(wordlistPath, "utf8");
|
||||
const readingFromWordlist = async () => {
|
||||
const raw = await fs.readFile(pathToWordlist, "utf8");
|
||||
const words = raw
|
||||
.split("\n")
|
||||
.map((w) => w.trim().toLowerCase())
|
||||
.filter(Boolean);
|
||||
console.log(` ${words.length} words loaded\n`);
|
||||
return words;
|
||||
};
|
||||
|
||||
// Query DB for matches
|
||||
console.log("🔍 Checking against database...");
|
||||
const checkingSourceWordsAgainstDB = async (words: string[]) => {
|
||||
const rows = await db
|
||||
.select({ text: translations.text, termId: translations.term_id })
|
||||
.from(translations)
|
||||
.where(inArray(translations.text, words));
|
||||
.innerJoin(terms, eq(translations.term_id, terms.id))
|
||||
.where(
|
||||
and(
|
||||
inArray(translations.text, words),
|
||||
eq(translations.language_code, sourceLanguage),
|
||||
eq(terms.pos, sourcePOS),
|
||||
),
|
||||
);
|
||||
|
||||
const matchedSet = new Set(rows.map((r) => r.text.toLowerCase()));
|
||||
const wordsInDb = words.filter((w) => matchedSet.has(w));
|
||||
// map word text to term_id
|
||||
const wordToTermId = new Map<string, string>();
|
||||
for (const row of rows) {
|
||||
const word = row.text.toLowerCase();
|
||||
if (!wordToTermId.has(word)) {
|
||||
wordToTermId.set(word, row.termId);
|
||||
}
|
||||
}
|
||||
const termIds = Array.from(wordToTermId.values());
|
||||
const missingWords = words.filter((w) => !wordToTermId.has(w));
|
||||
|
||||
console.log("wordsInDb: ", wordsInDb);
|
||||
return { termIds, missingWords };
|
||||
};
|
||||
|
||||
// TODO(review): stub — main() awaits this to persist the unmatched words,
// but nothing is written yet; presumably it should fs.writeFile them to a
// "-unmatched" sibling of the word list (no output path is defined in this
// file yet — confirm the intended destination).
const writeMissingWordsToFile = async (words: string[]) => {};
|
||||
|
||||
const main = async () => {
|
||||
// Read and normalise the word list
|
||||
console.log("📖 Reading word list...");
|
||||
const sourceWords = await readingFromWordlist();
|
||||
console.log(` ${sourceWords.length} words loaded\n`);
|
||||
// check if sourceWords exist in database
|
||||
console.log("🔍 Checking against database...");
|
||||
const { termIds, missingWords } =
|
||||
await checkingSourceWordsAgainstDB(sourceWords);
|
||||
console.log("words found in db: ", termIds.length);
|
||||
console.log("words NOT found in db: ", missingWords.length);
|
||||
// write missing words to file
|
||||
await writeMissingWordsToFile(missingWords);
|
||||
};
|
||||
|
||||
main().catch((error) => {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue