From 07948ee209cc554fa4d8babfb10d49eb4c5695b1 Mon Sep 17 00:00:00 2001 From: jacob-8 Date: Thu, 2 May 2024 09:43:10 +0800 Subject: [PATCH] fix lint --- eslint.config.js | 2 + package.json | 2 +- .../scripts/algolia/addDictionariesToIndex.ts | 50 +++-- packages/scripts/algolia/algolia.ts | 31 ++- packages/scripts/algolia/updateIndex.ts | 48 ++--- packages/scripts/config-firebase.ts | 50 ++--- packages/scripts/config-supabase.ts | 9 +- packages/scripts/countAllEntries.ts | 28 +-- packages/scripts/import/import-media.ts | 76 +++---- .../scripts/import/import-spreadsheet-v4.ts | 104 ++++----- packages/scripts/import/import.ts | 20 +- packages/scripts/import/supabase-senses.ts | 104 +++++---- packages/scripts/migrate-to-supabase/auth.ts | 8 +- packages/scripts/refactor/entry-refactor.ts | 197 +++++++++--------- .../refactor/move-firestore-document.ts | 70 +++---- .../refactor/upload-old-dictionaries.ts | 24 +-- .../site/src/lib/supabase/generated.types.ts | 49 +++-- 17 files changed, 433 insertions(+), 439 deletions(-) diff --git a/eslint.config.js b/eslint.config.js index 03a7d6984..ad5093a88 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -87,6 +87,8 @@ export default antfu( 'no-console': 'off', 'ts/no-unused-vars': 'off', 'ts/no-var-requires': 'off', + 'node/prefer-global/process': 'off', + 'unused-imports/no-unused-vars': 'off', }, }, { diff --git a/package.json b/package.json index 44f50437e..5ce2d50ee 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,7 @@ "test:all": "pnpm --recursive --stream test -- --run", "test:e2e": "pnpm --filter=site test:e2e", "lint": "eslint . --cache --quiet", - "lint:fix": "eslint . --cache --fix", + "lint:fix": "eslint packages/scripts/import/supabase-senses.ts packages/scripts/algolia/addDictionariesToIndex.ts packages/scripts/algolia/algolia.ts packages/scripts/algolia/updateIndex.ts packages/scripts/countAllEntries.ts packages/scripts/import/convertJsonRowToEntryFormat.ts packages/scripts/import/import-media.ts packages/scripts/import/import-spreadsheet-v4.ts packages/scripts/import/import.ts packages/scripts/import/old/import-spreadsheet.ts packages/scripts/migrate-to-supabase/auth.ts packages/scripts/refactor/entry-refactor.ts packages/scripts/refactor/move-firestore-document.ts packages/scripts/refactor/upload-old-dictionaries.ts packages/site/src/db-tests/update-sense.test.ts packages/site/src/lib/mocks/seed/write-seed-and-reset-db.ts packages/site/src/lib/supabase/generated.types.ts packages/site/src/routes/api/db/content-update/+server.ts packages/site/src/routes/[dictionaryId]/entry/[entryId]/EntryField.svelte packages/site/src/routes/[dictionaryId]/entry/[entryId]/EntryMedia.svelte packages/scripts/package.json packages/scripts/config-firebase.ts packages/scripts/config-supabase.ts --fix", "lint:inspect": "npx @eslint/config-inspector", "lint:inspect-future": "eslint --inspect-config", "reset-db": "pnpm --filter=site reset-db", diff --git a/packages/scripts/algolia/addDictionariesToIndex.ts b/packages/scripts/algolia/addDictionariesToIndex.ts index b42fbd56c..29fc69466 100644 --- a/packages/scripts/algolia/addDictionariesToIndex.ts +++ b/packages/scripts/algolia/addDictionariesToIndex.ts @@ -1,41 +1,39 @@ -import { db } from '../config-firebase'; -import { updateIndex } from './algolia'; -import { ActualDatabaseEntry } from '@living-dictionaries/types'; +import type { ActualDatabaseEntry } from '@living-dictionaries/types' +import * as prepare from '@living-dictionaries/functions/src/algolia/prepareDataForIndex' +import { db } from 
'../config-firebase' +import { updateIndex } from './algolia' // import { prepareDataForIndex } from '@living-dictionaries/functions/src/algolia/prepareDataForIndex'; -import * as prepare from '@living-dictionaries/functions/src/algolia/prepareDataForIndex'; -// @ts-ignore +// @ts-expect-error const prepareDataForIndex = prepare.default - .prepareDataForIndex as typeof import('@living-dictionaries/functions/src/algolia/prepareDataForIndex').prepareDataForIndex; // b/c file is declared to be commonjs by its package.json + .prepareDataForIndex as typeof import('@living-dictionaries/functions/src/algolia/prepareDataForIndex').prepareDataForIndex // b/c file is declared to be commonjs by its package.json -const indexAllDictionaries = async () => { - const dictionariesSnapshot = await db.collection(`dictionaries`).get(); - const dictionaryIds = dictionariesSnapshot.docs.map((doc) => doc.id); - console.log(dictionaryIds); - process.stdout.write(dictionaryIds + '\n'); +async function indexAllDictionaries() { + const dictionariesSnapshot = await db.collection(`dictionaries`).get() + const dictionaryIds = dictionariesSnapshot.docs.map(doc => doc.id) + console.log(dictionaryIds) + process.stdout.write(`${dictionaryIds}\n`) for (const dictionaryId of dictionaryIds) - await indexDictionary(dictionaryId); - -}; + await indexDictionary(dictionaryId) +} async function indexDictionary(dictionaryId: string) { - const entriesSnapshot = await db.collection(`dictionaries/${dictionaryId}/words`).get(); - const entries = await prepareEntriesFromSnapshot(entriesSnapshot, dictionaryId); - await updateIndex(entries); + const entriesSnapshot = await db.collection(`dictionaries/${dictionaryId}/words`).get() + const entries = await prepareEntriesFromSnapshot(entriesSnapshot, dictionaryId) + await updateIndex(entries) } -// eslint-disable-next-line no-undef async function prepareEntriesFromSnapshot(entriesSnapshot: FirebaseFirestore.QuerySnapshot, dictionaryId: string) { const entryPromises = entriesSnapshot.docs.map(async (doc) => { - const dbEntry = doc.data() as ActualDatabaseEntry; - const algoliaEntry = await prepareDataForIndex(dbEntry, dictionaryId, db); - console.log({ dbEntry, algoliaEntry}); - return { ...algoliaEntry, objectID: doc.id }; - }); - - const entries = await Promise.all(entryPromises); - return entries; + const dbEntry = doc.data() as ActualDatabaseEntry + const algoliaEntry = await prepareDataForIndex(dbEntry, dictionaryId, db) + console.log({ dbEntry, algoliaEntry }) + return { ...algoliaEntry, objectID: doc.id } + }) + + const entries = await Promise.all(entryPromises) + return entries } // indexAllDictionaries(); diff --git a/packages/scripts/algolia/algolia.ts b/packages/scripts/algolia/algolia.ts index 714d3d23a..72255de4b 100644 --- a/packages/scripts/algolia/algolia.ts +++ b/packages/scripts/algolia/algolia.ts @@ -1,32 +1,31 @@ -import algoliasearch from 'algoliasearch'; -import { projectId } from '../config-firebase'; -import { adminKey } from './algolia-admin-key.json'; -import { AlgoliaEntry } from '@living-dictionaries/types'; +import algoliasearch from 'algoliasearch' +import type { AlgoliaEntry } from '@living-dictionaries/types' +import { projectId } from '../config-firebase' +import { adminKey } from './algolia-admin-key.json' -const ALGOLIA_APP_ID = 'XCVBAYSYXD'; +const ALGOLIA_APP_ID = 'XCVBAYSYXD' -export const client = algoliasearch(ALGOLIA_APP_ID, adminKey); +export const client = algoliasearch(ALGOLIA_APP_ID, adminKey) const index = client.initIndex( - projectId === 
'talking-dictionaries-dev' ? 'entries_dev' : 'entries_prod' -); + projectId === 'talking-dictionaries-dev' ? 'entries_dev' : 'entries_prod', +) -const MAX_CHUNK_SIZE = 3000; +const MAX_CHUNK_SIZE = 3000 // https://www.algolia.com/doc/api-reference/api-methods/add-objects/#examples // if forced to iterate instead of save all at once, take note of the rate limiting at 5000 backlogged requests https://www.algolia.com/doc/faq/indexing/is-there-a-rate-limit/ export async function updateIndex(entries: AlgoliaEntry[]) { try { for (let startOfChunkIndex = 0; startOfChunkIndex < entries.length; startOfChunkIndex += MAX_CHUNK_SIZE) { - const endOfChunk = startOfChunkIndex + MAX_CHUNK_SIZE; - const chunk = entries.slice(startOfChunkIndex, endOfChunk); - console.log({ startOfChunkIndex, endOfChunk, CHUNK_SIZE: MAX_CHUNK_SIZE, chunkLength: chunk.length }); + const endOfChunk = startOfChunkIndex + MAX_CHUNK_SIZE + const chunk = entries.slice(startOfChunkIndex, endOfChunk) + console.log({ startOfChunkIndex, endOfChunk, CHUNK_SIZE: MAX_CHUNK_SIZE, chunkLength: chunk.length }) - const { objectIDs } = await index.saveObjects(chunk); - console.log(`Entries indexed: ${objectIDs.length}`); + const { objectIDs } = await index.saveObjects(chunk) + console.log(`Entries indexed: ${objectIDs.length}`) } } catch (err) { - console.log(err); + console.log(err) } - } diff --git a/packages/scripts/algolia/updateIndex.ts b/packages/scripts/algolia/updateIndex.ts index 89c6a1d36..4e33b1cfa 100644 --- a/packages/scripts/algolia/updateIndex.ts +++ b/packages/scripts/algolia/updateIndex.ts @@ -1,45 +1,43 @@ -import { db } from '../config-firebase'; -import { updateIndex } from './algolia'; -import { ActualDatabaseEntry } from '@living-dictionaries/types'; +import type { ActualDatabaseEntry } from '@living-dictionaries/types' +import * as prepare from '@living-dictionaries/functions/src/algolia/prepareDataForIndex' +import { db } from '../config-firebase' +import { updateIndex } from './algolia' // import { prepareDataForIndex } from '@living-dictionaries/functions/src/algolia/prepareDataForIndex'; -import * as prepare from '@living-dictionaries/functions/src/algolia/prepareDataForIndex'; -// @ts-ignore +// @ts-expect-error const prepareDataForIndex = prepare.default - .prepareDataForIndex as typeof import('@living-dictionaries/functions/src/algolia/prepareDataForIndex').prepareDataForIndex; // b/c file is declared to be commonjs by its package.json + .prepareDataForIndex as typeof import('@living-dictionaries/functions/src/algolia/prepareDataForIndex').prepareDataForIndex // b/c file is declared to be commonjs by its package.json async function updateMostRecentEntries(count: number, { dry = true }) { - const entriesSnapshot = await db.collectionGroup('words').orderBy('ua', 'desc').limit(count).get(); - const entries = await prepareEntriesFromSnapshot(entriesSnapshot); + const entriesSnapshot = await db.collectionGroup('words').orderBy('ua', 'desc').limit(count).get() + const entries = await prepareEntriesFromSnapshot(entriesSnapshot) if (!dry) - await updateIndex(entries); + await updateIndex(entries) } - async function updateIndexByField(fieldToIndex: string, { dry = true }) { // The field must be indexed first in Firebase - const entriesSnapshot = await db.collectionGroup('words').where(fieldToIndex, '!=', null).get(); - const entries = await prepareEntriesFromSnapshot(entriesSnapshot); + const entriesSnapshot = await db.collectionGroup('words').where(fieldToIndex, '!=', null).get() + const entries = await 
prepareEntriesFromSnapshot(entriesSnapshot) if (!dry) - await updateIndex(entries); + await updateIndex(entries) } -// eslint-disable-next-line no-undef async function prepareEntriesFromSnapshot(entriesSnapshot: FirebaseFirestore.QuerySnapshot) { const entryPromises = entriesSnapshot.docs.map(async (doc) => { - const dbEntry = doc.data() as ActualDatabaseEntry; - const dictionaryId = doc.ref.parent.parent.id; // dictionary/words/entry-123 -> doc.ref: entry-123, doc.ref.parent: words, doc.ref.parent.parent: dictionary - const algoliaEntry = await prepareDataForIndex(dbEntry, dictionaryId, db); - const time = dbEntry.ua.toDate(); - console.log({ dbEntry, algoliaEntry, time }); - return { ...algoliaEntry, objectID: doc.id }; - }); - - const entries = await Promise.all(entryPromises); - return entries; + const dbEntry = doc.data() as ActualDatabaseEntry + const dictionaryId = doc.ref.parent.parent.id // dictionary/words/entry-123 -> doc.ref: entry-123, doc.ref.parent: words, doc.ref.parent.parent: dictionary + const algoliaEntry = await prepareDataForIndex(dbEntry, dictionaryId, db) + const time = dbEntry.ua.toDate() + console.log({ dbEntry, algoliaEntry, time }) + return { ...algoliaEntry, objectID: doc.id } + }) + + const entries = await Promise.all(entryPromises) + return entries } // updateIndexByField('nc', { dry: true }); -updateMostRecentEntries(300, { dry: false }); +updateMostRecentEntries(300, { dry: false }) diff --git a/packages/scripts/config-firebase.ts b/packages/scripts/config-firebase.ts index 40cbb2b2e..b84985f6e 100644 --- a/packages/scripts/config-firebase.ts +++ b/packages/scripts/config-firebase.ts @@ -1,44 +1,44 @@ -import { program } from 'commander'; -import { initializeApp, cert } from 'firebase-admin/app'; -import { FieldValue, getFirestore } from 'firebase-admin/firestore'; -import { getStorage } from 'firebase-admin/storage'; -import { getAuth } from 'firebase-admin/auth'; +import fs from 'node:fs' +import { program } from 'commander' +import { cert, initializeApp } from 'firebase-admin/app' +import { FieldValue, getFirestore } from 'firebase-admin/firestore' +import { getStorage } from 'firebase-admin/storage' +import { getAuth } from 'firebase-admin/auth' // import serviceAccountDev from './service-account-dev.json'; // import serviceAccountProd from './service-account-prod.json'; -import { serviceAccountDev, serviceAccountProd } from './service-accounts'; +import { serviceAccountDev, serviceAccountProd } from './service-accounts' + +/// LOGGER/// program // .version('0.0.1') .option('-e, --environment [dev/prod]', 'Firebase Project', 'dev') .allowUnknownOption() // because config is shared by multiple scripts - .parse(process.argv); + .parse(process.argv) -export const environment = program.opts().environment === 'prod' ? 'prod' : 'dev'; -export const projectId = - environment === 'prod' ? 'talking-dictionaries-alpha' : 'talking-dictionaries-dev'; +export const environment = program.opts().environment === 'prod' ? 'prod' : 'dev' +export const projectId + = environment === 'prod' ? 'talking-dictionaries-alpha' : 'talking-dictionaries-dev' -const serviceAccount = environment === 'dev' ? serviceAccountDev : serviceAccountProd; +const serviceAccount = environment === 'dev' ? 
serviceAccountDev : serviceAccountProd

 initializeApp({
   // @ts-expect-error
   credential: cert(serviceAccount),
   databaseURL: `https://${projectId}.firebaseio.com`,
   storageBucket: `${projectId}.appspot.com`,
-});
-export const db = getFirestore();
+})
+export const db = getFirestore()
 // const settings = { timestampsInSnapshots: true };
 // db.settings(settings);
-export const timestamp = FieldValue.serverTimestamp();
-export const storage = getStorage();
-export const auth = getAuth();
-
-///LOGGER///
-import fs from 'fs';
-const logFile = fs.createWriteStream(`./logs/${Date.now()}.txt`, { flags: 'w' }); // 'a' to append, 'w' to truncate the file every time the process starts.
+export const timestamp = FieldValue.serverTimestamp()
+export const storage = getStorage()
+export const auth = getAuth()

+const logFile = fs.createWriteStream(`./logs/${Date.now()}.txt`, { flags: 'w' }) // 'a' to append, 'w' to truncate the file every time the process starts.
 console.log = function (data: any) {
-  logFile.write(JSON.stringify(data) + '\n');
-  process.stdout.write(JSON.stringify(data) + '\n');
-};
-///END-LOGGER///
+  logFile.write(`${JSON.stringify(data)}\n`)
+  process.stdout.write(`${JSON.stringify(data)}\n`)
+}
+/// END-LOGGER///

-console.log(`Running on ${environment}`);
+console.log(`Running on ${environment}`)
diff --git a/packages/scripts/config-supabase.ts b/packages/scripts/config-supabase.ts
index 63345b2f4..3e36fd64d 100644
--- a/packages/scripts/config-supabase.ts
+++ b/packages/scripts/config-supabase.ts
@@ -1,8 +1,9 @@
 import PG from 'pg'
-import { createClient } from '@supabase/supabase-js';
-import { Database } from '@living-dictionaries/site/src/lib/supabase/database.types';
-import * as dotenv from 'dotenv';
-dotenv.config({path: '.env.supabase'});
+import { createClient } from '@supabase/supabase-js'
+import type { Database } from '@living-dictionaries/site/src/lib/supabase/database.types'
+import * as dotenv from 'dotenv'
+
+dotenv.config({ path: '.env.supabase' })

 export const supabase = createClient<Database>(process.env.PUBLIC_SUPABASE_API_URL, process.env.SUPABASE_SERVICE_ROLE_KEY)
diff --git a/packages/scripts/countAllEntries.ts b/packages/scripts/countAllEntries.ts
index 275b38d02..1da24edea 100644
--- a/packages/scripts/countAllEntries.ts
+++ b/packages/scripts/countAllEntries.ts
@@ -1,25 +1,25 @@
-import { db } from './config-firebase';
+import { db } from './config-firebase'

 export async function countAllEntries() {
-  let overallEntryCount = 0;
+  let overallEntryCount = 0

-  const dictionarySnaps = await db.collection('dictionaries').get();
-  const dictionaryIds = dictionarySnaps.docs.map(doc => doc.id);
+  const dictionarySnaps = await db.collection('dictionaries').get()
+  const dictionaryIds = dictionarySnaps.docs.map(doc => doc.id)

   for (const dictionaryId of dictionaryIds) {
-    if (dictionaryId.startsWith('tdv1-')) continue;
+    if (dictionaryId.startsWith('tdv1-')) continue

-    const countData = await db.collection(`dictionaries/${dictionaryId}/words`).count().get();
-    const { count: entryCount } = countData.data();
-    console.log({ dictionaryId, entryCount, overallEntryCount });
-    overallEntryCount += entryCount;
-    console.log({ dictionaryId, entryCount, overallEntryCount });
-    await db.doc(`dictionaries/${dictionaryId}`).update({ entryCount });
+    const countData = await db.collection(`dictionaries/${dictionaryId}/words`).count().get()
+    const { count: entryCount } = countData.data()
+    console.log({ dictionaryId, entryCount, overallEntryCount })
+    overallEntryCount += entryCount
+    console.log({ dictionaryId, entryCount, overallEntryCount })
+    await db.doc(`dictionaries/${dictionaryId}`).update({ entryCount })
   }

-  await db.doc('stats/data').update({ overallEntryCount });
+  await db.doc('stats/data').update({ overallEntryCount })

-  return true;
+  return true
 }

-countAllEntries().then(() => console.log('done')).catch(console.error);
+countAllEntries().then(() => console.log('done')).catch(console.error)
diff --git a/packages/scripts/import/import-media.ts b/packages/scripts/import/import-media.ts
index cf944f192..26fa4bc10 100644
--- a/packages/scripts/import/import-media.ts
+++ b/packages/scripts/import/import-media.ts
@@ -1,31 +1,32 @@
-import { join, dirname } from 'path';
-import { fileURLToPath } from 'url';
-const __dirname = dirname(fileURLToPath(import.meta.url));
+import { dirname, join } from 'node:path'
+import { fileURLToPath } from 'node:url'

-import * as fs from 'fs';
-import { environment, storage, timestamp } from '../config-firebase.js';
-import { getImageServingUrl } from './getImageServingUrl.js';
-import { GoalDatabasePhoto } from '@living-dictionaries/types/photo.interface.js';
+import * as fs from 'node:fs'
+import type { GoalDatabasePhoto } from '@living-dictionaries/types/photo.interface.js'
+import { environment, storage, timestamp } from '../config-firebase.js'
+import { getImageServingUrl } from './getImageServingUrl.js'

+const __dirname = dirname(fileURLToPath(import.meta.url))
+
-const fileBucket = `talking-dictionaries-${environment == 'prod' ? 'alpha' : 'dev'}.appspot.com`;
+const fileBucket = `talking-dictionaries-${environment === 'prod' ? 'alpha' : 'dev'}.appspot.com`

 export async function uploadAudioFile(
   audioFileName: string,
   entryId: string,
   dictionaryId: string,
-  dry = false
+  dry = false,
 ): Promise<string> {
-  const audioDir = join(__dirname, `data/${dictionaryId}/audio`);
-  const audioFilePath = join(audioDir, audioFileName);
+  const audioDir = join(__dirname, `data/${dictionaryId}/audio`)
+  const audioFilePath = join(audioDir, audioFileName)

   if (!fs.existsSync(audioFilePath)) {
-    console.log(`>> Missing audio file: ${audioFileName}`);
-    return null;
+    console.log(`>> Missing audio file: ${audioFileName}`)
+    return null
   }

   try {
-    const fileTypeSuffix = audioFileName.match(/\.[0-9a-z]+$/i)[0];
-    const uploadedAudioPath = `${dictionaryId}/audio/${entryId}_${new Date().getTime()}${fileTypeSuffix}`;
+    const [fileTypeSuffix] = audioFileName.match(/\.[0-9a-z]+$/i)
+    const uploadedAudioPath = `${dictionaryId}/audio/${entryId}_${new Date().getTime()}${fileTypeSuffix}`

     if (!dry) {
       await storage.bucket(fileBucket).upload(audioFilePath, {
@@ -33,14 +34,14 @@ export async function uploadAudioFile(
       metadata: {
         originalFileName: audioFileName,
       },
-      });
+      })
     }
-    return uploadedAudioPath;
+    return uploadedAudioPath
   } catch (err) {
     console.log(
-      `!!! Not adding audio ${audioFileName} as the server had trouble uploading it. Double-check the file to see if there is a problem with it or perhaps there is code/server/network-connection problem. Error: ${err}`
-    );
-    return null;
+      `!!! Not adding audio ${audioFileName} as the server had trouble uploading it. Double-check the file to see if there is a problem with it or perhaps there is code/server/network-connection problem. Error: ${err}`,
+    )
+    return null
   }
 }

@@ -48,36 +49,35 @@ export async function uploadImageFile(
   imageFileName: string,
   entryId: string,
   dictionaryId: string,
-  dry = false
+  dry = false,
 ): Promise<GoalDatabasePhoto> {
-  const imageDir = join(__dirname, `data/${dictionaryId}/images`);
-  const imageFilePath = join(imageDir, imageFileName);
+  const imageDir = join(__dirname, `data/${dictionaryId}/images`)
+  const imageFilePath = join(imageDir, imageFileName)

   if (!fs.existsSync(imageFilePath)) {
-    console.log(`>> Missing image file: ${imageFileName}`);
-    return null;
+    console.log(`>> Missing image file: ${imageFileName}`)
+    return null
   }

   try {
-    const fileTypeSuffix = imageFileName.match(/\.[0-9a-z]+$/i)[0];
-    const storagePath = `${dictionaryId}/images/${entryId}_${new Date().getTime()}${fileTypeSuffix}`;
+    const [fileTypeSuffix] = imageFileName.match(/\.[0-9a-z]+$/i)
+    const storagePath = `${dictionaryId}/images/${entryId}_${new Date().getTime()}${fileTypeSuffix}`

     if (dry)
-      return { path: storagePath, gcs: 'no-path-bc-dry-run' };
-
+      return { path: storagePath, gcs: 'no-path-bc-dry-run' }

     await storage.bucket(fileBucket).upload(imageFilePath, {
       destination: storagePath,
       metadata: {
         originalFileName: imageFileName,
       },
-    });
+    })

-    let gcsPath;
+    let gcsPath
     try {
-      gcsPath = await getImageServingUrl(storagePath, environment);
+      gcsPath = await getImageServingUrl(storagePath, environment)
     } catch (err) {
-      console.log(`!!! Error getting image serving URL: ${err}`);
-      gcsPath = '';
+      console.log(`!!! Error getting image serving URL: ${err}`)
+      gcsPath = ''
     }

     return {
       path: storagePath,
       gcs: gcsPath,
       ts: timestamp,
       // cr: // not yet included in import template
-    };
+    }
   } catch (err) {
     console.log(
-      `!!! Not adding image ${imageFileName} as the server had trouble digesting it. Double-check the file to see if it is just a corrupted jpg (as some are) or if the file is good and perhaps there is code/server/network-connection problem. Error: ${err}`
-    );
-    return null;
+      `!!! Not adding image ${imageFileName} as the server had trouble digesting it. Double-check the file to see if it is just a corrupted jpg (as some are) or if the file is good and perhaps there is code/server/network-connection problem. Error: ${err}`,
+    )
+    return null
   }
 }
diff --git a/packages/scripts/import/import-spreadsheet-v4.ts b/packages/scripts/import/import-spreadsheet-v4.ts
index fa872bf1c..ae83ec389 100644
--- a/packages/scripts/import/import-spreadsheet-v4.ts
+++ b/packages/scripts/import/import-spreadsheet-v4.ts
@@ -1,101 +1,101 @@
-import type { ActualDatabaseEntry } from '@living-dictionaries/types';
-import { db, timestamp, environment } from '../config-firebase.js';
-import { uploadAudioFile, uploadImageFile } from './import-media.js';
-import { readFileSync } from 'fs';
-import { parseCSVFrom } from './parse-csv.js';
-import { convertJsonRowToEntryFormat } from './convertJsonRowToEntryFormat.js';
+import { readFileSync } from 'node:fs'
+import type { ActualDatabaseEntry } from '@living-dictionaries/types'
+import { db, environment, timestamp } from '../config-firebase.js'
+import { uploadAudioFile, uploadImageFile } from './import-media.js'
+import { parseCSVFrom } from './parse-csv.js'
+import { convertJsonRowToEntryFormat } from './convertJsonRowToEntryFormat.js'

-const developer_in_charge = 'qkTzJXH24Xfc57cZJRityS6OTn52'; // diego@livingtongues.org -> Diego Córdova Nieto;
-type unique_speakers = Record<string, string>;
-const different_speakers: unique_speakers[] = [];
+const developer_in_charge = 'qkTzJXH24Xfc57cZJRityS6OTn52' // diego@livingtongues.org -> Diego Córdova Nieto;
+type unique_speakers = Record<string, string>
+const different_speakers: unique_speakers[] = []

 export async function importFromSpreadsheet(dictionaryId: string, dry = false) {
-  const dateStamp = Date.now();
+  const dateStamp = Date.now()

-  const file = readFileSync(`./import/data/${dictionaryId}/${dictionaryId}.csv`, 'utf8');
-  const rows = parseCSVFrom(file);
-  const entries = await importEntriesToFirebase(dictionaryId, rows, dateStamp, dry);
+  const file = readFileSync(`./import/data/${dictionaryId}/${dictionaryId}.csv`, 'utf8')
+  const rows = parseCSVFrom(file)
+  const entries = await importEntriesToFirebase(dictionaryId, rows, dateStamp, dry)
   console.log(
     `Finished ${dry ? 'emulating' : 'importing'} ${entries.length} entries to ${
      environment === 'dev' ?
'http://localhost:3041/' : 'livingdictionaries.app/' - }${dictionaryId} in ${(Date.now() - dateStamp) / 1000} seconds` - ); - console.log(''); - return entries; + }${dictionaryId} in ${(Date.now() - dateStamp) / 1000} seconds`, + ) + console.log('') + return entries } export async function importEntriesToFirebase( dictionaryId: string, rows: any[], dateStamp: number, - dry = false + dry = false, ) { - const entries: ActualDatabaseEntry[] = []; - let entryCount = 0; - let batchCount = 0; - let batch = db.batch(); - const colRef = db.collection(`dictionaries/${dictionaryId}/words`); - const speakerRef = db.collection('speakers'); - const dictionarySpeakerSnapshot = await speakerRef.where('contributingTo', 'array-contains', dictionaryId).get(); - dictionarySpeakerSnapshot.docs.forEach((snap) => different_speakers.push({ [snap.data().displayName]: snap.id })); - let speakerId; + const entries: ActualDatabaseEntry[] = [] + let entryCount = 0 + let batchCount = 0 + let batch = db.batch() + const colRef = db.collection(`dictionaries/${dictionaryId}/words`) + const speakerRef = db.collection('speakers') + const dictionarySpeakerSnapshot = await speakerRef.where('contributingTo', 'array-contains', dictionaryId).get() + dictionarySpeakerSnapshot.docs.forEach(snap => different_speakers.push({ [snap.data().displayName]: snap.id })) + let speakerId for (const row of rows) { if (!row.lexeme || row.lexeme === '(word/phrase)') - continue; + continue if (!dry && batchCount === 200) { - console.log('Committing batch of entries ending with: ', entryCount); - await batch.commit(); - batch = db.batch(); - batchCount = 0; + console.log('Committing batch of entries ending with: ', entryCount) + await batch.commit() + batch = db.batch() + batchCount = 0 } - const entryId = colRef.doc().id; - const entry = convertJsonRowToEntryFormat(row, dateStamp, timestamp); + const entryId = colRef.doc().id + const entry = convertJsonRowToEntryFormat(row, dateStamp, timestamp) if (row.photoFile) { - const pf = await uploadImageFile(row.photoFile, entryId, dictionaryId, dry); - if (pf) entry.pf = pf; + const pf = await uploadImageFile(row.photoFile, entryId, dictionaryId, dry) + if (pf) entry.pf = pf } if (row.soundFile) { - speakerId = different_speakers.find(speaker => Object.keys(speaker).some(key => key === row.speakerName))?.[row.speakerName]; + speakerId = different_speakers.find(speaker => Object.keys(speaker).includes(row.speakerName))?.[row.speakerName] if (row.speakerName && !speakerId) { - speakerId = speakerRef.doc().id; - different_speakers.push({[row.speakerName]: speakerId}); + speakerId = speakerRef.doc().id + different_speakers.push({ [row.speakerName]: speakerId }) batch.create(speakerRef.doc(speakerId), { displayName: row.speakerName, birthplace: row.speakerHometown || '', - decade: parseInt(row.speakerAge) || '', + decade: Number.parseInt(row.speakerAge) || '', gender: row.speakerGender || '', contributingTo: [dictionaryId], createdAt: timestamp, createdBy: developer_in_charge, updatedAt: timestamp, updatedBy: developer_in_charge, - }); + }) } - const audioFilePath = await uploadAudioFile(row.soundFile, entryId, dictionaryId, dry); + const audioFilePath = await uploadAudioFile(row.soundFile, entryId, dictionaryId, dry) if (audioFilePath) { entry.sfs = [{ path: audioFilePath, ts: new Date().getTime(), - }]; + }] if (speakerId) - entry.sfs[0].sp = [speakerId]; + entry.sfs[0].sp = [speakerId] else - entry.sf.speakerName = row.speakerName; // Keep that if for some reason we need the speakername as text only 
again.
+        entry.sf.speakerName = row.speakerName // Keep this in case we need the speaker name as text only again.
     }
   }

-    entries.push(entry);
-    batch.create(colRef.doc(entryId), entry);
-    batchCount++;
-    entryCount++;
+    entries.push(entry)
+    batch.create(colRef.doc(entryId), entry)
+    batchCount++
+    entryCount++
   }

-  console.log(`Committing final batch of entries ending with: ${entryCount}`);
-  if (!dry) await batch.commit();
-  return entries;
+  console.log(`Committing final batch of entries ending with: ${entryCount}`)
+  if (!dry) await batch.commit()
+  return entries
 }
diff --git a/packages/scripts/import/import.ts b/packages/scripts/import/import.ts
index f3ee1daf4..60a9f391c 100644
--- a/packages/scripts/import/import.ts
+++ b/packages/scripts/import/import.ts
@@ -1,19 +1,19 @@
-import { program } from 'commander';
+import { program } from 'commander'
+
+// import { importFromSpreadsheet } from './import-spreadsheet-v4.js';
+import { importFromSpreadsheet } from './supabase-senses'

 program
   // .version('0.0.1')
   .option('-e, --environment [dev/prod]', 'Firebase Project', 'dev')
   .option('--id <id>', 'Dictionary Id')
   .option('--dry', 'Only log values, do not upload data and media')
-  .parse(process.argv);
-
-// import { importFromSpreadsheet } from './import-spreadsheet-v4.js';
-import { importFromSpreadsheet } from './supabase-senses';
+  .parse(process.argv)

-const dictionaryId = program.opts().id;
-const {dry} = program.opts();
+const dictionaryId = program.opts().id
+const { dry } = program.opts()

 if (dry)
-  console.log('Dry run, no data will be uploaded');
+  console.log('Dry run, no data will be uploaded')

-console.log(`Importing ${dictionaryId} to ${program.opts().environment}.`);
-importFromSpreadsheet(dictionaryId, dry).then((entries) => console.log(entries));
+console.log(`Importing ${dictionaryId} to ${program.opts().environment}.`)
+importFromSpreadsheet(dictionaryId, dry).then(entries => console.log(entries))
diff --git a/packages/scripts/import/supabase-senses.ts b/packages/scripts/import/supabase-senses.ts
index 8d3c3537a..0b4e87fa9 100644
--- a/packages/scripts/import/supabase-senses.ts
+++ b/packages/scripts/import/supabase-senses.ts
@@ -1,104 +1,103 @@
-import type { ActualDatabaseEntry } from '@living-dictionaries/types';
-import { db, timestamp, environment } from '../config-firebase.js';
-import { uploadAudioFile, uploadImageFile } from './import-media.js';
-import { readFileSync } from 'fs';
-import { parseCSVFrom } from './parse-csv.js';
-import { convertJsonRowToEntryFormat } from './convertJsonRowToEntryFormat.js';
+import { readFileSync } from 'node:fs'
+import type { ActualDatabaseEntry } from '@living-dictionaries/types'
+import { db, environment, timestamp } from '../config-firebase.js'
+import { uploadAudioFile, uploadImageFile } from './import-media.js'
+import { parseCSVFrom } from './parse-csv.js'
+import { convertJsonRowToEntryFormat } from './convertJsonRowToEntryFormat.js'

-const developer_in_charge = 'qkTzJXH24Xfc57cZJRityS6OTn52'; // diego@livingtongues.org -> Diego Córdova Nieto;
-type unique_speakers = Record<string, string>;
-const different_speakers: unique_speakers = {};
+const developer_in_charge = 'qkTzJXH24Xfc57cZJRityS6OTn52' // diego@livingtongues.org -> Diego Córdova Nieto;
+type unique_speakers = Record<string, string>
+const different_speakers: unique_speakers = {}

 export async function importFromSpreadsheet(dictionaryId: string, dry = false) {
-  const dateStamp = Date.now();
+  const dateStamp = Date.now()

-  const file =
readFileSync(`./import/data/${dictionaryId}/${dictionaryId}.csv`, 'utf8'); - const rows = parseCSVFrom(file); - const entries = await importEntriesToFirebase(dictionaryId, rows, dateStamp, dry); + const file = readFileSync(`./import/data/${dictionaryId}/${dictionaryId}.csv`, 'utf8') + const rows = parseCSVFrom(file) + const entries = await importEntriesToFirebase(dictionaryId, rows, dateStamp, dry) console.log( `Finished ${dry ? 'emulating' : 'importing'} ${entries.length} entries to ${ environment === 'dev' ? 'http://localhost:3041/' : 'livingdictionaries.app/' - }${dictionaryId} in ${(Date.now() - dateStamp) / 1000} seconds` - ); - console.log(''); - return entries; + }${dictionaryId} in ${(Date.now() - dateStamp) / 1000} seconds`, + ) + console.log('') + return entries } export async function importEntriesToFirebase( dictionaryId: string, rows: any[], dateStamp: number, - dry = false + dry = false, ) { - const entries: ActualDatabaseEntry[] = []; - let entryCount = 0; - let batchCount = 0; - let batch = db.batch(); - const colRef = db.collection(`dictionaries/${dictionaryId}/words`); - let speakerRef; - let speakerId; + const entries: ActualDatabaseEntry[] = [] + let entryCount = 0 + let batchCount = 0 + let batch = db.batch() + const colRef = db.collection(`dictionaries/${dictionaryId}/words`) + let speakerRef + let speakerId for (const row of rows) { if (!row.lexeme || row.lexeme === '(word/phrase)') - continue; + continue if (!dry && batchCount === 200) { - console.log('Committing batch of entries ending with: ', entryCount); - await batch.commit(); - batch = db.batch(); - batchCount = 0; + console.log('Committing batch of entries ending with: ', entryCount) + await batch.commit() + batch = db.batch() + batchCount = 0 } - const entryId = colRef.doc().id; + const entryId = colRef.doc().id // It's now duplicated - const sense_regex = /^s\d+_/; - const entry = Object.keys(row).some(key => sense_regex.test(key)) ? convertJsonRowToEntryFormat({row, dateStamp, timestamp}, {entry_id: entryId, dictionary_id: dictionaryId}) : convertJsonRowToEntryFormat({row, dateStamp, timestamp}); + const sense_regex = /^s\d+_/ + const entry = Object.keys(row).some(key => sense_regex.test(key)) ? 
convertJsonRowToEntryFormat({ row, dateStamp, timestamp }, { entry_id: entryId, dictionary_id: dictionaryId }) : convertJsonRowToEntryFormat({ row, dateStamp, timestamp })

     if (row.photoFile) {
-      const pf = await uploadImageFile(row.photoFile, entryId, dictionaryId, dry);
-      if (pf) entry.pf = pf;
+      const pf = await uploadImageFile(row.photoFile, entryId, dictionaryId, dry)
+      if (pf) entry.pf = pf
     }

     if (row.soundFile) {
-      speakerRef = db.collection('speakers');
+      speakerRef = db.collection('speakers')
       if (row.speakerName && (!speakerId || !(row.speakerName in different_speakers))) {
-        speakerId = speakerRef.doc().id;
-        different_speakers[row.speakerName] = speakerId;
+        speakerId = speakerRef.doc().id
+        different_speakers[row.speakerName] = speakerId
         batch.create(speakerRef.doc(speakerId), {
           displayName: row.speakerName,
           birthplace: row.speakerHometown || '',
-          decade: parseInt(row.speakerAge) || '',
+          decade: Number.parseInt(row.speakerAge) || '',
           gender: row.speakerGender || '',
           contributingTo: [dictionaryId],
           createdAt: timestamp,
           createdBy: developer_in_charge,
           updatedAt: timestamp,
           updatedBy: developer_in_charge,
-        });
+        })
       }

-      const audioFilePath = await uploadAudioFile(row.soundFile, entryId, dictionaryId, dry);
+      const audioFilePath = await uploadAudioFile(row.soundFile, entryId, dictionaryId, dry)
       if (audioFilePath) {
         entry.sf = {
           path: audioFilePath,
           ts: timestamp,
-        };
+        }
         if (speakerId)
-          entry.sf.sp = different_speakers[row.speakerName];
+          entry.sf.sp = different_speakers[row.speakerName]
         else
-          entry.sf.speakerName = row.speakerName; // Keep that if for some reason we need the speakername as text only again.
-
+          entry.sf.speakerName = row.speakerName // Keep this in case we need the speaker name as text only again.
       }
     }

-    entries.push(entry);
-    batch.create(colRef.doc(entryId), entry);
-    batchCount++;
-    entryCount++;
+    entries.push(entry)
+    batch.create(colRef.doc(entryId), entry)
+    batchCount++
+    entryCount++
   }

-  console.log(`Committing final batch of entries ending with: ${entryCount}`);
-  if (!dry) await batch.commit();
-  return entries;
+  console.log(`Committing final batch of entries ending with: ${entryCount}`)
+  if (!dry) await batch.commit()
+  return entries
 }

 // Current flow:
 // convert-rows-to-objects, then import those (two-steps)
 // add example sentence to new table (Jacob will create, so it doesn't exist yet)
 // add another entry_update to connect that example sentence id to the sense

-
 // Future Supabase-only flow - ignore for now
 // Import entry into imports table, after which a trigger edge function will create the entry, get the entry id
 // use that entry id to add senses via entry_updates
diff --git a/packages/scripts/migrate-to-supabase/auth.ts b/packages/scripts/migrate-to-supabase/auth.ts
index fc9953710..5ec6b2ef2 100644
--- a/packages/scripts/migrate-to-supabase/auth.ts
+++ b/packages/scripts/migrate-to-supabase/auth.ts
@@ -1,7 +1,7 @@
-import { UserRecord } from 'firebase-admin/auth';
+import type { UserRecord } from 'firebase-admin/auth'
 import { auth } from '../config-firebase'
-import { executeQuery } from '../config-supabase';
-import { write_users_insert } from './write-users-insert';
+import { executeQuery } from '../config-supabase'
+import { write_users_insert } from './write-users-insert'

 migrate_users()

@@ -35,6 +35,6 @@ async function get_users(): Promise<UserRecord[]> {
     return users
   } catch (error) {
-    console.log({list_error: error});
+    console.log({ list_error: error })
   }
 }
diff --git a/packages/scripts/refactor/entry-refactor.ts b/packages/scripts/refactor/entry-refactor.ts
index 9d86eeabf..a96ca9826 100644
--- a/packages/scripts/refactor/entry-refactor.ts
+++ b/packages/scripts/refactor/entry-refactor.ts
@@ -1,171 +1,172 @@
-import { ActualDatabaseEntry } from '@living-dictionaries/types';
-import { db } from '../config-firebase';
-import { program } from 'commander';
-import { reverse_semantic_domains_mapping } from './reverse-semantic-domains-mapping';
-import { turn_dialect_strings_to_arrays } from './turn-dialects-to-arrays';
+import type { ActualDatabaseEntry } from '@living-dictionaries/types'
+import { program } from 'commander'
+import { db } from '../config-firebase'
+import { reverse_semantic_domains_mapping } from './reverse-semantic-domains-mapping'
+import { turn_dialect_strings_to_arrays } from './turn-dialects-to-arrays'
+
 program
   // .version('0.0.1')
   .option('--id <id>', 'Dictionary Id')
   .option('--live', 'If not included, only log values')
-  .parse(process.argv);
+  .parse(process.argv)

-const dictionaryId = program.opts().id;
-const {live} = program.opts();
+const dictionaryId = program.opts().id
+const { live } = program.opts()

 async function entryRefactor() {
   try {
     if (dictionaryId) {
-      console.log(`---Refactoring: ${dictionaryId}`);
-      await fetchEntries(dictionaryId);
+      console.log(`---Refactoring: ${dictionaryId}`)
+      await fetchEntries(dictionaryId)
     } else {
-      const snapshot = await db.collection('dictionaries').get();
+      const snapshot = await db.collection('dictionaries').get()
       for (const dictionarySnap of snapshot.docs) {
         // If setting limits on refactoring, you can skip dictionaries beginning with letters that have already been processed:
-        const done = /^[abcdefghijklmn].*/;
+        const done = /^[abcdefghijklmn].*/
         if (!done.test(dictionarySnap.id.toLowerCase())) {
-          console.log(`---Refactoring: ${dictionarySnap.id}`);
-          await fetchEntries(dictionarySnap.id);
+          console.log(`---Refactoring: ${dictionarySnap.id}`)
+          await fetchEntries(dictionarySnap.id)
         }
       }
     }
   } catch (error) {
-    console.error('Refactor failed!');
-    console.error(error);
+    console.error('Refactor failed!')
+    console.error(error)
   }
 }

 async function fetchEntries(dictionaryId: string) {
-  const snapshot = await db.collection(`dictionaries/${dictionaryId}/words`).get();
+  const snapshot = await db.collection(`dictionaries/${dictionaryId}/words`).get()
   for (const snap of snapshot.docs) {
-    const entry: ActualDatabaseEntry = { id: snap.id, ...(snap.data() as ActualDatabaseEntry) };
+    const entry: ActualDatabaseEntry = { id: snap.id, ...(snap.data() as ActualDatabaseEntry) }
     // await turnSDintoArray(dictionaryId, entry);
     // await refactorGloss(dictionaryId, entry);
     // await notesToPluralForm(dictionaryId, entry);
     // turnPOSintoArray(dictionaryId, entry); // not awaiting so operations can run in parallel otherwise the function errors after about 1400 iterations
     // reverese_semantic_domains_in_db(dictionaryId, entry);
     // turnDialectsIntoArray(dictionaryId, entry);
-    turnSoundFileToArray(dictionaryId, entry);
+    turnSoundFileToArray(dictionaryId, entry)
   }
 }

-const turnDialectsIntoArray = async (dictionaryId: string, entry: ActualDatabaseEntry) => {
+async function turnDialectsIntoArray(dictionaryId: string, entry: ActualDatabaseEntry) {
   if (entry.di) {
-    console.log('entry dialect before:');
-    console.log(entry.di);
+    console.log('entry dialect before:')
+    console.log(entry.di)
     if (Array.isArray(entry.di))
-      return true;
+      return true

-    entry.di = turn_dialect_strings_to_arrays(entry.di);
-    console.log('entry dialect after:');
-
console.log(entry.di); - if (!live) return; - await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry); + entry.di = turn_dialect_strings_to_arrays(entry.di) + console.log('entry dialect after:') + console.log(entry.di) + if (!live) return + await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry) } - return true; -}; + return true +} -const reverese_semantic_domains_in_db = async (dictionaryId: string, entry: ActualDatabaseEntry) => { +async function reverese_semantic_domains_in_db(dictionaryId: string, entry: ActualDatabaseEntry) { if (entry.sdn) { - console.log('entry sdn before:'); - console.log(entry.sdn); - entry.sdn = reverse_semantic_domains_mapping(entry.sdn); + console.log('entry sdn before:') + console.log(entry.sdn) + entry.sdn = reverse_semantic_domains_mapping(entry.sdn) } - console.log('entry sdn after:'); - console.log(entry.sdn); - if (!live) return; - await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry); - return true; -}; + console.log('entry sdn after:') + console.log(entry.sdn) + if (!live) return + await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry) + return true +} -const turnSDintoArray = async (dictionaryId: string, entry: ActualDatabaseEntry) => { +async function turnSDintoArray(dictionaryId: string, entry: ActualDatabaseEntry) { if (entry.sd && typeof entry.sd === 'string') { - console.log('entry sd before: ', entry.sd); - const emptyArray: string[] = []; - emptyArray.push(entry.sd); - entry.sd = emptyArray; - console.log('entry sd after: ', entry.sd); - } else if (entry.sd && entry.sd instanceof Array) { - console.log('it is an array - do nothing'); + console.log('entry sd before: ', entry.sd) + const emptyArray: string[] = [] + emptyArray.push(entry.sd) + entry.sd = emptyArray + console.log('entry sd after: ', entry.sd) + } else if (entry.sd && Array.isArray(entry.sd)) { + console.log('it is an array - do nothing') } else { - delete entry.sd; + delete entry.sd } - if (!live) return; - await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry); - return true; -}; + if (!live) return + await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry) + return true +} -let count = 1; -const turnPOSintoArray = async (dictionaryId: string, entry: ActualDatabaseEntry) => { +let count = 1 +async function turnPOSintoArray(dictionaryId: string, entry: ActualDatabaseEntry) { if (entry.ps && typeof entry.ps === 'string') { - console.log(`${count}:${dictionaryId}:${entry.id}`); - console.log(entry.ps); - entry.ps = [entry.ps]; - console.log(entry.ps); - count++; - if (live) await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry); + console.log(`${count}:${dictionaryId}:${entry.id}`) + console.log(entry.ps) + entry.ps = [entry.ps] + console.log(entry.ps) + count++ + if (live) await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry) // } else if (entry.ps && entry.ps instanceof Array) { // console.log(`${dictionaryId}:${entry.id} is already an array`); } -}; +} -const refactorGloss = async (dictionaryId: string, entry: ActualDatabaseEntry) => { - console.log(entry.gl); +async function refactorGloss(dictionaryId: string, entry: ActualDatabaseEntry) { + console.log(entry.gl) for (const key in entry.gl) { if (key === 'English') { - entry.gl.en = entry.gl[key]; - delete entry.gl[key]; + entry.gl.en = entry.gl[key] + delete entry.gl[key] } if (key === 'Spanish') { - 
entry.gl.es = entry.gl[key];
-      delete entry.gl[key];
+      entry.gl.es = entry.gl[key]
+      delete entry.gl[key]
     }
     if (key === 'Español') {
-      entry.gl.es = entry.gl[key];
-      delete entry.gl[key];
+      entry.gl.es = entry.gl[key]
+      delete entry.gl[key]
     }
     if (key === 'Bahasa Indonesia') {
-      entry.gl.id = entry.gl[key];
-      delete entry.gl[key];
+      entry.gl.id = entry.gl[key]
+      delete entry.gl[key]
     }
     if (key === 'French') {
-      entry.gl.fr = entry.gl[key];
-      delete entry.gl[key];
+      entry.gl.fr = entry.gl[key]
+      delete entry.gl[key]
     }
     if (key === 'Mandarin 中文') {
-      entry.gl.cmn = entry.gl[key];
-      delete entry.gl[key];
+      entry.gl.cmn = entry.gl[key]
+      delete entry.gl[key]
     }
   }
-  if (!live) return;
-  await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry);
-  return console.log(`${entry.id}: `, entry.gl);
-};
+  if (!live) return
+  await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry)
+  return console.log(`${entry.id}: `, entry.gl)
+}

-const notesToPluralForm = async (dictionaryId: string, entry: ActualDatabaseEntry) => {
-  const ntBefore = entry.nt;
+async function notesToPluralForm(dictionaryId: string, entry: ActualDatabaseEntry) {
+  const ntBefore = entry.nt
   if (entry?.nt.startsWith('Plural form:')) {
-    entry.pl = entry.nt.replace('Plural form: ', '');
-    delete entry.nt;
-    console.log(`${entry.id}, ntBefore:${ntBefore}, ntAfter:${entry.nt}, pl:${entry.pl}`);
+    entry.pl = entry.nt.replace('Plural form: ', '')
+    delete entry.nt
+    console.log(`${entry.id}, ntBefore:${ntBefore}, ntAfter:${entry.nt}, pl:${entry.pl}`)
   }
-  if (!live) return;
-  await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry);
-  return true;
-};
+  if (!live) return
+  await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry)
+  return true
+}

-const turnSoundFileToArray = async (dictionaryId: string, entry: ActualDatabaseEntry) => {
-  const sfBefore = entry.sf;
+async function turnSoundFileToArray(dictionaryId: string, entry: ActualDatabaseEntry) {
+  const sfBefore = entry.sf
   if (entry.sf?.sp) {
-    entry.sfs = [{...entry.sf, sp: [entry.sf.sp]}];
-    delete entry.sf;
-    console.log(`${entry.id}, sfBefore:${JSON.stringify(sfBefore)}, sfsAfter:${JSON.stringify(entry.sfs)}, sfNull:${entry.sf}`);
+    entry.sfs = [{ ...entry.sf, sp: [entry.sf.sp] }]
+    delete entry.sf
+    console.log(`${entry.id}, sfBefore:${JSON.stringify(sfBefore)}, sfsAfter:${JSON.stringify(entry.sfs)}, sfNull:${entry.sf}`)
   }
-  if (!live) return;
-  await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry);
-  return true;
+  if (!live) return
+  await db.collection(`dictionaries/${dictionaryId}/words`).doc(entry.id).set(entry)
+  return true
 }

-entryRefactor();
+entryRefactor()

 // Single Dictionary
 // `pnpm entryRefactor --id babanki` to log refactor in dev
diff --git a/packages/scripts/refactor/move-firestore-document.ts b/packages/scripts/refactor/move-firestore-document.ts
index ff8262fd0..198a922f6 100644
--- a/packages/scripts/refactor/move-firestore-document.ts
+++ b/packages/scripts/refactor/move-firestore-document.ts
@@ -1,4 +1,4 @@
-import { db } from '../config-firebase';
+import { db } from '../config-firebase'

 // deleteDocRecursively(`dictionaries/sipu`);
 // copyDoc(`dictionaries/sipu`, `dictionaries/conestoga_language`, {}, true);
@@ -12,93 +12,93 @@ import { db } from '../config-firebase';
 export async function moveDoc(
   oldDocPath: string,
   newDocPath: string,
-  addData?: any
+  addData?: any,
 ): Promise<boolean> {
-  const copied = await copyDoc(oldDocPath, newDocPath, addData, true);
+  const copied = await copyDoc(oldDocPath, newDocPath, addData, true)
   if (copied) {
-    await deleteDocRecursively(`${oldDocPath}`);
-    return true;
+    await deleteDocRecursively(`${oldDocPath}`)
+    return true
   }
-  throw new Error('Data was not copied properly to the target collection, please try again.');
+  throw new Error('Data was not copied properly to the target collection, please try again.')
 }

 export async function copyDoc(
   oldDocPath: string,
   newDocPath: string,
   addData: any = {},
-  recursive = false
+  recursive = false,
 ): Promise<boolean> {
-  const docRef = db.doc(oldDocPath);
+  const docRef = db.doc(oldDocPath)
   const docData = await docRef
     .get()
-    .then((doc) => doc.exists && doc.data())
+    .then(doc => doc.exists && doc.data())
     .catch((error) => {
-      throw new Error(`Error reading document ${oldDocPath}: ${JSON.stringify(error)}`);
-    });
+      throw new Error(`Error reading document ${oldDocPath}: ${JSON.stringify(error)}`)
+    })

   if (docData) {
     await db
       .doc(newDocPath)
       .set({ ...docData, ...addData })
       .catch((error) => {
-        throw new Error(`Error creating document ${newDocPath}: ${JSON.stringify(error)}`);
-      });
+        throw new Error(`Error creating document ${newDocPath}: ${JSON.stringify(error)}`)
+      })

     // if copying of the subcollections is needed
     if (recursive) {
       // subcollections
-      const subcollections = await docRef.listCollections();
+      const subcollections = await docRef.listCollections()
       for await (const subcollectionRef of subcollections) {
-        const subcollectionPath = `${oldDocPath}/${subcollectionRef.id}`;
+        const subcollectionPath = `${oldDocPath}/${subcollectionRef.id}`
         await subcollectionRef
           .get()
           .then(async (snapshot) => {
-            const {docs} = snapshot;
+            const { docs } = snapshot
             for await (const doc of docs) {
               await copyDoc(
                 `${subcollectionPath}/${doc.id}`,
                 `${newDocPath}/${subcollectionRef.id}/${doc.id}`,
-                true
-              );
+                {}, true, // empty addData so `true` lands on the `recursive` param
+              )
             }
           })
           .catch((error) => {
            throw new Error(
-              `Error reading subcollection ${subcollectionPath}: ${JSON.stringify(error)}`
-            );
-          });
+              `Error reading subcollection ${subcollectionPath}: ${JSON.stringify(error)}`,
+            )
+          })
      }
    }
-    return true;
+    return true
   }
-  return false;
+  return false
 }

 export async function deleteDocRecursively(docPath: string): Promise<boolean> {
-  const docRef = db.doc(docPath);
+  const docRef = db.doc(docPath)

-  const subcollections = await docRef.listCollections();
+  const subcollections = await docRef.listCollections()
   for await (const subcollectionRef of subcollections) {
     await subcollectionRef
       .get()
       .then(async (snapshot) => {
-        const {docs} = snapshot;
+        const { docs } = snapshot
         for await (const doc of docs)
-          await deleteDocRecursively(`${docPath}/${subcollectionRef.id}/${doc.id}`);
+          await deleteDocRecursively(`${docPath}/${subcollectionRef.id}/${doc.id}`)

-        return true;
+        return true
       })
       .catch((error) => {
         console.error(
           'Error reading subcollection',
           `${docPath}/${subcollectionRef.id}`,
-          JSON.stringify(error)
-        );
-        return false;
-      });
+          JSON.stringify(error),
+        )
+        return false
+      })
   }

   // when all subcollections are deleted, delete the document itself
@@ -106,7 +106,7 @@ export async function deleteDocRecursively(docPath: string): Promise<boolean> {
     .delete()
     .then(() => true)
     .catch((error) => {
-      console.error('Error deleting document', docPath, JSON.stringify(error));
-      return false;
-    });
+      console.error('Error deleting document', docPath, JSON.stringify(error))
+      return false
+    })
 }
diff --git a/packages/scripts/refactor/upload-old-dictionaries.ts b/packages/scripts/refactor/upload-old-dictionaries.ts
index 31cce092a..2dc2dfc81 100644
--- a/packages/scripts/refactor/upload-old-dictionaries.ts
+++ b/packages/scripts/refactor/upload-old-dictionaries.ts
@@ -1,33 +1,31 @@
-import { GeoPoint } from 'firebase-admin/firestore';
-import { IDictionary } from '@living-dictionaries/types';
-import { db } from '../config-firebase';
+import { GeoPoint } from 'firebase-admin/firestore'
+import type { IDictionary } from '@living-dictionaries/types'
+import { db } from '../config-firebase'
 import { tdLocations } from './tdv1-dictionaries';

 (() => {
   try {
     tdLocations.forEach(async (dictionary) => {
       if (dictionary.properties.icon === 'library-15') {
-        const dictionaryUrl = dictionary.properties.xlink.match(
-          /http:\/\/talkingdictionary.org\/(.+)/
-        )[1];
+        const [,dictionaryUrl] = dictionary.properties.xlink.match(/http:\/\/talkingdictionary.org\/(.+)/)
         const data: Partial<IDictionary> = {
           name: dictionary.properties.label,
           population: dictionary.properties.size,
           publishYear: dictionary.properties.date,
           coordinates: new GeoPoint(
             dictionary.geometry.coordinates[1],
-            dictionary.geometry.coordinates[0]
+            dictionary.geometry.coordinates[0],
           ),
           url: dictionary.properties.xlink,
           type: 'tdv1',
-        };
+        }
         if (dictionary.properties.thumbnail)
-          data.thumbnail = dictionary.properties.thumbnail;
+          data.thumbnail = dictionary.properties.thumbnail

-        await db.doc(`dictionaries/tdv1-${dictionaryUrl}`).set(data);
+        await db.doc(`dictionaries/tdv1-${dictionaryUrl}`).set(data)
       }
-    });
+    })
   } catch (err) {
-    console.log(err);
+    console.log(err)
   }
-})();
+})()
diff --git a/packages/site/src/lib/supabase/generated.types.ts b/packages/site/src/lib/supabase/generated.types.ts
index 547a7bece..140fdc6bb 100644
--- a/packages/site/src/lib/supabase/generated.types.ts
+++ b/packages/site/src/lib/supabase/generated.types.ts
@@ -1313,75 +1313,75 @@ export interface Database

 export type Tables<
   PublicTableNameOrOptions extends
-  | keyof (Database['public']['Tables'] & Database['public']['Views'])
-  | { schema: keyof Database },
+    | keyof (Database['public']['Tables'] & Database['public']['Views'])
+    | { schema: keyof Database },
   TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
     ? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
-    Database[PublicTableNameOrOptions['schema']]['Views'])
+      Database[PublicTableNameOrOptions['schema']]['Views'])
     : never = never,
 > = PublicTableNameOrOptions extends { schema: keyof Database }
   ? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
-    Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
+      Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
       Row: infer R
     }
-  ? R
-  : never
+    ? R
+    : never
   : PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
-  Database['public']['Views'])
+      Database['public']['Views'])
     ? (Database['public']['Tables'] &
-    Database['public']['Views'])[PublicTableNameOrOptions] extends {
+      Database['public']['Views'])[PublicTableNameOrOptions] extends {
         Row: infer R
       }
-    ? R
-    : never
+      ? R
+      : never
     : never

 export type TablesInsert<
   PublicTableNameOrOptions extends
-  | keyof Database['public']['Tables']
-  | { schema: keyof Database },
+    | keyof Database['public']['Tables']
+    | { schema: keyof Database },
   TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
     ? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
     : never = never,
 > = PublicTableNameOrOptions extends { schema: keyof Database }
   ?
Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends { - Insert: infer I - } + Insert: infer I + } ? I : never : PublicTableNameOrOptions extends keyof Database['public']['Tables'] ? Database['public']['Tables'][PublicTableNameOrOptions] extends { - Insert: infer I - } + Insert: infer I + } ? I : never : never export type TablesUpdate< PublicTableNameOrOptions extends - | keyof Database['public']['Tables'] - | { schema: keyof Database }, + | keyof Database['public']['Tables'] + | { schema: keyof Database }, TableName extends PublicTableNameOrOptions extends { schema: keyof Database } ? keyof Database[PublicTableNameOrOptions['schema']]['Tables'] : never = never, > = PublicTableNameOrOptions extends { schema: keyof Database } ? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends { - Update: infer U - } + Update: infer U + } ? U : never : PublicTableNameOrOptions extends keyof Database['public']['Tables'] ? Database['public']['Tables'][PublicTableNameOrOptions] extends { - Update: infer U - } + Update: infer U + } ? U : never : never export type Enums< PublicEnumNameOrOptions extends - | keyof Database['public']['Enums'] - | { schema: keyof Database }, + | keyof Database['public']['Enums'] + | { schema: keyof Database }, EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database } ? keyof Database[PublicEnumNameOrOptions['schema']]['Enums'] : never = never, @@ -1390,4 +1390,3 @@ export type Enums< : PublicEnumNameOrOptions extends keyof Database['public']['Enums'] ? Database['public']['Enums'][PublicEnumNameOrOptions] : never -