#!/usr/bin/env node
/**
 * Post-processes the generated @gmxsdk sources so they load as valid Node ESM:
 *  - appends a `.js` extension to extensionless relative specifiers in static
 *    imports, re-exports, side-effect imports, and dynamic `import()` calls;
 *  - adds a JSON import assertion to relative `.json` specifiers
 *    (`assert { type: 'json' }` for static imports, or the options argument
 *    `{ assert: { type: 'json' } }` for dynamic imports).
 *
 * Files are rewritten in place; already-processed files are left untouched,
 * so the script is idempotent.
 */
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';

// Resolve the generated-SDK directory relative to this script's own location.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const gmxsdkDir = path.resolve(__dirname, '../src/Managing.Fastify/src/generated/gmxsdk');

// Matches relative specifiers in static imports/re-exports and dynamic
// `import()` calls. Capture groups: (1) everything up to and including the
// opening quote, (2) the relative specifier, (3) the closing quote or `)`.
// The `\s*` before the opening quote also covers side-effect imports such as
// `import './polyfill'`, which the previous pattern missed (it required the
// quote to sit directly against the keyword when no `from` clause was present).
const importRegex =
  /((?:import|export)(?:\s+[\w*\s{},]*\s+from)?\s*['"]|import\(\s*['"])(\.[^'")\s]*)(['"]|\))/g;

// Matches relative `.json` specifiers that do not yet carry an assertion.
// The lookahead skips both the static form (`'x.json' assert { ... }`) and
// the dynamic form (`import('x.json', { assert: ... })`) so repeated runs
// of this script do not double-insert assertions.
const jsonImportRegex =
  /((?:import|export)(?:\s+[\w*\s{},]*\s+from)?\s*['"]|import\(\s*['"])(\.[^'")\s]*\.json)(['"])(?!\s+assert\b|\s*,\s*\{\s*assert)/g;

/**
 * Applies both rewrites to a file's text. Pure function — no I/O — so it can
 * be unit-tested independently of the filesystem walk.
 *
 * @param {string} content - Original source text.
 * @returns {string} Transformed source text (may be identical to `content`).
 */
function transformContent(content) {
  // Pass 1: add `.js` to extensionless relative specifiers.
  let modified = content.replace(importRegex, (match, prefix, importPath, suffix) => {
    // Specifiers that already carry an extension (.js, .json, ...) are kept.
    if (path.extname(importPath)) {
      return match;
    }
    return `${prefix}${importPath}.js${suffix}`;
  });

  // Pass 2: add JSON import assertions.
  // NOTE(review): Node >= 22 prefers `with { type: 'json' }`; `assert` is kept
  // so the emitted code matches what the consuming runtime currently expects —
  // confirm the target Node version before switching the keyword.
  modified = modified.replace(jsonImportRegex, (match, prefix, importPath, suffix) => {
    if (prefix.startsWith('import(')) {
      // Dynamic import: the assertion belongs in the second (options)
      // argument. Appending `assert { ... }` after the quote — as the old
      // code did — produced a syntax error inside the call parentheses.
      // The original closing `)` follows the match and is preserved.
      return `${prefix}${importPath}${suffix}, { assert: { type: 'json' } }`;
    }
    return `${prefix}${importPath}${suffix} assert { type: 'json' }`;
  });

  return modified;
}

/**
 * Rewrites a single file in place when the transformation changes it.
 *
 * @param {string} filePath - Absolute path of the file to process.
 * @returns {Promise<{filePath: string, changed?: boolean, error?: string}>}
 *   Per-file result object; errors are captured in the result, never thrown.
 */
async function processFile(filePath) {
  try {
    const content = await fs.readFile(filePath, 'utf8');
    const modifiedContent = transformContent(content);

    // Only touch the file (and its mtime) when something actually changed.
    if (content !== modifiedContent) {
      await fs.writeFile(filePath, modifiedContent, 'utf8');
      return { filePath, changed: true };
    }
    return { filePath, changed: false };
  } catch (error) {
    return { filePath, error: error.message };
  }
}

/**
 * Recursively collects files under `dir` whose extension is in `fileTypes`,
 * skipping dependency and build-output directories.
 *
 * @param {string} dir - Directory to walk.
 * @param {string[]} [fileTypes] - Extensions (with leading dot) to include.
 * @returns {Promise<string[]>} Absolute paths of matching files.
 */
async function walkDir(dir, fileTypes = ['.ts', '.tsx', '.js', '.jsx', '.mjs']) {
  const SKIP_DIRS = new Set(['node_modules', '.git', 'build', 'dist', '.next']);
  const entries = await fs.readdir(dir, { withFileTypes: true });
  const files = [];

  for (const entry of entries) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!SKIP_DIRS.has(entry.name)) {
        files.push(...(await walkDir(fullPath, fileTypes)));
      }
    } else if (entry.isFile() && fileTypes.includes(path.extname(entry.name))) {
      files.push(fullPath);
    }
  }
  return files;
}

/**
 * Entry point: walks the generated directory, transforms files in bounded
 * parallel chunks, and prints a per-run summary. Exits with code 1 on a
 * fatal error (e.g. the target directory is missing).
 */
async function main() {
  console.log(`Scanning @gmxsdk directory: ${gmxsdkDir}`);
  try {
    const files = await walkDir(gmxsdkDir);
    console.log(`Found ${files.length} files to process in @gmxsdk`);

    // Chunked Promise.all bounds the number of concurrently open files.
    const chunkSize = 100;
    const results = [];
    for (let i = 0; i < files.length; i += chunkSize) {
      const chunk = files.slice(i, i + chunkSize);
      results.push(...(await Promise.all(chunk.map(processFile))));
      console.log(`Processed ${Math.min(i + chunkSize, files.length)}/${files.length} files...`);
    }

    const changed = results.filter((r) => r.changed);
    const errors = results.filter((r) => r.error);

    console.log('\nSummary:');
    console.log(`- Total files scanned: ${files.length}`);
    console.log(`- Files modified: ${changed.length}`);
    console.log(`- Errors encountered: ${errors.length}`);

    if (changed.length > 0) {
      console.log('\nModified files:');
      changed.forEach(({ filePath }) => {
        console.log(`- ${path.relative(process.cwd(), filePath)}`);
      });
    }

    if (errors.length > 0) {
      console.log('\nErrors:');
      errors.forEach(({ filePath, error }) => {
        console.log(`- ${path.relative(process.cwd(), filePath)}: ${error}`);
      });
    }
  } catch (error) {
    console.error('Error:', error.message);
    process.exit(1);
  }
}

main().catch(console.error);