This is page 12 of 20. Use http://codebase.md/lingodotdev/lingo.dev?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .changeset
│ ├── config.json
│ └── README.md
├── .claude
│ ├── agents
│ │ └── code-architect-reviewer.md
│ └── commands
│ ├── analyze-bucket-type.md
│ └── create-bucket-docs.md
├── .editorconfig
├── .github
│ ├── dependabot.yml
│ └── workflows
│ ├── docker.yml
│ ├── lingodotdev.yml
│ ├── pr-check.yml
│ ├── pr-lint.yml
│ └── release.yml
├── .gitignore
├── .husky
│ └── commit-msg
├── .npmrc
├── .prettierignore
├── .prettierrc
├── .vscode
│ ├── extensions.json
│ ├── launch.json
│ └── settings.json
├── action.yml
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── composer.json
├── content
│ ├── banner.compiler.png
│ ├── banner.dark.png
│ └── banner.launch.png
├── CONTRIBUTING.md
├── DEBUGGING.md
├── demo
│ ├── adonisjs
│ │ ├── .editorconfig
│ │ ├── .env.example
│ │ ├── .gitignore
│ │ ├── ace.js
│ │ ├── adonisrc.ts
│ │ ├── app
│ │ │ ├── exceptions
│ │ │ │ └── handler.ts
│ │ │ └── middleware
│ │ │ └── container_bindings_middleware.ts
│ │ ├── bin
│ │ │ ├── console.ts
│ │ │ ├── server.ts
│ │ │ └── test.ts
│ │ ├── CHANGELOG.md
│ │ ├── config
│ │ │ ├── app.ts
│ │ │ ├── bodyparser.ts
│ │ │ ├── cors.ts
│ │ │ ├── hash.ts
│ │ │ ├── inertia.ts
│ │ │ ├── logger.ts
│ │ │ ├── session.ts
│ │ │ ├── shield.ts
│ │ │ ├── static.ts
│ │ │ └── vite.ts
│ │ ├── eslint.config.js
│ │ ├── inertia
│ │ │ ├── app
│ │ │ │ ├── app.tsx
│ │ │ │ └── ssr.tsx
│ │ │ ├── css
│ │ │ │ └── app.css
│ │ │ ├── lingo
│ │ │ │ ├── dictionary.js
│ │ │ │ └── meta.json
│ │ │ ├── pages
│ │ │ │ ├── errors
│ │ │ │ │ ├── not_found.tsx
│ │ │ │ │ └── server_error.tsx
│ │ │ │ └── home.tsx
│ │ │ └── tsconfig.json
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── resources
│ │ │ └── views
│ │ │ └── inertia_layout.edge
│ │ ├── start
│ │ │ ├── env.ts
│ │ │ ├── kernel.ts
│ │ │ └── routes.ts
│ │ ├── tests
│ │ │ └── bootstrap.ts
│ │ ├── tsconfig.json
│ │ └── vite.config.ts
│ ├── next-app
│ │ ├── .gitignore
│ │ ├── CHANGELOG.md
│ │ ├── eslint.config.mjs
│ │ ├── next.config.ts
│ │ ├── package.json
│ │ ├── postcss.config.mjs
│ │ ├── public
│ │ │ ├── file.svg
│ │ │ ├── globe.svg
│ │ │ ├── next.svg
│ │ │ ├── vercel.svg
│ │ │ └── window.svg
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── app
│ │ │ │ ├── client-component.tsx
│ │ │ │ ├── favicon.ico
│ │ │ │ ├── globals.css
│ │ │ │ ├── layout.tsx
│ │ │ │ ├── lingo-dot-dev.tsx
│ │ │ │ ├── page.tsx
│ │ │ │ └── test
│ │ │ │ └── page.tsx
│ │ │ ├── components
│ │ │ │ ├── hero-actions.tsx
│ │ │ │ ├── hero-subtitle.tsx
│ │ │ │ ├── hero-title.tsx
│ │ │ │ └── index.ts
│ │ │ └── lingo
│ │ │ ├── dictionary.js
│ │ │ └── meta.json
│ │ └── tsconfig.json
│ ├── react-router-app
│ │ ├── .dockerignore
│ │ ├── .gitignore
│ │ ├── app
│ │ │ ├── app.css
│ │ │ ├── lingo
│ │ │ │ ├── dictionary.js
│ │ │ │ └── meta.json
│ │ │ ├── root.tsx
│ │ │ ├── routes
│ │ │ │ ├── home.tsx
│ │ │ │ └── test.tsx
│ │ │ ├── routes.ts
│ │ │ └── welcome
│ │ │ ├── lingo-dot-dev.tsx
│ │ │ ├── logo-dark.svg
│ │ │ ├── logo-light.svg
│ │ │ └── welcome.tsx
│ │ ├── Dockerfile
│ │ ├── package.json
│ │ ├── public
│ │ │ └── favicon.ico
│ │ ├── react-router.config.ts
│ │ ├── README.md
│ │ ├── tsconfig.json
│ │ └── vite.config.ts
│ └── vite-project
│ ├── .gitignore
│ ├── CHANGELOG.md
│ ├── eslint.config.js
│ ├── index.html
│ ├── package.json
│ ├── public
│ │ └── vite.svg
│ ├── README.md
│ ├── src
│ │ ├── App.css
│ │ ├── App.tsx
│ │ ├── assets
│ │ │ └── react.svg
│ │ ├── components
│ │ │ └── test.tsx
│ │ ├── index.css
│ │ ├── lingo
│ │ │ ├── dictionary.js
│ │ │ └── meta.json
│ │ ├── lingo-dot-dev.tsx
│ │ ├── main.tsx
│ │ └── vite-env.d.ts
│ ├── tsconfig.app.json
│ ├── tsconfig.json
│ ├── tsconfig.node.json
│ └── vite.config.ts
├── Dockerfile
├── i18n.json
├── i18n.lock
├── integrations
│ └── directus
│ ├── .gitignore
│ ├── CHANGELOG.md
│ ├── docker-compose.yml
│ ├── Dockerfile
│ ├── package.json
│ ├── README.md
│ ├── src
│ │ ├── api.ts
│ │ ├── app.ts
│ │ └── index.spec.ts
│ ├── tsconfig.json
│ ├── tsconfig.test.json
│ └── tsup.config.ts
├── ISSUE_TEMPLATE.md
├── legacy
│ ├── cli
│ │ ├── bin
│ │ │ └── cli.mjs
│ │ ├── CHANGELOG.md
│ │ ├── package.json
│ │ └── readme.md
│ └── sdk
│ ├── CHANGELOG.md
│ ├── index.d.ts
│ ├── index.js
│ ├── package.json
│ └── README.md
├── LICENSE.md
├── mcp.md
├── package.json
├── packages
│ ├── cli
│ │ ├── assets
│ │ │ ├── failure.mp3
│ │ │ └── success.mp3
│ │ ├── bin
│ │ │ └── cli.mjs
│ │ ├── CHANGELOG.md
│ │ ├── demo
│ │ │ ├── android
│ │ │ │ ├── en
│ │ │ │ │ └── example.xml
│ │ │ │ ├── es
│ │ │ │ │ └── example.xml
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── csv
│ │ │ │ ├── example.csv
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── demo.spec.ts
│ │ │ ├── ejs
│ │ │ │ ├── en
│ │ │ │ │ └── example.ejs
│ │ │ │ ├── es
│ │ │ │ │ └── example.ejs
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── flutter
│ │ │ │ ├── en
│ │ │ │ │ └── example.arb
│ │ │ │ ├── es
│ │ │ │ │ └── example.arb
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── html
│ │ │ │ ├── en
│ │ │ │ │ └── example.html
│ │ │ │ ├── es
│ │ │ │ │ └── example.html
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── json
│ │ │ │ ├── en
│ │ │ │ │ └── example.json
│ │ │ │ ├── es
│ │ │ │ │ └── example.json
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── json-dictionary
│ │ │ │ ├── example.json
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── json5
│ │ │ │ ├── en
│ │ │ │ │ └── example.json5
│ │ │ │ ├── es
│ │ │ │ │ └── example.json5
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── jsonc
│ │ │ │ ├── en
│ │ │ │ │ └── example.jsonc
│ │ │ │ ├── es
│ │ │ │ │ └── example.jsonc
│ │ │ │ ├── i18n.json
│ │ │ │ ├── i18n.lock
│ │ │ │ └── ru
│ │ │ │ └── example.jsonc
│ │ │ ├── markdoc
│ │ │ │ ├── en
│ │ │ │ │ └── example.markdoc
│ │ │ │ ├── es
│ │ │ │ │ └── example.markdoc
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── markdown
│ │ │ │ ├── en
│ │ │ │ │ └── example.md
│ │ │ │ ├── es
│ │ │ │ │ └── example.md
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── mdx
│ │ │ │ ├── en
│ │ │ │ │ └── example.mdx
│ │ │ │ ├── es
│ │ │ │ │ └── example.mdx
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── php
│ │ │ │ ├── en
│ │ │ │ │ └── example.php
│ │ │ │ ├── es
│ │ │ │ │ └── example.php
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── po
│ │ │ │ ├── en
│ │ │ │ │ └── example.po
│ │ │ │ ├── es
│ │ │ │ │ └── example.po
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── properties
│ │ │ │ ├── en
│ │ │ │ │ └── example.properties
│ │ │ │ ├── es
│ │ │ │ │ └── example.properties
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── run_i18n.sh
│ │ │ ├── srt
│ │ │ │ ├── en
│ │ │ │ │ └── example.srt
│ │ │ │ ├── es
│ │ │ │ │ └── example.srt
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── txt
│ │ │ │ ├── en
│ │ │ │ │ └── example.txt
│ │ │ │ ├── es
│ │ │ │ │ └── example.txt
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── typescript
│ │ │ │ ├── en
│ │ │ │ │ └── example.ts
│ │ │ │ ├── es
│ │ │ │ │ └── example.ts
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── vtt
│ │ │ │ ├── en
│ │ │ │ │ └── example.vtt
│ │ │ │ ├── es
│ │ │ │ │ └── example.vtt
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── vue-json
│ │ │ │ ├── example.vue
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── xcode-strings
│ │ │ │ ├── en
│ │ │ │ │ └── example.strings
│ │ │ │ ├── es
│ │ │ │ │ └── example.strings
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── xcode-stringsdict
│ │ │ │ ├── en
│ │ │ │ │ └── example.stringsdict
│ │ │ │ ├── es
│ │ │ │ │ └── example.stringsdict
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── xcode-xcstrings
│ │ │ │ ├── example.xcstrings
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── xcode-xcstrings-v2
│ │ │ │ ├── complex-example.xcstrings
│ │ │ │ ├── example.xcstrings
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── xliff
│ │ │ │ ├── en
│ │ │ │ │ ├── example-v1.2.xliff
│ │ │ │ │ └── example-v2.xliff
│ │ │ │ ├── es
│ │ │ │ │ ├── example-v1.2.xliff
│ │ │ │ │ ├── example-v2.xliff
│ │ │ │ │ └── example.xliff
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── xml
│ │ │ │ ├── en
│ │ │ │ │ └── example.xml
│ │ │ │ ├── es
│ │ │ │ │ └── example.xml
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ ├── yaml
│ │ │ │ ├── en
│ │ │ │ │ └── example.yml
│ │ │ │ ├── es
│ │ │ │ │ └── example.yml
│ │ │ │ ├── i18n.json
│ │ │ │ └── i18n.lock
│ │ │ └── yaml-root-key
│ │ │ ├── en
│ │ │ │ └── example.yml
│ │ │ ├── es
│ │ │ │ └── example.yml
│ │ │ ├── i18n.json
│ │ │ └── i18n.lock
│ │ ├── i18n.json
│ │ ├── i18n.lock
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── cli
│ │ │ │ ├── cmd
│ │ │ │ │ ├── auth.ts
│ │ │ │ │ ├── ci
│ │ │ │ │ │ ├── flows
│ │ │ │ │ │ │ ├── _base.ts
│ │ │ │ │ │ │ ├── in-branch.ts
│ │ │ │ │ │ │ └── pull-request.ts
│ │ │ │ │ │ ├── index.ts
│ │ │ │ │ │ └── platforms
│ │ │ │ │ │ ├── _base.ts
│ │ │ │ │ │ ├── bitbucket.ts
│ │ │ │ │ │ ├── github.ts
│ │ │ │ │ │ ├── gitlab.ts
│ │ │ │ │ │ └── index.ts
│ │ │ │ │ ├── cleanup.ts
│ │ │ │ │ ├── config
│ │ │ │ │ │ ├── get.ts
│ │ │ │ │ │ ├── index.ts
│ │ │ │ │ │ ├── set.ts
│ │ │ │ │ │ └── unset.ts
│ │ │ │ │ ├── i18n.ts
│ │ │ │ │ ├── init.ts
│ │ │ │ │ ├── lockfile.ts
│ │ │ │ │ ├── login.ts
│ │ │ │ │ ├── logout.ts
│ │ │ │ │ ├── may-the-fourth.ts
│ │ │ │ │ ├── mcp.ts
│ │ │ │ │ ├── purge.ts
│ │ │ │ │ ├── run
│ │ │ │ │ │ ├── _const.ts
│ │ │ │ │ │ ├── _types.ts
│ │ │ │ │ │ ├── _utils.ts
│ │ │ │ │ │ ├── execute.spec.ts
│ │ │ │ │ │ ├── execute.ts
│ │ │ │ │ │ ├── frozen.ts
│ │ │ │ │ │ ├── index.ts
│ │ │ │ │ │ ├── plan.ts
│ │ │ │ │ │ ├── setup.ts
│ │ │ │ │ │ └── watch.ts
│ │ │ │ │ ├── show
│ │ │ │ │ │ ├── _shared-key-command.ts
│ │ │ │ │ │ ├── config.ts
│ │ │ │ │ │ ├── files.ts
│ │ │ │ │ │ ├── ignored-keys.ts
│ │ │ │ │ │ ├── index.ts
│ │ │ │ │ │ ├── locale.ts
│ │ │ │ │ │ └── locked-keys.ts
│ │ │ │ │ └── status.ts
│ │ │ │ ├── constants.ts
│ │ │ │ ├── index.spec.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── loaders
│ │ │ │ │ ├── _types.ts
│ │ │ │ │ ├── _utils.ts
│ │ │ │ │ ├── android.spec.ts
│ │ │ │ │ ├── android.ts
│ │ │ │ │ ├── csv.spec.ts
│ │ │ │ │ ├── csv.ts
│ │ │ │ │ ├── dato
│ │ │ │ │ │ ├── _base.ts
│ │ │ │ │ │ ├── _utils.ts
│ │ │ │ │ │ ├── api.ts
│ │ │ │ │ │ ├── extract.ts
│ │ │ │ │ │ ├── filter.ts
│ │ │ │ │ │ └── index.ts
│ │ │ │ │ ├── ejs.spec.ts
│ │ │ │ │ ├── ejs.ts
│ │ │ │ │ ├── ensure-key-order.spec.ts
│ │ │ │ │ ├── ensure-key-order.ts
│ │ │ │ │ ├── flat.spec.ts
│ │ │ │ │ ├── flat.ts
│ │ │ │ │ ├── flutter.spec.ts
│ │ │ │ │ ├── flutter.ts
│ │ │ │ │ ├── formatters
│ │ │ │ │ │ ├── _base.ts
│ │ │ │ │ │ ├── biome.ts
│ │ │ │ │ │ ├── index.ts
│ │ │ │ │ │ └── prettier.ts
│ │ │ │ │ ├── html.ts
│ │ │ │ │ ├── icu-safety.spec.ts
│ │ │ │ │ ├── ignored-keys-buckets.spec.ts
│ │ │ │ │ ├── ignored-keys.spec.ts
│ │ │ │ │ ├── ignored-keys.ts
│ │ │ │ │ ├── index.spec.ts
│ │ │ │ │ ├── index.ts
│ │ │ │ │ ├── inject-locale.spec.ts
│ │ │ │ │ ├── inject-locale.ts
│ │ │ │ │ ├── json-dictionary.spec.ts
│ │ │ │ │ ├── json-dictionary.ts
│ │ │ │ │ ├── json-sorting.test.ts
│ │ │ │ │ ├── json-sorting.ts
│ │ │ │ │ ├── json.ts
│ │ │ │ │ ├── json5.spec.ts
│ │ │ │ │ ├── json5.ts
│ │ │ │ │ ├── jsonc.spec.ts
│ │ │ │ │ ├── jsonc.ts
│ │ │ │ │ ├── locked-keys.spec.ts
│ │ │ │ │ ├── locked-keys.ts
│ │ │ │ │ ├── locked-patterns.spec.ts
│ │ │ │ │ ├── locked-patterns.ts
│ │ │ │ │ ├── markdoc.spec.ts
│ │ │ │ │ ├── markdoc.ts
│ │ │ │ │ ├── markdown.ts
│ │ │ │ │ ├── mdx.spec.ts
│ │ │ │ │ ├── mdx.ts
│ │ │ │ │ ├── mdx2
│ │ │ │ │ │ ├── _types.ts
│ │ │ │ │ │ ├── _utils.ts
│ │ │ │ │ │ ├── code-placeholder.spec.ts
│ │ │ │ │ │ ├── code-placeholder.ts
│ │ │ │ │ │ ├── frontmatter-split.spec.ts
│ │ │ │ │ │ ├── frontmatter-split.ts
│ │ │ │ │ │ ├── localizable-document.spec.ts
│ │ │ │ │ │ ├── localizable-document.ts
│ │ │ │ │ │ ├── section-split.spec.ts
│ │ │ │ │ │ ├── section-split.ts
│ │ │ │ │ │ └── sections-split-2.ts
│ │ │ │ │ ├── passthrough.ts
│ │ │ │ │ ├── php.ts
│ │ │ │ │ ├── plutil-json-loader.ts
│ │ │ │ │ ├── po
│ │ │ │ │ │ ├── _types.ts
│ │ │ │ │ │ ├── index.spec.ts
│ │ │ │ │ │ └── index.ts
│ │ │ │ │ ├── properties.ts
│ │ │ │ │ ├── root-key.ts
│ │ │ │ │ ├── srt.ts
│ │ │ │ │ ├── sync.ts
│ │ │ │ │ ├── text-file.ts
│ │ │ │ │ ├── txt.ts
│ │ │ │ │ ├── typescript
│ │ │ │ │ │ ├── cjs-interop.ts
│ │ │ │ │ │ ├── index.spec.ts
│ │ │ │ │ │ └── index.ts
│ │ │ │ │ ├── unlocalizable.spec.ts
│ │ │ │ │ ├── unlocalizable.ts
│ │ │ │ │ ├── variable
│ │ │ │ │ │ ├── index.spec.ts
│ │ │ │ │ │ └── index.ts
│ │ │ │ │ ├── vtt.ts
│ │ │ │ │ ├── vue-json.ts
│ │ │ │ │ ├── xcode-strings
│ │ │ │ │ │ ├── escape.ts
│ │ │ │ │ │ ├── parser.ts
│ │ │ │ │ │ ├── tokenizer.ts
│ │ │ │ │ │ └── types.ts
│ │ │ │ │ ├── xcode-strings.spec.ts
│ │ │ │ │ ├── xcode-strings.ts
│ │ │ │ │ ├── xcode-stringsdict.ts
│ │ │ │ │ ├── xcode-xcstrings-icu.spec.ts
│ │ │ │ │ ├── xcode-xcstrings-icu.ts
│ │ │ │ │ ├── xcode-xcstrings-lock-compatibility.spec.ts
│ │ │ │ │ ├── xcode-xcstrings-v2-loader.ts
│ │ │ │ │ ├── xcode-xcstrings.spec.ts
│ │ │ │ │ ├── xcode-xcstrings.ts
│ │ │ │ │ ├── xliff.spec.ts
│ │ │ │ │ ├── xliff.ts
│ │ │ │ │ ├── xml.ts
│ │ │ │ │ └── yaml.ts
│ │ │ │ ├── localizer
│ │ │ │ │ ├── _types.ts
│ │ │ │ │ ├── explicit.ts
│ │ │ │ │ ├── index.ts
│ │ │ │ │ └── lingodotdev.ts
│ │ │ │ ├── processor
│ │ │ │ │ ├── _base.ts
│ │ │ │ │ ├── basic.ts
│ │ │ │ │ ├── index.ts
│ │ │ │ │ └── lingo.ts
│ │ │ │ └── utils
│ │ │ │ ├── auth.ts
│ │ │ │ ├── buckets.spec.ts
│ │ │ │ ├── buckets.ts
│ │ │ │ ├── cache.ts
│ │ │ │ ├── cloudflare-status.ts
│ │ │ │ ├── config.ts
│ │ │ │ ├── delta.spec.ts
│ │ │ │ ├── delta.ts
│ │ │ │ ├── ensure-patterns.ts
│ │ │ │ ├── errors.ts
│ │ │ │ ├── exec.spec.ts
│ │ │ │ ├── exec.ts
│ │ │ │ ├── exit-gracefully.spec.ts
│ │ │ │ ├── exit-gracefully.ts
│ │ │ │ ├── exp-backoff.ts
│ │ │ │ ├── find-locale-paths.spec.ts
│ │ │ │ ├── find-locale-paths.ts
│ │ │ │ ├── fs.ts
│ │ │ │ ├── init-ci-cd.ts
│ │ │ │ ├── key-matching.spec.ts
│ │ │ │ ├── key-matching.ts
│ │ │ │ ├── lockfile.ts
│ │ │ │ ├── md5.ts
│ │ │ │ ├── observability.ts
│ │ │ │ ├── plutil-formatter.spec.ts
│ │ │ │ ├── plutil-formatter.ts
│ │ │ │ ├── settings.ts
│ │ │ │ ├── ui.ts
│ │ │ │ └── update-gitignore.ts
│ │ │ ├── compiler
│ │ │ │ └── index.ts
│ │ │ ├── locale-codes
│ │ │ │ └── index.ts
│ │ │ ├── react
│ │ │ │ ├── client.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── react-router.ts
│ │ │ │ └── rsc.ts
│ │ │ ├── sdk
│ │ │ │ └── index.ts
│ │ │ └── spec
│ │ │ └── index.ts
│ │ ├── tests
│ │ │ └── mock-storage.ts
│ │ ├── troubleshooting.md
│ │ ├── tsconfig.json
│ │ ├── tsconfig.test.json
│ │ ├── tsup.config.ts
│ │ ├── types
│ │ │ ├── vtt.d.ts
│ │ │ └── xliff.d.ts
│ │ ├── vitest.config.ts
│ │ └── WATCH_MODE.md
│ ├── compiler
│ │ ├── CHANGELOG.md
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── _base.ts
│ │ │ ├── _const.ts
│ │ │ ├── _loader-utils.spec.ts
│ │ │ ├── _loader-utils.ts
│ │ │ ├── _utils.spec.ts
│ │ │ ├── _utils.ts
│ │ │ ├── client-dictionary-loader.ts
│ │ │ ├── i18n-directive.spec.ts
│ │ │ ├── i18n-directive.ts
│ │ │ ├── index.spec.ts
│ │ │ ├── index.ts
│ │ │ ├── jsx-attribute-flag.spec.ts
│ │ │ ├── jsx-attribute-flag.ts
│ │ │ ├── jsx-attribute-scope-inject.spec.ts
│ │ │ ├── jsx-attribute-scope-inject.ts
│ │ │ ├── jsx-attribute-scopes-export.spec.ts
│ │ │ ├── jsx-attribute-scopes-export.ts
│ │ │ ├── jsx-attribute.spec.ts
│ │ │ ├── jsx-attribute.ts
│ │ │ ├── jsx-fragment.spec.ts
│ │ │ ├── jsx-fragment.ts
│ │ │ ├── jsx-html-lang.spec.ts
│ │ │ ├── jsx-html-lang.ts
│ │ │ ├── jsx-provider.spec.ts
│ │ │ ├── jsx-provider.ts
│ │ │ ├── jsx-remove-attributes.spec.ts
│ │ │ ├── jsx-remove-attributes.ts
│ │ │ ├── jsx-root-flag.spec.ts
│ │ │ ├── jsx-root-flag.ts
│ │ │ ├── jsx-scope-flag.spec.ts
│ │ │ ├── jsx-scope-flag.ts
│ │ │ ├── jsx-scope-inject.spec.ts
│ │ │ ├── jsx-scope-inject.ts
│ │ │ ├── jsx-scopes-export.spec.ts
│ │ │ ├── jsx-scopes-export.ts
│ │ │ ├── lib
│ │ │ │ └── lcp
│ │ │ │ ├── api
│ │ │ │ │ ├── index.ts
│ │ │ │ │ ├── prompt.spec.ts
│ │ │ │ │ ├── prompt.ts
│ │ │ │ │ ├── provider-details.spec.ts
│ │ │ │ │ ├── provider-details.ts
│ │ │ │ │ ├── shots.ts
│ │ │ │ │ ├── xml2obj.spec.ts
│ │ │ │ │ └── xml2obj.ts
│ │ │ │ ├── api.spec.ts
│ │ │ │ ├── cache.spec.ts
│ │ │ │ ├── cache.ts
│ │ │ │ ├── index.spec.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── schema.ts
│ │ │ │ ├── server.spec.ts
│ │ │ │ └── server.ts
│ │ │ ├── lingo-turbopack-loader.ts
│ │ │ ├── react-router-dictionary-loader.ts
│ │ │ ├── rsc-dictionary-loader.ts
│ │ │ └── utils
│ │ │ ├── ast-key.spec.ts
│ │ │ ├── ast-key.ts
│ │ │ ├── create-locale-import-map.spec.ts
│ │ │ ├── create-locale-import-map.ts
│ │ │ ├── env.spec.ts
│ │ │ ├── env.ts
│ │ │ ├── hash.spec.ts
│ │ │ ├── hash.ts
│ │ │ ├── index.spec.ts
│ │ │ ├── index.ts
│ │ │ ├── invokations.spec.ts
│ │ │ ├── invokations.ts
│ │ │ ├── jsx-attribute-scope.ts
│ │ │ ├── jsx-attribute.spec.ts
│ │ │ ├── jsx-attribute.ts
│ │ │ ├── jsx-content-whitespace.spec.ts
│ │ │ ├── jsx-content.spec.ts
│ │ │ ├── jsx-content.ts
│ │ │ ├── jsx-element.spec.ts
│ │ │ ├── jsx-element.ts
│ │ │ ├── jsx-expressions.test.ts
│ │ │ ├── jsx-expressions.ts
│ │ │ ├── jsx-functions.spec.ts
│ │ │ ├── jsx-functions.ts
│ │ │ ├── jsx-scope.spec.ts
│ │ │ ├── jsx-scope.ts
│ │ │ ├── jsx-variables.spec.ts
│ │ │ ├── jsx-variables.ts
│ │ │ ├── llm-api-key.ts
│ │ │ ├── llm-api-keys.spec.ts
│ │ │ ├── locales.spec.ts
│ │ │ ├── locales.ts
│ │ │ ├── module-params.spec.ts
│ │ │ ├── module-params.ts
│ │ │ ├── observability.spec.ts
│ │ │ ├── observability.ts
│ │ │ ├── rc.spec.ts
│ │ │ └── rc.ts
│ │ ├── tsconfig.json
│ │ ├── tsup.config.ts
│ │ └── vitest.config.ts
│ ├── locales
│ │ ├── CHANGELOG.md
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── constants.ts
│ │ │ ├── index.ts
│ │ │ ├── names
│ │ │ │ ├── index.spec.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── integration.spec.ts
│ │ │ │ └── loader.ts
│ │ │ ├── parser.spec.ts
│ │ │ ├── parser.ts
│ │ │ ├── types.ts
│ │ │ ├── validation.spec.ts
│ │ │ └── validation.ts
│ │ ├── tsconfig.json
│ │ └── tsup.config.ts
│ ├── react
│ │ ├── build.config.ts
│ │ ├── CHANGELOG.md
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── client
│ │ │ │ ├── attribute-component.spec.tsx
│ │ │ │ ├── attribute-component.tsx
│ │ │ │ ├── component.lingo-component.spec.tsx
│ │ │ │ ├── component.spec.tsx
│ │ │ │ ├── component.tsx
│ │ │ │ ├── context.spec.tsx
│ │ │ │ ├── context.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── loader.spec.ts
│ │ │ │ ├── loader.ts
│ │ │ │ ├── locale-switcher.spec.tsx
│ │ │ │ ├── locale-switcher.tsx
│ │ │ │ ├── locale.spec.ts
│ │ │ │ ├── locale.ts
│ │ │ │ ├── provider.spec.tsx
│ │ │ │ ├── provider.tsx
│ │ │ │ ├── utils.spec.ts
│ │ │ │ └── utils.ts
│ │ │ ├── core
│ │ │ │ ├── attribute-component.spec.tsx
│ │ │ │ ├── attribute-component.tsx
│ │ │ │ ├── component.spec.tsx
│ │ │ │ ├── component.tsx
│ │ │ │ ├── const.ts
│ │ │ │ ├── get-dictionary.spec.ts
│ │ │ │ ├── get-dictionary.ts
│ │ │ │ └── index.ts
│ │ │ ├── react-router
│ │ │ │ ├── index.ts
│ │ │ │ ├── loader.spec.ts
│ │ │ │ └── loader.ts
│ │ │ ├── rsc
│ │ │ │ ├── attribute-component.tsx
│ │ │ │ ├── component.lingo-component.spec.tsx
│ │ │ │ ├── component.spec.tsx
│ │ │ │ ├── component.tsx
│ │ │ │ ├── index.ts
│ │ │ │ ├── loader.spec.ts
│ │ │ │ ├── loader.ts
│ │ │ │ ├── provider.spec.tsx
│ │ │ │ ├── provider.tsx
│ │ │ │ ├── utils.spec.ts
│ │ │ │ └── utils.ts
│ │ │ └── test
│ │ │ └── setup.ts
│ │ ├── tsconfig.json
│ │ └── vitest.config.ts
│ ├── sdk
│ │ ├── CHANGELOG.md
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── abort-controller.specs.ts
│ │ │ ├── index.spec.ts
│ │ │ └── index.ts
│ │ ├── tsconfig.json
│ │ ├── tsconfig.test.json
│ │ └── tsup.config.ts
│ └── spec
│ ├── CHANGELOG.md
│ ├── package.json
│ ├── README.md
│ ├── src
│ │ ├── config.spec.ts
│ │ ├── config.ts
│ │ ├── formats.ts
│ │ ├── index.spec.ts
│ │ ├── index.ts
│ │ ├── json-schema.ts
│ │ ├── locales.spec.ts
│ │ └── locales.ts
│ ├── tsconfig.json
│ ├── tsconfig.test.json
│ └── tsup.config.ts
├── pnpm-lock.yaml
├── pnpm-workspace.yaml
├── readme
│ ├── ar.md
│ ├── bn.md
│ ├── de.md
│ ├── en.md
│ ├── es.md
│ ├── fa.md
│ ├── fr.md
│ ├── he.md
│ ├── hi.md
│ ├── it.md
│ ├── ja.md
│ ├── ko.md
│ ├── pl.md
│ ├── pt-BR.md
│ ├── ru.md
│ ├── tr.md
│ ├── uk-UA.md
│ └── zh-Hans.md
├── readme.md
├── scripts
│ ├── docs
│ │ ├── package.json
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── generate-cli-docs.ts
│ │ │ ├── generate-config-docs.ts
│ │ │ ├── json-schema
│ │ │ │ ├── markdown-renderer.test.ts
│ │ │ │ ├── markdown-renderer.ts
│ │ │ │ ├── parser.test.ts
│ │ │ │ ├── parser.ts
│ │ │ │ └── types.ts
│ │ │ ├── utils.test.ts
│ │ │ └── utils.ts
│ │ ├── tsconfig.json
│ │ └── vitest.config.ts
│ └── packagist-publish.php
└── turbo.json
```
# Files
--------------------------------------------------------------------------------
/packages/cli/src/cli/loaders/dato/_utils.ts:
--------------------------------------------------------------------------------
```typescript
1 | import _ from "lodash";
2 | import { buildClient, SimpleSchemaTypes } from "@datocms/cma-client-node";
3 | import { DastDocument, DatoBlock, DatoSimpleValue, DatoValue } from "./_base";
4 | import { DastDocumentNode } from "./_base";
5 |
6 | type DatoClientParams = {
7 | apiKey: string;
8 | projectId: string;
9 | };
10 |
11 | export type DatoClient = ReturnType<typeof createDatoClient>;
12 |
13 | export default function createDatoClient(params: DatoClientParams) {
14 | if (!params.apiKey) {
15 | throw new Error(
16 | "Missing required environment variable: DATO_API_TOKEN. Please set this variable and try again.",
17 | );
18 | }
19 | const dato = buildClient({
20 | apiToken: params.apiKey,
21 | extraHeaders: {
22 | "X-Exclude-Invalid": "true",
23 | },
24 | });
25 |
26 | return {
27 | findProject: async (): Promise<SimpleSchemaTypes.Site> => {
28 | const project = await dato.site.find();
29 | return project;
30 | },
31 | updateField: async (
32 | fieldId: string,
33 | payload: SimpleSchemaTypes.FieldUpdateSchema,
34 | ): Promise<void> => {
35 | try {
36 | await dato.fields.update(fieldId, payload);
37 | } catch (_error: any) {
38 | throw new Error(
39 | [
40 | `Failed to update field in DatoCMS.`,
41 | `Field ID: ${fieldId}`,
42 | `Payload: ${JSON.stringify(payload, null, 2)}`,
43 | `Error: ${JSON.stringify(_error, null, 2)}`,
44 | ].join("\n\n"),
45 | );
46 | }
47 | },
48 | findField: async (fieldId: string): Promise<SimpleSchemaTypes.Field> => {
49 | try {
50 | const field = await dato.fields.find(fieldId);
51 | if (!field) {
52 | throw new Error(`Field ${fieldId} not found`);
53 | }
54 | return field;
55 | } catch (_error: any) {
56 | throw new Error(
57 | [
58 | `Failed to find field in DatoCMS.`,
59 | `Field ID: ${fieldId}`,
60 | `Error: ${JSON.stringify(_error, null, 2)}`,
61 | ].join("\n\n"),
62 | );
63 | }
64 | },
65 | findModels: async (): Promise<SimpleSchemaTypes.ItemType[]> => {
66 | try {
67 | const models = await dato.itemTypes.list();
68 | const modelsWithoutBlocks = models.filter(
69 | (model) => !model.modular_block,
70 | );
71 | return modelsWithoutBlocks;
72 | } catch (_error: any) {
73 | throw new Error(
74 | [
75 | `Failed to find models in DatoCMS.`,
76 | `Error: ${JSON.stringify(_error, null, 2)}`,
77 | ].join("\n\n"),
78 | );
79 | }
80 | },
81 | findModel: async (modelId: string): Promise<SimpleSchemaTypes.ItemType> => {
82 | try {
83 | const model = await dato.itemTypes.find(modelId);
84 | if (!model) {
85 | throw new Error(`Model ${modelId} not found`);
86 | }
87 | return model;
88 | } catch (_error: any) {
89 | throw new Error(
90 | [
91 | `Failed to find model in DatoCMS.`,
92 | `Model ID: ${modelId}`,
93 | `Error: ${JSON.stringify(_error, null, 2)}`,
94 | ].join("\n\n"),
95 | );
96 | }
97 | },
98 | findRecords: async (
99 | records: string[],
100 | limit: number = 100,
101 | ): Promise<SimpleSchemaTypes.Item[]> => {
102 | return dato.items
103 | .list({
104 | nested: true,
105 | version: "current",
106 | limit,
107 | filter: {
108 | projectId: params.projectId,
109 | only_valid: "true",
110 | ids: !records.length ? undefined : records.join(","),
111 | },
112 | })
113 | .catch((error: any) =>
114 | Promise.reject(error?.response?.body?.data?.[0] || error),
115 | );
116 | },
117 | findRecordsForModel: async (
118 | modelId: string,
119 | records?: string[],
120 | ): Promise<SimpleSchemaTypes.Item[]> => {
121 | try {
122 | const result = await dato.items
123 | .list({
124 | nested: true,
125 | version: "current",
126 | filter: {
127 | type: modelId,
128 | only_valid: "true",
129 | ids: !records?.length ? undefined : records.join(","),
130 | },
131 | })
132 | .catch((error: any) =>
133 | Promise.reject(error?.response?.body?.data?.[0] || error),
134 | );
135 | return result;
136 | } catch (_error: any) {
137 | throw new Error(
138 | [
139 | `Failed to find records for model in DatoCMS.`,
140 | `Model ID: ${modelId}`,
141 | `Error: ${JSON.stringify(_error, null, 2)}`,
142 | ].join("\n\n"),
143 | );
144 | }
145 | },
146 | updateRecord: async (id: string, payload: any): Promise<void> => {
147 | try {
148 | await dato.items
149 | .update(id, payload)
150 | .catch((error: any) =>
151 | Promise.reject(error?.response?.body?.data?.[0] || error),
152 | );
153 | } catch (_error: any) {
154 | if (_error?.attributes?.details?.message) {
155 | throw new Error(
156 | [
157 | `${_error.attributes.details.message}`,
158 | `Payload: ${JSON.stringify(payload, null, 2)}`,
159 | `Error: ${JSON.stringify(_error, null, 2)}`,
160 | ].join("\n\n"),
161 | );
162 | }
163 |
164 | throw new Error(
165 | [
166 | `Failed to update record in DatoCMS.`,
167 | `Record ID: ${id}`,
168 | `Payload: ${JSON.stringify(payload, null, 2)}`,
169 | `Error: ${JSON.stringify(_error, null, 2)}`,
170 | ].join("\n\n"),
171 | );
172 | }
173 | },
174 | enableFieldLocalization: async (args: {
175 | modelId: string;
176 | fieldId: string;
177 | }): Promise<void> => {
178 | try {
179 | await dato.fields
180 | .update(`${args.modelId}::${args.fieldId}`, { localized: true })
181 | .catch((error: any) =>
182 | Promise.reject(error?.response?.body?.data?.[0] || error),
183 | );
184 | } catch (_error: any) {
185 | if (_error?.attributes?.code === "NOT_FOUND") {
186 | throw new Error(
187 | [
188 | `Field "${args.fieldId}" not found in model "${args.modelId}".`,
189 | `Error: ${JSON.stringify(_error, null, 2)}`,
190 | ].join("\n\n"),
191 | );
192 | }
193 |
194 | if (_error?.attributes?.details?.message) {
195 | throw new Error(
196 | [
197 | `${_error.attributes.details.message}`,
198 | `Error: ${JSON.stringify(_error, null, 2)}`,
199 | ].join("\n\n"),
200 | );
201 | }
202 |
203 | throw new Error(
204 | [
205 | `Failed to enable field localization in DatoCMS.`,
206 | `Field ID: ${args.fieldId}`,
207 | `Model ID: ${args.modelId}`,
208 | `Error: ${JSON.stringify(_error, null, 2)}`,
209 | ].join("\n\n"),
210 | );
211 | }
212 | },
213 | };
214 | }
215 |
216 | type TraverseDatoCallbackMap = {
217 | onValue?: (
218 | path: string[],
219 | value: DatoSimpleValue,
220 | setValue: (value: DatoSimpleValue) => void,
221 | ) => void;
222 | onBlock?: (path: string[], value: DatoBlock) => void;
223 | };
224 |
225 | export function traverseDatoPayload(
226 | payload: Record<string, DatoValue>,
227 | callbackMap: TraverseDatoCallbackMap,
228 | path: string[] = [],
229 | ) {
230 | for (const fieldName of Object.keys(payload)) {
231 | const fieldValue = payload[fieldName];
232 | traverseDatoValue(payload, fieldValue, callbackMap, [...path, fieldName]);
233 | }
234 | }
235 |
236 | export function traverseDatoValue(
237 | parent: Record<string, DatoValue>,
238 | value: DatoValue,
239 | callbackMap: TraverseDatoCallbackMap,
240 | path: string[] = [],
241 | ) {
242 | if (_.isArray(value)) {
243 | for (let i = 0; i < value.length; i++) {
244 | traverseDatoValue(parent, value[i], callbackMap, [...path, i.toString()]);
245 | }
246 | } else if (_.isObject(value)) {
247 | if ("schema" in value && value.schema === "dast") {
248 | traverseDastDocument(value, callbackMap, [...path]);
249 | } else if ("type" in value && value.type === "item") {
250 | traverseDatoBlock(value, callbackMap, [...path]);
251 | } else {
252 | throw new Error(
253 | [
254 | "Unsupported dato object value type:",
255 | JSON.stringify(value, null, 2),
256 | ].join("\n\n"),
257 | );
258 | }
259 | } else {
260 | callbackMap.onValue?.(path, value, (value) => {
261 | _.set(parent, path[path.length - 1], value);
262 | });
263 | }
264 | }
265 |
266 | export function traverseDastDocument(
267 | dast: DastDocument,
268 | callbackMap: TraverseDatoCallbackMap,
269 | path: string[] = [],
270 | ) {
271 | traverseDastNode(dast.document, callbackMap, [...path, "document"]);
272 | }
273 |
274 | export function traverseDatoBlock(
275 | block: DatoBlock,
276 | callbackMap: TraverseDatoCallbackMap,
277 | path: string[] = [],
278 | ) {
279 | callbackMap.onBlock?.(path, block);
280 | traverseDatoPayload(block.attributes, callbackMap, [...path, "attributes"]);
281 | }
282 |
283 | export function traverseDastNode(
284 | node: DastDocumentNode,
285 | callbackMap: TraverseDatoCallbackMap,
286 | path: string[] = [],
287 | ) {
288 | if (node.value) {
289 | callbackMap.onValue?.(path, node.value, (value) => {
290 | _.set(node, "value", value);
291 | });
292 | }
293 | if (node.children?.length) {
294 | for (let i = 0; i < node.children.length; i++) {
295 | traverseDastNode(node.children[i], callbackMap, [...path, i.toString()]);
296 | }
297 | }
298 | }
299 |
```
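
The traversal helpers above expose a callback-based API: `onValue` receives the path to each localizable leaf plus a setter for writing a translated value back in place, while DAST documents and nested blocks are routed through `onBlock` and the recursive traversals. Below is a minimal, self-contained sketch (not part of the repository) of how `traverseDatoPayload` might be driven; the flat string-only payload is an assumption for illustration, since real Dato records also carry structured text and block values.

```typescript
// Hypothetical usage sketch for traverseDatoPayload (assumed relative import).
import { traverseDatoPayload } from "./_utils";

// Simplified payload: real records may also contain DAST documents and blocks.
const payload: Record<string, any> = {
  title: "Hello world",
  subtitle: "A short description",
};

const collected: Array<{ path: string[]; value: unknown }> = [];

traverseDatoPayload(payload, {
  onValue: (path, value, setValue) => {
    // Collect each localizable leaf; setValue would write a translation back
    // into the payload at the same position.
    collected.push({ path, value });
  },
});

console.log(collected);
// [ { path: ["title"], value: "Hello world" },
//   { path: ["subtitle"], value: "A short description" } ]
```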
--------------------------------------------------------------------------------
/integrations/directus/src/api.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { defineOperationApi } from "@directus/extensions-sdk";
2 |
3 | interface Options {
4 | item_id: string;
5 | collection: string;
6 | translation_table: string;
7 | language_table: string;
8 | replexica_api_key: string;
9 | source_language?: string;
10 | target_languages: string[];
11 | }
12 |
13 | interface Context {
14 | services: {
15 | ItemsService: any;
16 | };
17 | getSchema: () => Promise<any>;
18 | }
19 |
20 | interface TranslationResult {
21 | success: boolean;
22 | language: string;
23 | operation?: "updated" | "created";
24 | data?: any;
25 | error?: string;
26 | }
27 |
28 | interface TranslationSummary {
29 | successful: number;
30 | failed: number;
31 | updated: number;
32 | created: number;
33 | details: TranslationResult[];
34 | }
35 |
36 | export default defineOperationApi<Options>({
37 | id: "replexica-integration-directus",
38 | handler: async (
39 | {
40 | item_id,
41 | collection,
42 | translation_table,
43 | language_table,
44 | replexica_api_key,
45 | source_language = "en-US",
46 | target_languages,
47 | },
48 | context: Context,
49 | ) => {
50 | if (!replexica_api_key) {
51 | throw new Error("Replexica API Key not defined");
52 | }
53 |
54 | try {
55 | const { ReplexicaEngine } = await import("@replexica/sdk");
56 | const replexica = new ReplexicaEngine({ apiKey: replexica_api_key });
57 |
58 | const { ItemsService } = context.services;
59 | const schema = await context.getSchema();
60 |
61 | // Initialize services
62 | const languagesService = new ItemsService(language_table, { schema });
63 | const translationsService = new ItemsService(translation_table, {
64 | schema,
65 | });
66 |
67 | // Get the primary key field for the collection
68 | const collection_pk = schema.collections[collection].primary;
69 |
70 | // Get collection fields and their types
71 | const collectionFields = schema.collections[translation_table].fields;
72 |
73 | // Get all existing translations for this item
74 | const existingTranslations = await translationsService.readByQuery({
75 | fields: ["*"],
76 | filter: {
77 | [`${collection}_${collection_pk}`]: { _eq: item_id },
78 | },
79 | });
80 |
81 | const sourceTranslation = existingTranslations.find(
82 | (t: { languages_code: string }) => t.languages_code === source_language,
83 | );
84 | if (!sourceTranslation) {
85 | throw new Error("No source translation found");
86 | }
87 |
88 | // Get target languages
89 | const targetLanguages = await languagesService.readByQuery({
90 | fields: ["code", "name"],
91 | filter:
92 | target_languages && target_languages.length > 0
93 | ? { code: { _in: target_languages } }
94 | : { code: { _neq: source_language } },
95 | });
96 |
97 | if (!targetLanguages.length) {
98 | throw new Error(
99 | target_languages
100 | ? `Target language ${target_languages} not found in language table`
101 | : "No target languages found in table",
102 | );
103 | }
104 |
105 | // Prepare translation template
106 | const translationTemplate = {
107 | ...sourceTranslation,
108 | id: undefined,
109 | languages_code: undefined,
110 | date_created: undefined,
111 | date_updated: undefined,
112 | user_created: undefined,
113 | user_updated: undefined,
114 | };
115 |
116 | // Process translations
117 | const results: TranslationResult[] = await Promise.all(
118 | targetLanguages.map(
119 | async (language: { code: string; name: string }) => {
120 | try {
121 | let translatedData: Record<string, any> = {};
122 | let objectToTranslate: Record<string, any> = {};
123 | let textFields: Array<{ fieldName: string; fieldValue: string }> =
124 | [];
125 |
126 | // Separate fields into text and non-text
127 | for (const [fieldName, fieldValue] of Object.entries(
128 | translationTemplate,
129 | )) {
130 | // Skip if field is null or undefined
131 | if (fieldValue == null) {
132 | translatedData[fieldName] = fieldValue;
133 | continue;
134 | }
135 |
136 | // Skip system fields and non-translatable fields
137 | const fieldSchema = collectionFields[fieldName];
138 | if (!fieldSchema || fieldSchema.system) {
139 | translatedData[fieldName] = fieldValue;
140 | continue;
141 | }
142 |
143 | if (fieldSchema.type === "text") {
144 | textFields.push({
145 | fieldName,
146 | fieldValue: fieldValue as string,
147 | });
148 | } else {
149 | objectToTranslate[fieldName] = fieldValue;
150 | }
151 | }
152 |
153 | // Translate non-text fields in one batch
154 | if (Object.keys(objectToTranslate).length > 0) {
155 | const translatedObject = await replexica.localizeObject(
156 | objectToTranslate,
157 | {
158 | sourceLocale: source_language,
159 | targetLocale: language.code,
160 | },
161 | );
162 | translatedData = { ...translatedData, ...translatedObject };
163 | }
164 |
165 | // Translate text fields individually
166 | for (const { fieldName, fieldValue } of textFields) {
167 | try {
168 | if (isHtml(fieldValue)) {
169 | translatedData[fieldName] = await replexica.localizeHtml(
170 | fieldValue,
171 | {
172 | sourceLocale: source_language,
173 | targetLocale: language.code,
174 | },
175 | );
176 | } else {
177 | translatedData[fieldName] = await replexica.localizeText(
178 | fieldValue,
179 | {
180 | sourceLocale: source_language,
181 | targetLocale: language.code,
182 | },
183 | );
184 | }
185 | } catch (fieldError) {
186 | console.error(
187 | `Error translating field ${fieldName}:`,
188 | fieldError,
189 | );
190 | translatedData[fieldName] = fieldValue; // Keep original value on error
191 | }
192 | }
193 |
194 | // Find existing translation for this language
195 | const existingTranslation = existingTranslations.find(
196 | (t: { languages_code: string }) =>
197 | t.languages_code === language.code,
198 | );
199 |
200 | let result;
201 | if (existingTranslation) {
202 | result = await translationsService.updateOne(
203 | existingTranslation.id,
204 | {
205 | ...translatedData,
206 | languages_code: language.code,
207 | },
208 | );
209 | } else {
210 | result = await translationsService.createOne({
211 | ...translatedData,
212 | languages_code: language.code,
213 | [`${collection}_${collection_pk}`]: item_id,
214 | });
215 | }
216 |
217 | return {
218 | success: true,
219 | language: language.code,
220 | operation: existingTranslation ? "updated" : "created",
221 | data: result,
222 | };
223 | } catch (error) {
224 | return {
225 | success: false,
226 | language: language.code,
227 | error: error instanceof Error ? error.message : "Unknown error",
228 | };
229 | }
230 | },
231 | ),
232 | );
233 |
234 | const requestedLanguages = new Set(target_languages || []);
235 | const missingLanguages =
236 | target_languages?.filter(
237 | (code) =>
238 | !targetLanguages.find(
239 | (lang: { code: string }) => lang.code === code,
240 | ),
241 | ) || [];
242 |
243 | const missingResults: TranslationResult[] = missingLanguages.map(
244 | (code) => ({
245 | success: false,
246 | language: code,
247 | error: `Language ${code} not found in language table`,
248 | }),
249 | );
250 |
251 | const allResults = [...results, ...missingResults];
252 |
253 | const summary: TranslationSummary = {
254 | successful: allResults.filter((r) => r.success).length,
255 | failed: allResults.filter((r) => !r.success).length,
256 | updated: allResults.filter((r) => r.operation === "updated").length,
257 | created: allResults.filter((r) => r.operation === "created").length,
258 | details: allResults,
259 | };
260 |
261 | return summary;
262 | } catch (error) {
263 | throw new Error(
264 | `Translation process failed: ${
265 | error instanceof Error ? error.message : "Unknown error"
266 | }`,
267 | );
268 | }
269 | },
270 | });
271 |
272 | // Helper functions
273 | function isHtml(text: string): boolean {
274 | const htmlRegex = /<[a-z][\s\S]*>/i;
275 | return htmlRegex.test(text);
276 | }
277 |
```
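
Per field, the handler's core decision is which Replexica SDK call to use: `text`-typed fields are translated one at a time with `localizeHtml` or `localizeText` depending on the `isHtml` heuristic, and everything else is batched into a single `localizeObject` call. A condensed sketch of that routing rule (a hypothetical helper, not part of the extension above; it omits the null-value and system-field skips) looks like this:

```typescript
// Hypothetical helper summarizing the per-field routing in the handler above.
function routeField(
  fieldType: string,
  fieldValue: unknown,
): "html" | "text" | "object" {
  if (fieldType !== "text") return "object"; // batched via localizeObject
  // Same heuristic as isHtml() above.
  const looksLikeHtml = /<[a-z][\s\S]*>/i.test(String(fieldValue));
  return looksLikeHtml ? "html" : "text"; // localizeHtml vs localizeText
}

// routeField("text", "<p>Hi</p>") === "html"
// routeField("text", "Hi")        === "text"
// routeField("string", "Hi")      === "object"
```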
--------------------------------------------------------------------------------
/packages/cli/src/cli/loaders/dato/extract.ts:
--------------------------------------------------------------------------------
```typescript
1 | import _ from "lodash";
2 | import { ILoader } from "../_types";
3 | import { createLoader } from "../_utils";
4 | import { DatoFilterLoaderOutput } from "./filter";
5 | import fs from "fs";
6 | import Z from "zod";
7 |
8 | export type DatoExtractLoaderOutput = {
9 | [modelId: string]: {
10 | [recordId: string]: {
11 | [fieldName: string]: string | Record<string, object>;
12 | };
13 | };
14 | };
15 |
16 | export default function createDatoExtractLoader(): ILoader<
17 | DatoFilterLoaderOutput,
18 | DatoExtractLoaderOutput
19 | > {
20 | return createLoader({
21 | async pull(locale, input) {
22 | const result: DatoExtractLoaderOutput = {};
23 |
24 | for (const [modelId, modelInfo] of _.entries(input)) {
25 | for (const [recordId, record] of _.entries(modelInfo)) {
26 | for (const [fieldName, fieldValue] of _.entries(record)) {
27 | const parsedValue = createParsedDatoValue(fieldValue);
28 | if (parsedValue) {
29 | _.set(result, [modelId, `_${recordId}`, fieldName], parsedValue);
30 | }
31 | }
32 | }
33 | }
34 |
35 | return result;
36 | },
37 | async push(locale, data, originalInput) {
38 | const result = _.cloneDeep(originalInput || {});
39 |
40 | for (const [modelId, modelInfo] of _.entries(data)) {
41 | for (const [virtualRecordId, record] of _.entries(modelInfo)) {
42 | for (const [fieldName, fieldValue] of _.entries(record)) {
43 | const [, recordId] = virtualRecordId.split("_");
44 | const originalFieldValue = _.get(originalInput, [
45 | modelId,
46 | recordId,
47 | fieldName,
48 | ]);
49 | const rawValue = createRawDatoValue(
50 | fieldValue,
51 | originalFieldValue,
52 | true,
53 | );
54 | _.set(
55 | result,
56 | [modelId, recordId, fieldName],
57 | rawValue || originalFieldValue,
58 | );
59 | }
60 | }
61 | }
62 |
63 | return result;
64 | },
65 | });
66 | }
67 |
68 | export type DatoValueRaw = any;
69 | export type DatoValueParsed = any;
70 |
71 | export function detectDatoFieldType(rawDatoValue: DatoValueRaw): string | null {
72 | if (
73 | _.has(rawDatoValue, "document") &&
74 | _.get(rawDatoValue, "schema") === "dast"
75 | ) {
76 | return "structured_text";
77 | } else if (
78 | _.has(rawDatoValue, "no_index") ||
79 | _.has(rawDatoValue, "twitter_card")
80 | ) {
81 | return "seo";
82 | } else if (_.get(rawDatoValue, "type") === "item") {
83 | return "single_block";
84 | } else if (
85 | _.isArray(rawDatoValue) &&
86 | _.every(rawDatoValue, (item) => _.get(item, "type") === "item")
87 | ) {
88 | return "rich_text";
89 | } else if (_isFile(rawDatoValue)) {
90 | return "file";
91 | } else if (
92 | _.isArray(rawDatoValue) &&
93 | _.every(rawDatoValue, (item) => _isFile(item))
94 | ) {
95 | return "gallery";
96 | } else if (_isJson(rawDatoValue)) {
97 | return "json";
98 | } else if (_.isString(rawDatoValue)) {
99 | return "string";
100 | } else if (_isVideo(rawDatoValue)) {
101 | return "video";
102 | } else if (
103 | _.isArray(rawDatoValue) &&
104 | _.every(rawDatoValue, (item) => _.isString(item))
105 | ) {
106 | return "ref_list";
107 | } else {
108 | return null;
109 | }
110 | }
111 |
112 | export function createParsedDatoValue(
113 | rawDatoValue: DatoValueRaw,
114 | ): DatoValueParsed {
115 | const fieldType = detectDatoFieldType(rawDatoValue);
116 | switch (fieldType) {
117 | default:
118 | return rawDatoValue;
119 | case "structured_text":
120 | return serializeStructuredText(rawDatoValue);
121 | case "seo":
122 | return serializeSeo(rawDatoValue);
123 | case "single_block":
124 | return serializeBlock(rawDatoValue);
125 | case "rich_text":
126 | return serializeBlockList(rawDatoValue);
127 | case "json":
128 | return JSON.parse(rawDatoValue);
129 | case "video":
130 | return serializeVideo(rawDatoValue);
131 | case "file":
132 | return serializeFile(rawDatoValue);
133 | case "gallery":
134 | return serializeGallery(rawDatoValue);
135 | case "ref_list":
136 | return null;
137 | }
138 | }
139 |
140 | export function createRawDatoValue(
141 | parsedDatoValue: DatoValueParsed,
142 | originalRawDatoValue: any,
143 | isClean = false,
144 | ): DatoValueRaw {
145 | const fieldType = detectDatoFieldType(originalRawDatoValue);
146 | switch (fieldType) {
147 | default:
148 | return parsedDatoValue;
149 | case "structured_text":
150 | return deserializeStructuredText(parsedDatoValue, originalRawDatoValue);
151 | case "seo":
152 | return deserializeSeo(parsedDatoValue, originalRawDatoValue);
153 | case "single_block":
154 | return deserializeBlock(parsedDatoValue, originalRawDatoValue, isClean);
155 | case "rich_text":
156 | return deserializeBlockList(
157 | parsedDatoValue,
158 | originalRawDatoValue,
159 | isClean,
160 | );
161 | case "json":
162 | return JSON.stringify(parsedDatoValue, null, 2);
163 | case "video":
164 | return deserializeVideo(parsedDatoValue, originalRawDatoValue);
165 | case "file":
166 | return deserializeFile(parsedDatoValue, originalRawDatoValue);
167 | case "gallery":
168 | return deserializeGallery(parsedDatoValue, originalRawDatoValue);
169 | case "ref_list":
170 | return originalRawDatoValue;
171 | }
172 | }
173 |
174 | function serializeStructuredText(rawStructuredText: any) {
175 | return serializeStructuredTextNode(rawStructuredText);
176 | // Encapsulates helper function args
177 | function serializeStructuredTextNode(
178 | node: any,
179 | path: string[] = [],
180 | acc: Record<string, any> = {},
181 | ) {
182 | if ("document" in node) {
183 | return serializeStructuredTextNode(
184 | node.document,
185 | [...path, "document"],
186 | acc,
187 | );
188 | }
189 |
190 | if (!_.isNil(node.value)) {
191 | acc[[...path, "value"].join(".")] = node.value;
192 | } else if (_.get(node, "type") === "block") {
193 | acc[[...path, "item"].join(".")] = serializeBlock(node.item);
194 | }
195 |
196 | if (node.children) {
197 | for (let i = 0; i < node.children.length; i++) {
198 | serializeStructuredTextNode(
199 | node.children[i],
200 | [...path, i.toString()],
201 | acc,
202 | );
203 | }
204 | }
205 |
206 | return acc;
207 | }
208 | }
209 |
210 | function serializeSeo(rawSeo: any) {
211 | return _.chain(rawSeo).pick(["title", "description"]).value();
212 | }
213 |
214 | function serializeBlock(rawBlock: any) {
215 | if (_.get(rawBlock, "type") === "item" && _.has(rawBlock, "id")) {
216 | return serializeBlock(rawBlock.attributes);
217 | }
218 |
219 | const result: Record<string, any> = {};
220 | for (const [attributeName, attributeValue] of _.entries(rawBlock)) {
221 | result[attributeName] = createParsedDatoValue(attributeValue);
222 | }
223 |
224 | return result;
225 | }
226 |
227 | function serializeBlockList(rawBlockList: any) {
228 | return _.chain(rawBlockList)
229 | .map((block) => serializeBlock(block))
230 | .value();
231 | }
232 |
233 | function serializeVideo(rawVideo: any) {
234 | return _.chain(rawVideo).pick(["title"]).value();
235 | }
236 |
237 | function serializeFile(rawFile: any) {
238 | return _.chain(rawFile).pick(["alt", "title"]).value();
239 | }
240 |
241 | function serializeGallery(rawGallery: any) {
242 | return _.chain(rawGallery)
243 | .map((item) => serializeFile(item))
244 | .value();
245 | }
246 |
247 | function deserializeFile(parsedFile: any, originalRawFile: any) {
248 | return _.chain(parsedFile).defaults(originalRawFile).value();
249 | }
250 |
251 | function deserializeGallery(parsedGallery: any, originalRawGallery: any) {
252 | return _.chain(parsedGallery)
253 | .map((item, i) => deserializeFile(item, originalRawGallery[i]))
254 | .value();
255 | }
256 |
257 | function deserializeVideo(parsedVideo: any, originalRawVideo: any) {
258 | return _.chain(parsedVideo).defaults(originalRawVideo).value();
259 | }
260 |
261 | function deserializeBlock(payload: any, rawNode: any, isClean = false) {
262 | const result = _.cloneDeep(rawNode);
263 |
264 | for (const [attributeName, attributeValue] of _.entries(rawNode.attributes)) {
265 | const rawValue = createRawDatoValue(
266 | payload[attributeName],
267 | attributeValue,
268 | isClean,
269 | );
270 | _.set(result, ["attributes", attributeName], rawValue);
271 | }
272 |
273 | if (isClean) {
274 | delete result["id"];
275 | }
276 |
277 | return result;
278 | }
279 |
280 | function deserializeSeo(parsedSeo: any, originalRawSeo: any) {
281 | return _.chain(parsedSeo)
282 | .pick(["title", "description"])
283 | .defaults(originalRawSeo)
284 | .value();
285 | }
286 |
287 | function deserializeBlockList(
288 | parsedBlockList: any,
289 | originalRawBlockList: any,
290 | isClean = false,
291 | ) {
292 | return _.chain(parsedBlockList)
293 | .map((block, i) =>
294 | deserializeBlock(block, originalRawBlockList[i], isClean),
295 | )
296 | .value();
297 | }
298 |
299 | function deserializeStructuredText(
300 | parsedStructuredText: Record<string, string>,
301 | originalRawStructuredText: any,
302 | ) {
303 | const result = _.cloneDeep(originalRawStructuredText);
304 |
305 | for (const [path, value] of _.entries(parsedStructuredText)) {
306 | const realPath = _.chain(path.split("."))
307 | .flatMap((s) => (!_.isNaN(_.toNumber(s)) ? ["children", s] : s))
308 | .value();
309 | const deserializedValue = createRawDatoValue(
310 | value,
311 | _.get(originalRawStructuredText, realPath),
312 | true,
313 | );
314 | _.set(result, realPath, deserializedValue);
315 | }
316 |
317 | return result;
318 | }
319 |
320 | function _isJson(rawDatoValue: DatoValueRaw): boolean {
321 | try {
322 | return (
323 | _.isString(rawDatoValue) &&
324 | rawDatoValue.startsWith("{") &&
325 | rawDatoValue.endsWith("}") &&
326 | !!JSON.parse(rawDatoValue)
327 | );
328 | } catch (e) {
329 | return false;
330 | }
331 | }
332 |
333 | function _isFile(rawDatoValue: DatoValueRaw): boolean {
334 | return (
335 | _.isObject(rawDatoValue) &&
336 | ["alt", "title", "custom_data", "focal_point", "upload_id"].every((key) =>
337 | _.has(rawDatoValue, key),
338 | )
339 | );
340 | }
341 |
342 | function _isVideo(rawDatoValue: DatoValueRaw): boolean {
343 | return (
344 | _.isObject(rawDatoValue) &&
345 | [
346 | "url",
347 | "title",
348 | "width",
349 | "height",
350 | "provider",
351 | "provider_uid",
352 | "thumbnail_url",
353 | ].every((key) => _.has(rawDatoValue, key))
354 | );
355 | }
356 |
```
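
The serialization in this loader hinges on `detectDatoFieldType`, which classifies a raw value by shape before deciding how to (de)serialize it. A small sketch (not part of the repository; the sample values are illustrative assumptions derived from the checks above, not real Dato payloads):

```typescript
// Hypothetical classification examples for detectDatoFieldType
// (assumed relative import).
import { detectDatoFieldType } from "./extract";

detectDatoFieldType("Plain title");                                // "string"
detectDatoFieldType('{"a":1}');                                    // "json"
detectDatoFieldType({ schema: "dast", document: {} });             // "structured_text"
detectDatoFieldType({ no_index: false, twitter_card: "summary" }); // "seo"
detectDatoFieldType(["rec1", "rec2"]);                             // "ref_list"
detectDatoFieldType(42);                                           // null (unrecognized)
```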
--------------------------------------------------------------------------------
/packages/locales/src/names/index.spec.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, vi, beforeEach } from "vitest";
2 | import { getCountryName, getLanguageName, getScriptName } from "./index";
3 |
4 | // Mock the loader functions
5 | vi.mock("./loader", () => ({
6 | loadTerritoryNames: vi.fn(),
7 | loadLanguageNames: vi.fn(),
8 | loadScriptNames: vi.fn(),
9 | }));
10 |
11 | import {
12 | loadTerritoryNames,
13 | loadLanguageNames,
14 | loadScriptNames,
15 | } from "./loader";
16 |
17 | const mockLoadTerritoryNames = loadTerritoryNames as ReturnType<typeof vi.fn>;
18 | const mockLoadLanguageNames = loadLanguageNames as ReturnType<typeof vi.fn>;
19 | const mockLoadScriptNames = loadScriptNames as ReturnType<typeof vi.fn>;
20 |
21 | describe("getCountryName", () => {
22 | beforeEach(() => {
23 | vi.clearAllMocks();
24 | });
25 |
26 | it("should get country name in English by default", async () => {
27 | mockLoadTerritoryNames.mockResolvedValue({
28 | US: "United States",
29 | CN: "China",
30 | DE: "Germany",
31 | });
32 |
33 | const result = await getCountryName("US");
34 |
35 | expect(result).toBe("United States");
36 | expect(mockLoadTerritoryNames).toHaveBeenCalledWith("en");
37 | });
38 |
39 | it("should get country name in Spanish", async () => {
40 | mockLoadTerritoryNames.mockResolvedValue({
41 | US: "Estados Unidos",
42 | CN: "China",
43 | DE: "Alemania",
44 | });
45 |
46 | const result = await getCountryName("US", "es");
47 |
48 | expect(result).toBe("Estados Unidos");
49 | expect(mockLoadTerritoryNames).toHaveBeenCalledWith("es");
50 | });
51 |
52 | it("should normalize country code to uppercase", async () => {
53 | mockLoadTerritoryNames.mockResolvedValue({
54 | US: "United States",
55 | CN: "China",
56 | });
57 |
58 | const result = await getCountryName("us");
59 |
60 | expect(result).toBe("United States");
61 | expect(mockLoadTerritoryNames).toHaveBeenCalledWith("en");
62 | });
63 |
64 | it("should throw error for empty country code", async () => {
65 | await expect(getCountryName("")).rejects.toThrow(
66 | "Country code is required",
67 | );
68 | expect(mockLoadTerritoryNames).not.toHaveBeenCalled();
69 | });
70 |
71 | it("should throw error for null country code", async () => {
72 | await expect(getCountryName(null as any)).rejects.toThrow(
73 | "Country code is required",
74 | );
75 | expect(mockLoadTerritoryNames).not.toHaveBeenCalled();
76 | });
77 |
78 | it("should throw error for undefined country code", async () => {
79 | await expect(getCountryName(undefined as any)).rejects.toThrow(
80 | "Country code is required",
81 | );
82 | expect(mockLoadTerritoryNames).not.toHaveBeenCalled();
83 | });
84 |
85 | it("should throw error for unknown country code", async () => {
86 | mockLoadTerritoryNames.mockResolvedValue({
87 | US: "United States",
88 | CN: "China",
89 | });
90 |
91 | await expect(getCountryName("XX")).rejects.toThrow(
92 | 'Country code "XX" not found',
93 | );
94 | });
95 |
96 | it("should handle loader errors", async () => {
97 | mockLoadTerritoryNames.mockRejectedValue(new Error("Failed to load data"));
98 |
99 | await expect(getCountryName("US")).rejects.toThrow("Failed to load data");
100 | });
101 | });
102 |
103 | describe("getLanguageName", () => {
104 | beforeEach(() => {
105 | vi.clearAllMocks();
106 | });
107 |
108 | it("should get language name in English by default", async () => {
109 | mockLoadLanguageNames.mockResolvedValue({
110 | en: "English",
111 | es: "Spanish",
112 | zh: "Chinese",
113 | });
114 |
115 | const result = await getLanguageName("en");
116 |
117 | expect(result).toBe("English");
118 | expect(mockLoadLanguageNames).toHaveBeenCalledWith("en");
119 | });
120 |
121 | it("should get language name in Spanish", async () => {
122 | mockLoadLanguageNames.mockResolvedValue({
123 | en: "inglés",
124 | es: "español",
125 | zh: "chino",
126 | });
127 |
128 | const result = await getLanguageName("en", "es");
129 |
130 | expect(result).toBe("inglés");
131 | expect(mockLoadLanguageNames).toHaveBeenCalledWith("es");
132 | });
133 |
134 | it("should normalize language code to lowercase", async () => {
135 | mockLoadLanguageNames.mockResolvedValue({
136 | en: "English",
137 | es: "Spanish",
138 | });
139 |
140 | const result = await getLanguageName("EN");
141 |
142 | expect(result).toBe("English");
143 | expect(mockLoadLanguageNames).toHaveBeenCalledWith("en");
144 | });
145 |
146 | it("should throw error for empty language code", async () => {
147 | await expect(getLanguageName("")).rejects.toThrow(
148 | "Language code is required",
149 | );
150 | expect(mockLoadLanguageNames).not.toHaveBeenCalled();
151 | });
152 |
153 | it("should throw error for null language code", async () => {
154 | await expect(getLanguageName(null as any)).rejects.toThrow(
155 | "Language code is required",
156 | );
157 | expect(mockLoadLanguageNames).not.toHaveBeenCalled();
158 | });
159 |
160 | it("should throw error for undefined language code", async () => {
161 | await expect(getLanguageName(undefined as any)).rejects.toThrow(
162 | "Language code is required",
163 | );
164 | expect(mockLoadLanguageNames).not.toHaveBeenCalled();
165 | });
166 |
167 | it("should throw error for unknown language code", async () => {
168 | mockLoadLanguageNames.mockResolvedValue({
169 | en: "English",
170 | es: "Spanish",
171 | });
172 |
173 | await expect(getLanguageName("xx")).rejects.toThrow(
174 | 'Language code "xx" not found',
175 | );
176 | });
177 |
178 | it("should handle loader errors", async () => {
179 | mockLoadLanguageNames.mockRejectedValue(new Error("Failed to load data"));
180 |
181 | await expect(getLanguageName("en")).rejects.toThrow("Failed to load data");
182 | });
183 | });
184 |
185 | describe("getScriptName", () => {
186 | beforeEach(() => {
187 | vi.clearAllMocks();
188 | });
189 |
190 | it("should get script name in English by default", async () => {
191 | mockLoadScriptNames.mockResolvedValue({
192 | Latn: "Latin",
193 | Cyrl: "Cyrillic",
194 | Hans: "Simplified",
195 | Hant: "Traditional",
196 | });
197 |
198 | const result = await getScriptName("Latn");
199 |
200 | expect(result).toBe("Latin");
201 | expect(mockLoadScriptNames).toHaveBeenCalledWith("en");
202 | });
203 |
204 | it("should get script name in Spanish", async () => {
205 | mockLoadScriptNames.mockResolvedValue({
206 | Latn: "latino",
207 | Cyrl: "cirílico",
208 | Hans: "simplificado",
209 | Hant: "tradicional",
210 | });
211 |
212 | const result = await getScriptName("Hans", "es");
213 |
214 | expect(result).toBe("simplificado");
215 | expect(mockLoadScriptNames).toHaveBeenCalledWith("es");
216 | });
217 |
218 | it("should preserve script code case", async () => {
219 | mockLoadScriptNames.mockResolvedValue({
220 | Latn: "Latin",
221 | CYRL: "Cyrillic", // Note: some script codes might be uppercase
222 | hans: "Simplified", // Note: some might be lowercase
223 | });
224 |
225 | const result1 = await getScriptName("Latn");
226 | const result2 = await getScriptName("CYRL");
227 | const result3 = await getScriptName("hans");
228 |
229 | expect(result1).toBe("Latin");
230 | expect(result2).toBe("Cyrillic");
231 | expect(result3).toBe("Simplified");
232 | });
233 |
234 | it("should throw error for empty script code", async () => {
235 | await expect(getScriptName("")).rejects.toThrow("Script code is required");
236 | expect(mockLoadScriptNames).not.toHaveBeenCalled();
237 | });
238 |
239 | it("should throw error for null script code", async () => {
240 | await expect(getScriptName(null as any)).rejects.toThrow(
241 | "Script code is required",
242 | );
243 | expect(mockLoadScriptNames).not.toHaveBeenCalled();
244 | });
245 |
246 | it("should throw error for undefined script code", async () => {
247 | await expect(getScriptName(undefined as any)).rejects.toThrow(
248 | "Script code is required",
249 | );
250 | expect(mockLoadScriptNames).not.toHaveBeenCalled();
251 | });
252 |
253 | it("should throw error for unknown script code", async () => {
254 | mockLoadScriptNames.mockResolvedValue({
255 | Latn: "Latin",
256 | Cyrl: "Cyrillic",
257 | });
258 |
259 | await expect(getScriptName("Xxxx")).rejects.toThrow(
260 | 'Script code "Xxxx" not found',
261 | );
262 | });
263 |
264 | it("should handle loader errors", async () => {
265 | mockLoadScriptNames.mockRejectedValue(new Error("Failed to load data"));
266 |
267 | await expect(getScriptName("Latn")).rejects.toThrow("Failed to load data");
268 | });
269 | });
270 |
271 | describe("Integration scenarios", () => {
272 | beforeEach(() => {
273 | vi.clearAllMocks();
274 | });
275 |
276 | it("should handle multiple languages for the same code", async () => {
277 | // Mock different responses for different languages
278 | mockLoadTerritoryNames
279 | .mockResolvedValueOnce({ US: "United States" }) // en
280 | .mockResolvedValueOnce({ US: "Estados Unidos" }) // es
281 | .mockResolvedValueOnce({ US: "États-Unis" }); // fr
282 |
283 | const result1 = await getCountryName("US", "en");
284 | const result2 = await getCountryName("US", "es");
285 | const result3 = await getCountryName("US", "fr");
286 |
287 | expect(result1).toBe("United States");
288 | expect(result2).toBe("Estados Unidos");
289 | expect(result3).toBe("États-Unis");
290 |
291 | expect(mockLoadTerritoryNames).toHaveBeenCalledTimes(3);
292 | expect(mockLoadTerritoryNames).toHaveBeenNthCalledWith(1, "en");
293 | expect(mockLoadTerritoryNames).toHaveBeenNthCalledWith(2, "es");
294 | expect(mockLoadTerritoryNames).toHaveBeenNthCalledWith(3, "fr");
295 | });
296 |
297 | it("should handle Chinese language names", async () => {
298 | mockLoadLanguageNames.mockResolvedValue({
299 | en: "英语",
300 | es: "西班牙语",
301 | fr: "法语",
302 | });
303 |
304 | const result1 = await getLanguageName("en", "zh");
305 | const result2 = await getLanguageName("es", "zh");
306 | const result3 = await getLanguageName("fr", "zh");
307 |
308 | expect(result1).toBe("英语");
309 | expect(result2).toBe("西班牙语");
310 | expect(result3).toBe("法语");
311 | });
312 |
313 | it("should handle script names with variants", async () => {
314 | mockLoadScriptNames.mockResolvedValue({
315 | Hans: "Simplified Han",
316 | Hant: "Traditional Han",
317 | Latn: "Latin",
318 | Cyrl: "Cyrillic",
319 | });
320 |
321 | const result1 = await getScriptName("Hans");
322 | const result2 = await getScriptName("Hant");
323 | const result3 = await getScriptName("Latn");
324 |
325 | expect(result1).toBe("Simplified Han");
326 | expect(result2).toBe("Traditional Han");
327 | expect(result3).toBe("Latin");
328 | });
329 | });
330 |
```
--------------------------------------------------------------------------------
/packages/locales/src/validation.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { LOCALE_REGEX } from "./constants";
2 |
3 | /**
4 | * Validation functions for locale codes and components
5 | */
6 |
7 | // ISO 639-1 language codes (most common)
8 | const VALID_LANGUAGE_CODES = new Set([
9 | "aa",
10 | "ab",
11 | "ae",
12 | "af",
13 | "ak",
14 | "am",
15 | "an",
16 | "ar",
17 | "as",
18 | "av",
19 | "ay",
20 | "az",
21 | "ba",
22 | "be",
23 | "bg",
24 | "bh",
25 | "bi",
26 | "bm",
27 | "bn",
28 | "bo",
29 | "br",
30 | "bs",
31 | "ca",
32 | "ce",
33 | "ch",
34 | "co",
35 | "cr",
36 | "cs",
37 | "cu",
38 | "cv",
39 | "cy",
40 | "da",
41 | "de",
42 | "dv",
43 | "dz",
44 | "ee",
45 | "el",
46 | "en",
47 | "eo",
48 | "es",
49 | "et",
50 | "eu",
51 | "fa",
52 | "ff",
53 | "fi",
54 | "fj",
55 | "fo",
56 | "fr",
57 | "fy",
58 | "ga",
59 | "gd",
60 | "gl",
61 | "gn",
62 | "gu",
63 | "gv",
64 | "ha",
65 | "he",
66 | "hi",
67 | "ho",
68 | "hr",
69 | "ht",
70 | "hu",
71 | "hy",
72 | "hz",
73 | "ia",
74 | "id",
75 | "ie",
76 | "ig",
77 | "ii",
78 | "ik",
79 | "io",
80 | "is",
81 | "it",
82 | "iu",
83 | "ja",
84 | "jv",
85 | "ka",
86 | "kg",
87 | "ki",
88 | "kj",
89 | "kk",
90 | "kl",
91 | "km",
92 | "kn",
93 | "ko",
94 | "kr",
95 | "ks",
96 | "ku",
97 | "kv",
98 | "kw",
99 | "ky",
100 | "la",
101 | "lb",
102 | "lg",
103 | "li",
104 | "ln",
105 | "lo",
106 | "lt",
107 | "lu",
108 | "lv",
109 | "mg",
110 | "mh",
111 | "mi",
112 | "mk",
113 | "ml",
114 | "mn",
115 | "mr",
116 | "ms",
117 | "mt",
118 | "my",
119 | "na",
120 | "nb",
121 | "nd",
122 | "ne",
123 | "ng",
124 | "nl",
125 | "nn",
126 | "no",
127 | "nr",
128 | "nv",
129 | "ny",
130 | "oc",
131 | "oj",
132 | "om",
133 | "or",
134 | "os",
135 | "pa",
136 | "pi",
137 | "pl",
138 | "ps",
139 | "pt",
140 | "qu",
141 | "rm",
142 | "rn",
143 | "ro",
144 | "ru",
145 | "rw",
146 | "sa",
147 | "sc",
148 | "sd",
149 | "se",
150 | "sg",
151 | "si",
152 | "sk",
153 | "sl",
154 | "sm",
155 | "sn",
156 | "so",
157 | "sq",
158 | "sr",
159 | "ss",
160 | "st",
161 | "su",
162 | "sv",
163 | "sw",
164 | "ta",
165 | "te",
166 | "tg",
167 | "th",
168 | "ti",
169 | "tk",
170 | "tl",
171 | "tn",
172 | "to",
173 | "tr",
174 | "ts",
175 | "tt",
176 | "tw",
177 | "ty",
178 | "ug",
179 | "uk",
180 | "ur",
181 | "uz",
182 | "ve",
183 | "vi",
184 | "vo",
185 | "wa",
186 | "wo",
187 | "xh",
188 | "yi",
189 | "yo",
190 | "za",
191 | "zh",
192 | "zu",
193 | ]);
194 |
195 | // ISO 15924 script codes (most common)
196 | const VALID_SCRIPT_CODES = new Set([
197 | "Adlm",
198 | "Afak",
199 | "Aghb",
200 | "Ahom",
201 | "Arab",
202 | "Aran",
203 | "Armi",
204 | "Armn",
205 | "Avst",
206 | "Bali",
207 | "Bamu",
208 | "Bass",
209 | "Batk",
210 | "Beng",
211 | "Bhks",
212 | "Blis",
213 | "Bopo",
214 | "Brah",
215 | "Brai",
216 | "Bugi",
217 | "Buhd",
218 | "Cakm",
219 | "Cans",
220 | "Cari",
221 | "Cham",
222 | "Cher",
223 | "Chrs",
224 | "Cirt",
225 | "Copt",
226 | "Cpmn",
227 | "Cprt",
228 | "Cyrl",
229 | "Cyrs",
230 | "Deva",
231 | "Diak",
232 | "Dogr",
233 | "Dsrt",
234 | "Dupl",
235 | "Egyd",
236 | "Egyh",
237 | "Egyp",
238 | "Elba",
239 | "Elym",
240 | "Ethi",
241 | "Gara",
242 | "Gong",
243 | "Gonm",
244 | "Goth",
245 | "Gran",
246 | "Grek",
247 | "Gujr",
248 | "Guru",
249 | "Hanb",
250 | "Hang",
251 | "Hani",
252 | "Hano",
253 | "Hans",
254 | "Hant",
255 | "Hatr",
256 | "Hebr",
257 | "Hira",
258 | "Hluw",
259 | "Hmng",
260 | "Hmnp",
261 | "Hrkt",
262 | "Hung",
263 | "Inds",
264 | "Ital",
265 | "Jamo",
266 | "Java",
267 | "Jpan",
268 | "Jurc",
269 | "Kali",
270 | "Kana",
271 | "Khar",
272 | "Khmr",
273 | "Khoj",
274 | "Kits",
275 | "Knda",
276 | "Kore",
277 | "Kpel",
278 | "Kthi",
279 | "Lana",
280 | "Laoo",
281 | "Latf",
282 | "Latg",
283 | "Latn",
284 | "Leke",
285 | "Lepc",
286 | "Limb",
287 | "Lina",
288 | "Linb",
289 | "Lisu",
290 | "Loma",
291 | "Lyci",
292 | "Lydi",
293 | "Mahj",
294 | "Maka",
295 | "Mand",
296 | "Mani",
297 | "Marc",
298 | "Maya",
299 | "Medf",
300 | "Mend",
301 | "Merc",
302 | "Mero",
303 | "Mlym",
304 | "Modi",
305 | "Mong",
306 | "Moon",
307 | "Mroo",
308 | "Mtei",
309 | "Mult",
310 | "Mymr",
311 | "Nand",
312 | "Narb",
313 | "Nbat",
314 | "Newa",
315 | "Nkgb",
316 | "Nkoo",
317 | "Nshu",
318 | "Ogam",
319 | "Olck",
320 | "Orkh",
321 | "Orya",
322 | "Osge",
323 | "Osma",
324 | "Ougr",
325 | "Palm",
326 | "Pauc",
327 | "Perm",
328 | "Phag",
329 | "Phli",
330 | "Phlp",
331 | "Phlv",
332 | "Phnx",
333 | "Plrd",
334 | "Prti",
335 | "Qaaa",
336 | "Qabx",
337 | "Rjng",
338 | "Rohg",
339 | "Roro",
340 | "Runr",
341 | "Samr",
342 | "Sara",
343 | "Sarb",
344 | "Saur",
345 | "Sgnw",
346 | "Shaw",
347 | "Shrd",
348 | "Shui",
349 | "Sidd",
350 | "Sind",
351 | "Sinh",
352 | "Sogd",
353 | "Sogo",
354 | "Sora",
355 | "Soyo",
356 | "Sund",
357 | "Sylo",
358 | "Syrc",
359 | "Syre",
360 | "Syrj",
361 | "Syrn",
362 | "Tagb",
363 | "Takr",
364 | "Tale",
365 | "Talu",
366 | "Taml",
367 | "Tang",
368 | "Tavt",
369 | "Telu",
370 | "Teng",
371 | "Tfng",
372 | "Tglg",
373 | "Thaa",
374 | "Thai",
375 | "Tibt",
376 | "Tirh",
377 | "Ugar",
378 | "Vaii",
379 | "Visp",
380 | "Wara",
381 | "Wcho",
382 | "Wole",
383 | "Xpeo",
384 | "Xsux",
385 | "Yezi",
386 | "Yiii",
387 | "Zanb",
388 | "Zinh",
389 | "Zmth",
390 | "Zsye",
391 | "Zsym",
392 | "Zxxx",
393 | "Zyyy",
394 | "Zzzz",
395 | ]);
396 |
397 | // ISO 3166-1 alpha-2 country codes (most common)
398 | const VALID_REGION_CODES = new Set([
399 | "AD",
400 | "AE",
401 | "AF",
402 | "AG",
403 | "AI",
404 | "AL",
405 | "AM",
406 | "AO",
407 | "AQ",
408 | "AR",
409 | "AS",
410 | "AT",
411 | "AU",
412 | "AW",
413 | "AX",
414 | "AZ",
415 | "BA",
416 | "BB",
417 | "BD",
418 | "BE",
419 | "BF",
420 | "BG",
421 | "BH",
422 | "BI",
423 | "BJ",
424 | "BL",
425 | "BM",
426 | "BN",
427 | "BO",
428 | "BQ",
429 | "BR",
430 | "BS",
431 | "BT",
432 | "BV",
433 | "BW",
434 | "BY",
435 | "BZ",
436 | "CA",
437 | "CC",
438 | "CD",
439 | "CF",
440 | "CG",
441 | "CH",
442 | "CI",
443 | "CK",
444 | "CL",
445 | "CM",
446 | "CN",
447 | "CO",
448 | "CR",
449 | "CU",
450 | "CV",
451 | "CW",
452 | "CX",
453 | "CY",
454 | "CZ",
455 | "DE",
456 | "DJ",
457 | "DK",
458 | "DM",
459 | "DO",
460 | "DZ",
461 | "EC",
462 | "EE",
463 | "EG",
464 | "EH",
465 | "ER",
466 | "ES",
467 | "ET",
468 | "FI",
469 | "FJ",
470 | "FK",
471 | "FM",
472 | "FO",
473 | "FR",
474 | "GA",
475 | "GB",
476 | "GD",
477 | "GE",
478 | "GF",
479 | "GG",
480 | "GH",
481 | "GI",
482 | "GL",
483 | "GM",
484 | "GN",
485 | "GP",
486 | "GQ",
487 | "GR",
488 | "GS",
489 | "GT",
490 | "GU",
491 | "GW",
492 | "GY",
493 | "HK",
494 | "HM",
495 | "HN",
496 | "HR",
497 | "HT",
498 | "HU",
499 | "ID",
500 | "IE",
501 | "IL",
502 | "IM",
503 | "IN",
504 | "IO",
505 | "IQ",
506 | "IR",
507 | "IS",
508 | "IT",
509 | "JE",
510 | "JM",
511 | "JO",
512 | "JP",
513 | "KE",
514 | "KG",
515 | "KH",
516 | "KI",
517 | "KM",
518 | "KN",
519 | "KP",
520 | "KR",
521 | "KW",
522 | "KY",
523 | "KZ",
524 | "LA",
525 | "LB",
526 | "LC",
527 | "LI",
528 | "LK",
529 | "LR",
530 | "LS",
531 | "LT",
532 | "LU",
533 | "LV",
534 | "LY",
535 | "MA",
536 | "MC",
537 | "MD",
538 | "ME",
539 | "MF",
540 | "MG",
541 | "MH",
542 | "MK",
543 | "ML",
544 | "MM",
545 | "MN",
546 | "MO",
547 | "MP",
548 | "MQ",
549 | "MR",
550 | "MS",
551 | "MT",
552 | "MU",
553 | "MV",
554 | "MW",
555 | "MX",
556 | "MY",
557 | "MZ",
558 | "NA",
559 | "NC",
560 | "NE",
561 | "NF",
562 | "NG",
563 | "NI",
564 | "NL",
565 | "NO",
566 | "NP",
567 | "NR",
568 | "NU",
569 | "NZ",
570 | "OM",
571 | "PA",
572 | "PE",
573 | "PF",
574 | "PG",
575 | "PH",
576 | "PK",
577 | "PL",
578 | "PM",
579 | "PN",
580 | "PR",
581 | "PS",
582 | "PT",
583 | "PW",
584 | "PY",
585 | "QA",
586 | "RE",
587 | "RO",
588 | "RS",
589 | "RU",
590 | "RW",
591 | "SA",
592 | "SB",
593 | "SC",
594 | "SD",
595 | "SE",
596 | "SG",
597 | "SH",
598 | "SI",
599 | "SJ",
600 | "SK",
601 | "SL",
602 | "SM",
603 | "SN",
604 | "SO",
605 | "SR",
606 | "SS",
607 | "ST",
608 | "SV",
609 | "SX",
610 | "SY",
611 | "SZ",
612 | "TC",
613 | "TD",
614 | "TF",
615 | "TG",
616 | "TH",
617 | "TJ",
618 | "TK",
619 | "TL",
620 | "TM",
621 | "TN",
622 | "TO",
623 | "TR",
624 | "TT",
625 | "TV",
626 | "TW",
627 | "TZ",
628 | "UA",
629 | "UG",
630 | "UM",
631 | "US",
632 | "UY",
633 | "UZ",
634 | "VA",
635 | "VC",
636 | "VE",
637 | "VG",
638 | "VI",
639 | "VN",
640 | "VU",
641 | "WF",
642 | "WS",
643 | "YE",
644 | "YT",
645 | "ZA",
646 | "ZM",
647 | "ZW",
648 | ]);
649 |
650 | // UN M.49 numeric region codes and additional special region codes (most common)
651 | const VALID_NUMERIC_REGION_CODES = new Set([
652 | "001",
653 | "002",
654 | "003",
655 | "005",
656 | "009",
657 | "010",
658 | "011",
659 | "013",
660 | "014",
661 | "015",
662 | "017",
663 | "018",
664 | "019",
665 | "021",
666 | "029",
667 | "030",
668 | "034",
669 | "035",
670 | "039",
671 | "053",
672 | "054",
673 | "057",
674 | "061",
675 | "142",
676 | "143",
677 | "145",
678 | "150",
679 | "151",
680 | "154",
681 | "155",
682 | "202",
683 | "419",
684 | "AC",
685 | "BL",
686 | "BQ",
687 | "BV",
688 | "CP",
689 | "CW",
690 | "DG",
691 | "EA",
692 | "EU",
693 | "EZ",
694 | "FK",
695 | "FO",
696 | "GF",
697 | "GG",
698 | "GI",
699 | "GL",
700 | "GP",
701 | "GS",
702 | "GU",
703 | "HM",
704 | "IC",
705 | "IM",
706 | "IO",
707 | "JE",
708 | "KY",
709 | "MF",
710 | "MH",
711 | "MO",
712 | "MP",
713 | "MQ",
714 | "MS",
715 | "NC",
716 | "NF",
717 | "PF",
718 | "PM",
719 | "PN",
720 | "PR",
721 | "PS",
722 | "RE",
723 | "SH",
724 | "SJ",
725 | "SX",
726 | "TC",
727 | "TF",
728 | "TK",
729 | "TL",
730 | "UM",
731 | "VA",
732 | "VC",
733 | "VG",
734 | "VI",
735 | "WF",
736 | "YT",
737 | ]);
738 |
739 | /**
740 | * Checks if a locale string is properly formatted and uses real codes
741 | *
742 | * @param locale - The locale string to validate
743 | * @returns true if the locale is valid, false otherwise
744 | *
745 | * @example
746 | * ```typescript
747 | * isValidLocale("en-US"); // true
748 | * isValidLocale("en_US"); // true
749 | * isValidLocale("zh-Hans-CN"); // true
750 | * isValidLocale("invalid"); // false
751 | * isValidLocale("en-FAKE"); // false
752 | * isValidLocale("xyz-US"); // false
753 | * ```
754 | */
755 | export function isValidLocale(locale: string): boolean {
756 | if (typeof locale !== "string" || !locale.trim()) {
757 | return false;
758 | }
759 |
760 | try {
761 | const match = locale.match(LOCALE_REGEX);
762 | if (!match) {
763 | return false;
764 | }
765 |
766 | const [, language, script, region] = match;
767 |
768 | // Validate language code
769 | if (!isValidLanguageCode(language)) {
770 | return false;
771 | }
772 |
773 | // Validate script code if present
774 | if (script && !isValidScriptCode(script)) {
775 | return false;
776 | }
777 |
778 | // Validate region code if present
779 | if (region && !isValidRegionCode(region)) {
780 | return false;
781 | }
782 |
783 | return true;
784 | } catch {
785 | return false;
786 | }
787 | }
788 |
789 | /**
790 | * Checks if a language code is valid
791 | *
792 | * @param code - The language code to validate
793 | * @returns true if the language code is valid, false otherwise
794 | *
795 | * @example
796 | * ```typescript
797 | * isValidLanguageCode("en"); // true
798 | * isValidLanguageCode("zh"); // true
799 | * isValidLanguageCode("es"); // true
800 | * isValidLanguageCode("xyz"); // false
801 | * isValidLanguageCode("fake"); // false
802 | * ```
803 | */
804 | export function isValidLanguageCode(code: string): boolean {
805 | if (typeof code !== "string" || !code.trim()) {
806 | return false;
807 | }
808 | return VALID_LANGUAGE_CODES.has(code.toLowerCase());
809 | }
810 |
811 | /**
812 | * Checks if a script code is valid
813 | *
814 | * @param code - The script code to validate
815 | * @returns true if the script code is valid, false otherwise
816 | *
817 | * @example
818 | * ```typescript
819 | * isValidScriptCode("Hans"); // true (Simplified Chinese)
820 | * isValidScriptCode("Hant"); // true (Traditional Chinese)
821 | * isValidScriptCode("Latn"); // true (Latin alphabet)
822 | * isValidScriptCode("Cyrl"); // true (Cyrillic)
823 | * isValidScriptCode("Fake"); // false
824 | * ```
825 | */
826 | export function isValidScriptCode(code: string): boolean {
827 | if (typeof code !== "string" || !code.trim()) {
828 | return false;
829 | }
830 | return VALID_SCRIPT_CODES.has(code);
831 | }
832 |
833 | /**
834 | * Checks if a region/country code is valid
835 | *
836 | * @param code - The region code to validate
837 | * @returns true if the region code is valid, false otherwise
838 | *
839 | * @example
840 | * ```typescript
841 | * isValidRegionCode("US"); // true
842 | * isValidRegionCode("CN"); // true
843 | * isValidRegionCode("GB"); // true
844 | * isValidRegionCode("ZZ"); // false
845 | * isValidRegionCode("FAKE"); // false
846 | * ```
847 | */
848 | export function isValidRegionCode(code: string): boolean {
849 | if (typeof code !== "string" || !code.trim()) {
850 | return false;
851 | }
852 |
853 | const upperCode = code.toUpperCase();
854 | return (
855 | VALID_REGION_CODES.has(upperCode) ||
856 | VALID_NUMERIC_REGION_CODES.has(upperCode)
857 | );
858 | }
859 |
```
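A minimal usage sketch for the validators above. The import path is illustrative (the package's public entry point may differ); the return values follow directly from the code shown:

```typescript
// Illustrative import path; adjust to the package's real entry point.
import { isValidLocale, isValidLanguageCode } from "./validation";

// Keep only well-formed locales that use real ISO codes.
const candidates = ["en-US", "zh-Hans-CN", "en_GB", "xyz-US", "en-FAKE"];
const valid = candidates.filter(isValidLocale);
// -> ["en-US", "zh-Hans-CN", "en_GB"]

// Language codes are matched case-insensitively (the code lowercases before lookup).
isValidLanguageCode("EN"); // true
```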
--------------------------------------------------------------------------------
/scripts/docs/src/json-schema/parser.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, expect, it } from "vitest";
2 | import {
3 | parseProperty,
4 | parseSchema,
5 | resolveRef,
6 | sortPropertyKeys,
7 | inferType,
8 | } from "./parser";
9 | import type { JSONSchemaObject, PropertyInfo } from "./types";
10 |
11 | describe("resolveRef", () => {
12 | it("should resolve simple reference", () => {
13 | const root = {
14 | definitions: {
15 | User: { type: "object", properties: { name: { type: "string" } } },
16 | },
17 | };
18 | const result = resolveRef("#/definitions/User", root);
19 | expect(result).toEqual({
20 | type: "object",
21 | properties: { name: { type: "string" } },
22 | });
23 | });
24 |
25 | it("should return undefined for invalid reference", () => {
26 | const root = { definitions: {} };
27 | const result = resolveRef("#/definitions/NonExistent", root);
28 | expect(result).toBeUndefined();
29 | });
30 |
31 | it("should handle deep nested references", () => {
32 | const root = {
33 | a: { b: { c: { value: "found" } } },
34 | };
35 | const result = resolveRef("#/a/b/c", root);
36 | expect(result).toEqual({ value: "found" });
37 | });
38 |
39 | it("should return undefined for non-hash references", () => {
40 | const root = {};
41 | const result = resolveRef("invalid", root);
42 | expect(result).toBeUndefined();
43 | });
44 | });
45 |
46 | describe("sortPropertyKeys", () => {
47 | it("should sort with custom order first", () => {
48 | const keys = ["gamma", "alpha", "beta"];
49 | const customOrder = ["beta", "alpha"];
50 | const result = sortPropertyKeys(keys, [], customOrder);
51 | expect(result).toEqual(["beta", "alpha", "gamma"]);
52 | });
53 |
54 | it("should prioritize required properties", () => {
55 | const keys = ["optional1", "required1", "optional2", "required2"];
56 | const required = ["required1", "required2"];
57 | const result = sortPropertyKeys(keys, required);
58 | expect(result).toEqual([
59 | "required1",
60 | "required2",
61 | "optional1",
62 | "optional2",
63 | ]);
64 | });
65 |
66 | it("should combine custom order with required sorting", () => {
67 | const keys = ["d", "c", "b", "a"];
68 | const required = ["c", "a"];
69 | const customOrder = ["b"];
70 | const result = sortPropertyKeys(keys, required, customOrder);
71 | expect(result).toEqual(["b", "a", "c", "d"]);
72 | });
73 |
74 | it("should handle empty arrays", () => {
75 | const result = sortPropertyKeys([]);
76 | expect(result).toEqual([]);
77 | });
78 | });
79 |
80 | describe("inferType", () => {
81 | const root = {};
82 |
83 | it("should handle primitive types", () => {
84 | expect(inferType({ type: "string" }, root)).toBe("string");
85 | expect(inferType({ type: "number" }, root)).toBe("number");
86 | expect(inferType({ type: "boolean" }, root)).toBe("boolean");
87 | });
88 |
89 | it("should handle array types", () => {
90 | expect(inferType({ type: "array" }, root)).toBe("array");
91 | expect(inferType({ type: "array", items: { type: "string" } }, root)).toBe(
92 | "array of string",
93 | );
94 | });
95 |
96 | it("should handle union types", () => {
97 | const schema = {
98 | type: ["string", "number"],
99 | };
100 | expect(inferType(schema, root)).toBe("string | number");
101 | });
102 |
103 | it("should handle anyOf unions", () => {
104 | const schema = {
105 | anyOf: [{ type: "string" }, { type: "number" }],
106 | };
107 | expect(inferType(schema, root)).toBe("string | number");
108 | });
109 |
110 | it("should handle $ref types", () => {
111 | const rootWithRef = {
112 | definitions: {
113 | User: { type: "object" },
114 | },
115 | };
116 | const schema = { $ref: "#/definitions/User" };
117 | expect(inferType(schema, rootWithRef)).toBe("object");
118 | });
119 |
120 | it("should handle complex array items with unions", () => {
121 | const schema = {
122 | type: "array",
123 | items: {
124 | anyOf: [{ type: "string" }, { type: "number" }],
125 | },
126 | };
127 | expect(inferType(schema, root)).toBe("array of string | number");
128 | });
129 |
130 | it("should return unknown for invalid schemas", () => {
131 | expect(inferType(null, root)).toBe("unknown");
132 | expect(inferType({}, root)).toBe("unknown");
133 | expect(inferType({ invalid: true }, root)).toBe("unknown");
134 | });
135 | });
136 |
137 | describe("parseProperty", () => {
138 | it("should parse simple property", () => {
139 | const schema = {
140 | type: "string",
141 | description: "A string property",
142 | default: "default value",
143 | };
144 | const result = parseProperty("name", schema, true);
145 |
146 | expect(result).toHaveLength(1);
147 | expect(result[0]).toEqual({
148 | name: "name",
149 | fullPath: "name",
150 | type: "string",
151 | required: true,
152 | description: "A string property",
153 | defaultValue: "default value",
154 | allowedValues: undefined,
155 | allowedKeys: undefined,
156 | });
157 | });
158 |
159 | it("should parse property with enum values", () => {
160 | const schema = {
161 | type: "string",
162 | enum: ["red", "green", "blue"],
163 | };
164 | const result = parseProperty("color", schema, false);
165 |
166 | expect(result[0].allowedValues).toEqual(["blue", "green", "red"]);
167 | });
168 |
169 | it("should parse property with allowed keys", () => {
170 | const schema = {
171 | type: "object",
172 | propertyNames: {
173 | enum: ["key1", "key2", "key3"],
174 | },
175 | };
176 | const result = parseProperty("config", schema, false);
177 |
178 | expect(result[0].allowedKeys).toEqual(["key1", "key2", "key3"]);
179 | });
180 |
181 | it("should handle parent path correctly", () => {
182 | const schema = { type: "string" };
183 | const result = parseProperty("child", schema, false, {
184 | parentPath: "parent",
185 | });
186 |
187 | expect(result[0].fullPath).toBe("parent.child");
188 | });
189 |
190 | it("should parse nested object properties", () => {
191 | const schema = {
192 | type: "object",
193 | properties: {
194 | name: { type: "string" },
195 | age: { type: "number", description: "Person's age" },
196 | },
197 | required: ["name"],
198 | };
199 | const result = parseProperty("person", schema, true);
200 |
201 | expect(result).toHaveLength(1);
202 | expect(result[0].children).toHaveLength(2);
203 | expect(result[0].children?.[0]).toEqual({
204 | name: "name",
205 | fullPath: "person.name",
206 | type: "string",
207 | required: true,
208 | description: undefined,
209 | defaultValue: undefined,
210 | allowedValues: undefined,
211 | allowedKeys: undefined,
212 | });
213 | expect(result[0].children?.[1]).toEqual({
214 | name: "age",
215 | fullPath: "person.age",
216 | type: "number",
217 | required: false,
218 | description: "Person's age",
219 | defaultValue: undefined,
220 | allowedValues: undefined,
221 | allowedKeys: undefined,
222 | });
223 | });
224 |
225 | it("should parse array with object items", () => {
226 | const schema = {
227 | type: "array",
228 | items: {
229 | type: "object",
230 | properties: {
231 | id: { type: "string" },
232 | value: { type: "number" },
233 | },
234 | required: ["id"],
235 | },
236 | };
237 | const result = parseProperty("items", schema, false);
238 |
239 | expect(result[0].children).toHaveLength(2);
240 | expect(result[0].children?.[0].fullPath).toBe("items.*.id");
241 | expect(result[0].children?.[0].required).toBe(true);
242 | expect(result[0].children?.[1].fullPath).toBe("items.*.value");
243 | expect(result[0].children?.[1].required).toBe(false);
244 | });
245 |
246 | it("should handle additionalProperties", () => {
247 | const schema = {
248 | type: "object",
249 | additionalProperties: {
250 | type: "string",
251 | description: "Dynamic property",
252 | },
253 | };
254 | const result = parseProperty("config", schema, false);
255 |
256 | expect(result[0].children).toHaveLength(1);
257 | expect(result[0].children?.[0].name).toBe("*");
258 | expect(result[0].children?.[0].fullPath).toBe("config.*");
259 | expect(result[0].children?.[0].type).toBe("string");
260 | });
261 |
262 | it("should handle markdownDescription over description", () => {
263 | const schema = {
264 | type: "string",
265 | description: "Plain description",
266 | markdownDescription: "**Markdown** description",
267 | };
268 | const result = parseProperty("field", schema, false);
269 |
270 | expect(result[0].description).toBe("**Markdown** description");
271 | });
272 |
273 | it("should return empty array for invalid schema", () => {
274 | const result = parseProperty("invalid", null, false);
275 | expect(result).toEqual([]);
276 | });
277 | });
278 |
279 | describe("parseSchema", () => {
280 | it("should parse complete schema", () => {
281 | const schema = {
282 | type: "object",
283 | properties: {
284 | version: { type: "string", default: "1.0" },
285 | config: {
286 | type: "object",
287 | properties: {
288 | debug: { type: "boolean" },
289 | },
290 | },
291 | },
292 | required: ["version"],
293 | };
294 |
295 | const result = parseSchema(schema);
296 | expect(result).toHaveLength(2);
297 | expect(result[0].name).toBe("version");
298 | expect(result[0].required).toBe(true);
299 | expect(result[1].name).toBe("config");
300 | expect(result[1].required).toBe(false);
301 | });
302 |
303 | it("should handle schema with $ref root", () => {
304 | const schema = {
305 | $ref: "#/definitions/Config",
306 | definitions: {
307 | Config: {
308 | type: "object",
309 | properties: {
310 | name: { type: "string" },
311 | },
312 | required: ["name"],
313 | },
314 | },
315 | };
316 |
317 | const result = parseSchema(schema);
318 | expect(result).toHaveLength(1);
319 | expect(result[0].name).toBe("name");
320 | expect(result[0].required).toBe(true);
321 | });
322 |
323 | it("should apply custom ordering", () => {
324 | const schema = {
325 | type: "object",
326 | properties: {
327 | gamma: { type: "string" },
328 | alpha: { type: "string" },
329 | beta: { type: "string" },
330 | },
331 | };
332 |
333 | const result = parseSchema(schema, { customOrder: ["beta", "alpha"] });
334 | expect(result.map((p: PropertyInfo) => p.name)).toEqual([
335 | "beta",
336 | "alpha",
337 | "gamma",
338 | ]);
339 | });
340 |
341 | it("should return empty array for invalid schema", () => {
342 | expect(parseSchema(null)).toEqual([]);
343 | expect(parseSchema({})).toEqual([]);
344 | expect(parseSchema({ type: "string" })).toEqual([]);
345 | });
346 |
347 | it("should handle missing definitions gracefully", () => {
348 | const schema = {
349 | $ref: "#/definitions/NonExistent",
350 | definitions: {},
351 | };
352 |
353 | const result = parseSchema(schema);
354 | expect(result).toEqual([]);
355 | });
356 | });
357 |
```
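For orientation, a small sketch of the relationship the tests above exercise. The schema is invented for illustration, and the expected shapes are paraphrased from the assertions in this file:

```typescript
import { parseSchema } from "./parser";

// A tiny schema: one required string plus an array of strings.
const schema = {
  type: "object",
  properties: {
    name: { type: "string", description: "Display name" },
    tags: { type: "array", items: { type: "string" } },
  },
  required: ["name"],
};

const props = parseSchema(schema);
// Required properties sort first, and array item types are inferred:
// props[0] ~ { name: "name", fullPath: "name", type: "string", required: true, ... }
// props[1] ~ { name: "tags", fullPath: "tags", type: "array of string", required: false, ... }
```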
--------------------------------------------------------------------------------
/packages/compiler/src/utils/jsx-content.spec.ts:
--------------------------------------------------------------------------------
```typescript
1 | import * as t from "@babel/types";
2 | import traverse, { NodePath } from "@babel/traverse";
3 | import { parse } from "@babel/parser";
4 | import { extractJsxContent } from "./jsx-content";
5 | import { describe, it, expect } from "vitest";
6 |
7 | describe("JSX Content Utils", () => {
8 | function parseJSX(code: string): t.File {
9 | return parse(code, {
10 | sourceType: "module",
11 | plugins: ["jsx", "typescript"],
12 | });
13 | }
14 |
15 | function getJSXElementPath(code: string): NodePath<t.JSXElement> {
16 | const ast = parseJSX(code);
17 | let elementPath: NodePath<t.JSXElement> | null = null;
18 |
19 | traverse(ast, {
20 | JSXElement(path) {
21 | elementPath = path;
22 | path.stop();
23 | },
24 | });
25 |
26 | if (!elementPath) {
27 | throw new Error("No JSX element found in the code");
28 | }
29 |
30 | return elementPath;
31 | }
32 |
33 | describe("extractJsxContent", () => {
34 | describe("plain", () => {
35 | it("should extract plain text content from JSX element", () => {
36 | const path = getJSXElementPath("<div>Hello world</div>");
37 | const content = extractJsxContent(path);
38 | expect(content).toBe("Hello world");
39 | });
40 |
41 | it("should return empty string for elements with no content", () => {
42 | const path = getJSXElementPath("<div></div>");
43 | const content = extractJsxContent(path);
44 | expect(content).toBe("");
45 | });
46 | });
47 |
48 | describe("whitespaces", () => {
49 | it("should handle multiple whitespaces", () => {
50 | const path = getJSXElementPath("<div> Hello world </div>");
51 | const content = extractJsxContent(path);
52 | expect(content).toBe("Hello world");
53 | });
54 |
55 | it("should handle multi-line content with whitespaces", () => {
56 | const path = getJSXElementPath("<div>\n Hello\n crazy world!</div>");
57 | const content = extractJsxContent(path);
58 | expect(content).toBe("Hello crazy world!");
59 | });
60 |
61 | it("should handle whitespaces between elements", () => {
62 | const path = getJSXElementPath(
63 | "<div>\n Hello <strong>crazy</strong> world! <Icons.Rocket /></div>",
64 | );
65 | const content = extractJsxContent(path);
66 | expect(content).toBe(
67 | "Hello <element:strong>crazy</element:strong> world! <element:Icons.Rocket></element:Icons.Rocket>",
68 | );
69 | });
70 |
71 | it("should handle explicit whitespaces", () => {
72 | const path = getJSXElementPath(
73 | '<div>\n Hello{" "}<strong>crazy {" "}world</strong></div>',
74 | );
75 | const content = extractJsxContent(path);
76 | expect(content).toBe(
77 | "Hello <element:strong>crazy world</element:strong>",
78 | );
79 | });
80 |
81 | it("should handle new lines between elements and explicit whitespaces", () => {
82 | const path = getJSXElementPath(
83 | '<div>\n Hello \n <strong>crazy</strong>\n <em>world</em>{" "}\n<u>forever</u></div>',
84 | );
85 | const content = extractJsxContent(path);
86 | expect(content).toBe(
87 | "Hello<element:strong>crazy</element:strong><element:em>world</element:em> <element:u>forever</element:u>",
88 | );
89 | });
90 | });
91 |
92 | describe("variables", () => {
93 | it("should extract content with simple identifiers like {count}", () => {
94 | const path = getJSXElementPath("<div>Items: {count}</div>");
95 | const content = extractJsxContent(path);
96 | expect(content).toBe("Items: {count}");
97 | });
98 |
99 | it("should handle multiple expressions", () => {
100 | const path = getJSXElementPath(
101 | "<div>{count} items in {category}</div>",
102 | );
103 | const content = extractJsxContent(path);
104 | expect(content).toBe("{count} items in {category}");
105 | });
106 |
107 | it("should handle nested elements", () => {
108 | const path = getJSXElementPath(
109 | "<div>Total: <strong>{count}</strong> items</div>",
110 | );
111 | const content = extractJsxContent(path);
112 | expect(content).toBe(
113 | "Total: <element:strong>{count}</element:strong> items",
114 | );
115 | });
116 |
117 | it("should handle object variables", () => {
118 | const path = getJSXElementPath(
119 | "<div>User: <strong>{user.profile.name}</strong> has {user.private.details.items.count} items</div>",
120 | );
121 | const content = extractJsxContent(path);
122 | expect(content).toBe(
123 | "User: <element:strong>{user.profile.name}</element:strong> has {user.private.details.items.count} items",
124 | );
125 | });
126 |
127 | it("should handle dynamic variables", () => {
128 | const path = getJSXElementPath(
129 | "<div>User <strong>{data[currentUserType][currentUserIndex].name}</strong> has {items.counts[type]} items of type <em>{typeNames[type]}</em></div>",
130 | );
131 | const content = extractJsxContent(path);
132 | expect(content).toBe(
133 | "User <element:strong>{data[currentUserType][currentUserIndex].name}</element:strong> has {items.counts[type]} items of type <element:em>{typeNames[type]}</element:em>",
134 | );
135 | });
136 | });
137 |
138 | describe("nested elements", () => {
139 | it("should handle multiple nested elements with correct indices", () => {
140 | const path = getJSXElementPath(
141 | "<div><strong>Hello</strong> and <em>welcome</em> to <code>my app</code></div>",
142 | );
143 | const content = extractJsxContent(path);
144 | expect(content).toBe(
145 | "<element:strong>Hello</element:strong> and <element:em>welcome</element:em> to <element:code>my app</element:code>",
146 | );
147 | });
148 |
149 | it("should handle deeply nested elements", () => {
150 | const path = getJSXElementPath(
151 | "<div><a>Hello <strong>wonderful <i><b>very</b>nested</i></strong> world</a> of the <u>universe</u></div>",
152 | );
153 | const content = extractJsxContent(path);
154 | expect(content).toBe(
155 | "<element:a>Hello <element:strong>wonderful <element:i><element:b>very</element:b>nested</element:i></element:strong> world</element:a> of the <element:u>universe</element:u>",
156 | );
157 | });
158 | });
159 |
160 | describe("function calls", () => {
161 | it("should extract function calls with placeholders", () => {
162 | const path = getJSXElementPath(
163 | "<div>Hello {getName(user)} you have {getCount()} items</div>",
164 | );
165 | const content = extractJsxContent(path);
166 | expect(content).toBe(
167 | "Hello <function:getName/> you have <function:getCount/> items",
168 | );
169 | });
170 |
171 | it("should handle mixed function calls and variables", () => {
172 | const path = getJSXElementPath(
173 | "<div>{user.name} called {getFunction()} and {getData(user.id)}</div>",
174 | );
175 | const content = extractJsxContent(path);
176 | expect(content).toBe(
177 | "{user.name} called <function:getFunction/> and <function:getData/>",
178 | );
179 | });
180 |
181 | it("should handle nested elements with function calls and variables", () => {
182 | const path = getJSXElementPath(
183 | '<div><strong>{formatName(getName(user))}</strong> has <a href="#"><em>{getCount()}</em> unread messages</a> and <em>{count} in total</em></div>',
184 | );
185 | const content = extractJsxContent(path);
186 | expect(content).toBe(
187 | "<element:strong><function:formatName/></element:strong> has <element:a><element:em><function:getCount/></element:em> unread messages</element:a> and <element:em>{count} in total</element:em>",
188 | );
189 | });
190 |
191 | it("should handle functions with chained names", () => {
192 | const path = getJSXElementPath(
193 | "<div>{getCount()} items: {user.details.products.items.map((item) => item.value).filter(value => value > 0)}</div>",
194 | );
195 | const content = extractJsxContent(path);
196 | expect(content).toBe(
197 | "<function:getCount/> items: <function:user.details.products.items.map/>",
198 | );
199 | });
200 |
201 | it("should handle multiple usages of the same function", () => {
202 | const path = getJSXElementPath(
203 | "<div>{getCount(foo)} is more than {getCount(bar)}</div>",
204 | );
205 | const content = extractJsxContent(path);
206 | expect(content).toBe(
207 | "<function:getCount/> is more than <function:getCount/>",
208 | );
209 | });
210 |
211 | it("should handle function calls on classes with 'new' keyword", () => {
212 | const path = getJSXElementPath(
213 | "<div>© {new Date().getFullYear()} vitest</div>",
214 | );
215 | const content = extractJsxContent(path);
216 | expect(content).toBe("© <function:Date.getFullYear/> vitest");
217 | });
218 | });
219 |
220 | describe("expressions", () => {
221 | it("should handle mixed content with expressions and text", () => {
222 | const path = getJSXElementPath(
223 | "<div>You have {count} new messages and {count * 2} total items.</div>",
224 | );
225 | const content = extractJsxContent(path);
226 | expect(content).toBe(
227 | "You have {count} new messages and <expression/> total items.",
228 | );
229 | });
230 |
231 | it("should handle complex expressions", () => {
232 | const path = getJSXElementPath(
233 | "<div>{isAdmin ? 'Admin' : 'User'} - {items.filter(i => i.active).length > 0}</div>",
234 | );
235 | const content = extractJsxContent(path);
236 | expect(content).toBe("<expression/> - <expression/>");
237 | });
238 |
239 | it("should handle mixed variables, functions and expressions", () => {
240 | const path = getJSXElementPath(
241 | "<div>{count + 1} by {user.name}, processed by {getName()} {length > 0}</div>",
242 | );
243 | const content = extractJsxContent(path);
244 | expect(content).toBe(
245 | "<expression/> by {user.name}, processed by <function:getName/> <expression/>",
246 | );
247 | });
248 |
249 | it("should handle expressions in nested elements", () => {
250 | const path = getJSXElementPath(
251 | "<div><p>Count: {items.length + offset}</p><span>Active: {items.filter(i => i.active).length > 0}</span></div>",
252 | );
253 | const content = extractJsxContent(path);
254 | expect(content).toBe(
255 | "<element:p>Count: <expression/></element:p><element:span>Active: <expression/></element:span>",
256 | );
257 | });
258 | });
259 | });
260 | });
261 |
```
--------------------------------------------------------------------------------
/packages/cli/src/cli/loaders/typescript/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { parse } from "@babel/parser";
2 | import _ from "lodash";
3 | import babelTraverseModule from "@babel/traverse";
4 | import type { NodePath } from "@babel/traverse";
5 | import * as t from "@babel/types";
6 | import babelGenerateModule from "@babel/generator";
7 | import { ILoader } from "../_types";
8 | import { createLoader } from "../_utils";
9 | import { resolveCjsExport } from "./cjs-interop";
10 |
11 | const traverse = resolveCjsExport(babelTraverseModule, "@babel/traverse");
12 | const generate = resolveCjsExport(babelGenerateModule, "@babel/generator");
13 |
14 | export default function createTypescriptLoader(): ILoader<
15 | string,
16 | Record<string, any>
17 | > {
18 | return createLoader({
19 | pull: async (locale, input) => {
20 | if (!input) {
21 | return {};
22 | }
23 |
24 | const ast = parseTypeScript(input);
25 | const extractedStrings = extractStringsFromDefaultExport(ast);
26 | return extractedStrings;
27 | },
28 | push: async (
29 | locale,
30 | data,
31 | originalInput,
32 | defaultLocale,
33 | pullInput,
34 | pullOutput,
35 | ) => {
36 | const ast = parseTypeScript(originalInput || "");
37 | const finalData = _.merge({}, pullOutput, data);
38 | updateStringsInDefaultExport(ast, finalData);
39 |
40 | const { code } = generate(ast, {
41 | jsescOption: {
42 | minimal: true,
43 | },
44 | });
45 | return code;
46 | },
47 | });
48 | }
49 |
50 | /**
51 | * Parse TypeScript code into an AST
52 | */
53 | function parseTypeScript(input: string) {
54 | return parse(input, {
55 | sourceType: "module",
56 | plugins: ["typescript"],
57 | });
58 | }
59 |
60 | /**
61 | * Extract the localizable (string literal) content from the default export
62 | * and return it as a nested object that mirrors the original structure.
63 | */
64 | function extractStringsFromDefaultExport(ast: t.File): Record<string, any> {
65 | let extracted: Record<string, any> = {};
66 |
67 | traverse(ast, {
68 | ExportDefaultDeclaration(path: NodePath<t.ExportDefaultDeclaration>) {
69 | const { declaration } = path.node;
70 |
71 | const decl = unwrapTSAsExpression(declaration);
72 |
73 | if (t.isObjectExpression(decl)) {
74 | extracted = objectExpressionToObject(decl);
75 | } else if (t.isArrayExpression(decl)) {
76 | extracted = arrayExpressionToArray(decl) as unknown as Record<
77 | string,
78 | any
79 | >;
80 | } else if (t.isIdentifier(decl)) {
81 | // Handle: const foo = {...}; export default foo;
82 | const binding = path.scope.bindings[decl.name];
83 | if (
84 | binding &&
85 | t.isVariableDeclarator(binding.path.node) &&
86 | binding.path.node.init
87 | ) {
88 | const initRaw = binding.path.node.init;
89 | const init = initRaw ? unwrapTSAsExpression(initRaw) : initRaw;
90 | if (t.isObjectExpression(init)) {
91 | extracted = objectExpressionToObject(init);
92 | } else if (t.isArrayExpression(init)) {
93 | extracted = arrayExpressionToArray(init) as unknown as Record<
94 | string,
95 | any
96 | >;
97 | }
98 | }
99 | }
100 | },
101 | });
102 |
103 | return extracted;
104 | }
105 |
106 | /**
107 | * Helper: unwraps nested TSAsExpression nodes (e.g. `obj as const`)
108 | * to get to the underlying expression/node we care about.
109 | */
110 | function unwrapTSAsExpression<T extends t.Node>(node: T): t.Node {
111 | let current: t.Node = node;
112 | // TSAsExpression is produced for `expr as const` assertions.
113 | // We want to get to the underlying expression so that the rest of the
114 | // loader logic can work unchanged.
115 | // There could theoretically be multiple nested `as const` assertions, so we
116 | // unwrap in a loop.
117 | // eslint-disable-next-line no-constant-condition
118 | while (t.isTSAsExpression(current)) {
119 | current = current.expression;
120 | }
121 | return current;
122 | }
123 |
124 | /**
125 | * Recursively converts an `ObjectExpression` into a plain JavaScript object that
126 | * only contains the string-literal values we care about. Non-string primitives
127 | * (numbers, booleans, etc.) are ignored.
128 | */
129 | function objectExpressionToObject(
130 | objectExpression: t.ObjectExpression,
131 | ): Record<string, any> {
132 | const obj: Record<string, any> = {};
133 |
134 | objectExpression.properties.forEach((prop) => {
135 | if (!t.isObjectProperty(prop)) return;
136 |
137 | const key = getPropertyKey(prop);
138 |
139 | if (t.isStringLiteral(prop.value)) {
140 | obj[key] = prop.value.value;
141 | } else if (
142 | t.isTemplateLiteral(prop.value) &&
143 | prop.value.expressions.length === 0
144 | ) {
145 | // Handle template literals without expressions as plain strings
146 | obj[key] = prop.value.quasis[0].value.cooked ?? "";
147 | } else if (t.isObjectExpression(prop.value)) {
148 | const nested = objectExpressionToObject(prop.value);
149 | if (Object.keys(nested).length > 0) {
150 | obj[key] = nested;
151 | }
152 | } else if (t.isArrayExpression(prop.value)) {
153 | const arr = arrayExpressionToArray(prop.value);
154 | if (arr.length > 0) {
155 | obj[key] = arr;
156 | }
157 | }
158 | });
159 |
160 | return obj;
161 | }
162 |
163 | /**
164 | * Recursively converts an `ArrayExpression` into a JavaScript array that
165 | * contains string literals and nested objects/arrays when relevant.
166 | */
167 | function arrayExpressionToArray(arrayExpression: t.ArrayExpression): any[] {
168 | const arr: any[] = [];
169 |
170 | arrayExpression.elements.forEach((element) => {
171 | if (!element) return; // holes in the array
172 |
173 | if (t.isStringLiteral(element)) {
174 | arr.push(element.value);
175 | } else if (
176 | t.isTemplateLiteral(element) &&
177 | element.expressions.length === 0
178 | ) {
179 | arr.push(element.quasis[0].value.cooked ?? "");
180 | } else if (t.isObjectExpression(element)) {
181 | const nestedObj = objectExpressionToObject(element);
182 | arr.push(nestedObj);
183 | } else if (t.isArrayExpression(element)) {
184 | arr.push(arrayExpressionToArray(element));
185 | }
186 | });
187 |
188 | return arr;
189 | }
190 |
191 | // ------------------ updating helpers (nested data) ------------------------
192 |
193 | function updateStringsInDefaultExport(
194 | ast: t.File,
195 | data: Record<string, any>,
196 | ): boolean {
197 | let modified = false;
198 |
199 | traverse(ast, {
200 | ExportDefaultDeclaration(path: NodePath<t.ExportDefaultDeclaration>) {
201 | const { declaration } = path.node;
202 |
203 | const decl = unwrapTSAsExpression(declaration);
204 |
205 | if (t.isObjectExpression(decl)) {
206 | modified = updateStringsInObjectExpression(decl, data) || modified;
207 | } else if (t.isArrayExpression(decl)) {
208 | if (Array.isArray(data)) {
209 | modified = updateStringsInArrayExpression(decl, data) || modified;
210 | }
211 | } else if (t.isIdentifier(decl)) {
212 | modified = updateStringsInExportedIdentifier(path, data) || modified;
213 | }
214 | },
215 | });
216 |
217 | return modified;
218 | }
219 |
220 | function updateStringsInObjectExpression(
221 | objectExpression: t.ObjectExpression,
222 | data: Record<string, any>,
223 | ): boolean {
224 | let modified = false;
225 |
226 | objectExpression.properties.forEach((prop) => {
227 | if (!t.isObjectProperty(prop)) return;
228 |
229 | const key = getPropertyKey(prop);
230 | const incomingVal = data?.[key];
231 |
232 | if (incomingVal === undefined) {
233 | // nothing to update for this key
234 | return;
235 | }
236 |
237 | if (t.isStringLiteral(prop.value) && typeof incomingVal === "string") {
238 | if (prop.value.value !== incomingVal) {
239 | prop.value.value = incomingVal;
240 | modified = true;
241 | }
242 | } else if (
243 | t.isTemplateLiteral(prop.value) &&
244 | prop.value.expressions.length === 0 &&
245 | typeof incomingVal === "string"
246 | ) {
247 | const currentVal = prop.value.quasis[0].value.cooked ?? "";
248 | if (currentVal !== incomingVal) {
249 | // Replace the existing template literal with an updated one
250 | prop.value.quasis[0].value.raw = incomingVal;
251 | prop.value.quasis[0].value.cooked = incomingVal;
252 | modified = true;
253 | }
254 | } else if (
255 | t.isObjectExpression(prop.value) &&
256 | typeof incomingVal === "object" &&
257 | !Array.isArray(incomingVal)
258 | ) {
259 | const subModified = updateStringsInObjectExpression(
260 | prop.value,
261 | incomingVal,
262 | );
263 | modified = subModified || modified;
264 | } else if (t.isArrayExpression(prop.value) && Array.isArray(incomingVal)) {
265 | const subModified = updateStringsInArrayExpression(
266 | prop.value,
267 | incomingVal,
268 | );
269 | modified = subModified || modified;
270 | }
271 | });
272 |
273 | return modified;
274 | }
275 |
276 | function updateStringsInArrayExpression(
277 | arrayExpression: t.ArrayExpression,
278 | incoming: any[],
279 | ): boolean {
280 | let modified = false;
281 |
282 | arrayExpression.elements.forEach((element, index) => {
283 | if (!element) return;
284 |
285 | const incomingVal = incoming?.[index];
286 | if (incomingVal === undefined) return;
287 |
288 | if (t.isStringLiteral(element) && typeof incomingVal === "string") {
289 | if (element.value !== incomingVal) {
290 | element.value = incomingVal;
291 | modified = true;
292 | }
293 | } else if (
294 | t.isTemplateLiteral(element) &&
295 | element.expressions.length === 0 &&
296 | typeof incomingVal === "string"
297 | ) {
298 | const currentVal = element.quasis[0].value.cooked ?? "";
299 | if (currentVal !== incomingVal) {
300 | element.quasis[0].value.raw = incomingVal;
301 | element.quasis[0].value.cooked = incomingVal;
302 | modified = true;
303 | }
304 | } else if (
305 | t.isObjectExpression(element) &&
306 | typeof incomingVal === "object" &&
307 | !Array.isArray(incomingVal)
308 | ) {
309 | const subModified = updateStringsInObjectExpression(element, incomingVal);
310 | modified = subModified || modified;
311 | } else if (t.isArrayExpression(element) && Array.isArray(incomingVal)) {
312 | const subModified = updateStringsInArrayExpression(element, incomingVal);
313 | modified = subModified || modified;
314 | }
315 | });
316 |
317 | return modified;
318 | }
319 |
320 | function updateStringsInExportedIdentifier(
321 | path: NodePath<t.ExportDefaultDeclaration>,
322 | data: Record<string, any>,
323 | ): boolean {
324 |   const exportName = (unwrapTSAsExpression(path.node.declaration) as t.Identifier).name;
325 | const binding = path.scope.bindings[exportName];
326 |
327 | if (!binding || !binding.path.node) return false;
328 |
329 | if (t.isVariableDeclarator(binding.path.node) && binding.path.node.init) {
330 | const initRaw = binding.path.node.init;
331 | const init = initRaw ? unwrapTSAsExpression(initRaw) : initRaw;
332 | if (t.isObjectExpression(init)) {
333 | return updateStringsInObjectExpression(init, data);
334 | } else if (t.isArrayExpression(init)) {
335 | return updateStringsInArrayExpression(init, data as any[]);
336 | }
337 | }
338 |
339 | return false;
340 | }
341 |
342 | /**
343 | * Get the string key from an object property
344 | */
345 | function getPropertyKey(prop: t.ObjectProperty): string {
346 | if (t.isIdentifier(prop.key)) {
347 | return prop.key.name;
348 | } else if (t.isStringLiteral(prop.key)) {
349 | return prop.key.value;
350 | } else if (t.isNumericLiteral(prop.key)) {
351 | return String(prop.key.value);
352 | }
353 | return String(prop.key);
354 | }
355 |
```
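To make the loader's extraction step concrete, here is an illustrative input/output pair; the module source is invented, and the commented result mirrors what extractStringsFromDefaultExport does per the code above (string literals kept, other primitives dropped, `as const` unwrapped, identifier exports resolved through their binding):

```typescript
// Illustrative module content passed to the loader's `pull` step.
const exampleSource = `
const messages = {
  title: "Hello",
  nested: { items: ["One", "Two"], count: 42 },
} as const;
export default messages;
`;

// Expected extraction result (strings only; the numeric "count" is ignored):
// { title: "Hello", nested: { items: ["One", "Two"] } }
```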
--------------------------------------------------------------------------------
/packages/compiler/src/lib/lcp/cache.spec.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
2 | import { resolve } from "path";
3 | import { LCPCache, LCPCacheParams } from "./cache";
4 | import * as fs from "fs";
5 | import * as prettier from "prettier";
6 | import { LCPSchema } from "./schema";
7 | import { LCP_DICTIONARY_FILE_NAME } from "../../_const";
8 |
9 | vi.mock("fs");
10 | vi.mock("prettier");
11 |
12 | // cached JSON is stored in a JS file, so we need to add an export default to make it a valid JS module
13 | function toCachedString(cache: any) {
14 | return `export default ${JSON.stringify(cache, null, 2)};`;
15 | }
16 |
17 | describe("LCPCache", () => {
18 | const lcp: LCPSchema = {
19 | version: 0.1,
20 | files: {
21 | "test.ts": {
22 | scopes: {
23 | key1: {
24 | hash: "123",
25 | },
26 | newKey: {
27 | hash: "111",
28 | },
29 | },
30 | },
31 | "old.ts": {
32 | scopes: {
33 | oldKey: {
34 | hash: "456",
35 | },
36 | },
37 | },
38 | "new.ts": {
39 | scopes: {
40 | brandNew: {
41 | hash: "222",
42 | },
43 | },
44 | },
45 | },
46 | };
47 | const params: LCPCacheParams = {
48 | sourceRoot: ".",
49 | lingoDir: ".lingo",
50 | lcp,
51 | };
52 | const cachePath = resolve(
53 | process.cwd(),
54 | params.sourceRoot,
55 | params.lingoDir,
56 | LCP_DICTIONARY_FILE_NAME,
57 | );
58 |
59 | beforeEach(() => {
60 | vi.clearAllMocks();
61 | vi.mocked(prettier.format).mockImplementation(
62 | async (value: string) => value,
63 | );
64 | });
65 |
66 | describe("readLocaleDictionary", () => {
67 | it("returns empty dictionary when no cache exists", () => {
68 | vi.mocked(fs.existsSync).mockReturnValue(false);
69 |
70 | const dictionary = LCPCache.readLocaleDictionary("en", params);
71 |
72 | expect(dictionary).toEqual({
73 | version: 0.1,
74 | locale: "en",
75 | files: {},
76 | });
77 | });
78 |
79 | it("returns empty dictionary when cache exists but has no entries for requested locale", () => {
80 | vi.mocked(fs.existsSync).mockReturnValue(true);
81 | vi.mocked(fs.readFileSync).mockReturnValue(
82 | toCachedString({
83 | version: 0.1,
84 | files: {
85 | "test.ts": {
86 | entries: {
87 | key1: {
88 | content: {
89 | fr: "Bonjour",
90 | },
91 | },
92 | },
93 | },
94 | },
95 | }),
96 | );
97 |
98 | const dictionary = LCPCache.readLocaleDictionary("en", params);
99 |
100 | expect(dictionary).toEqual({
101 | version: 0.1,
102 | locale: "en",
103 | files: {},
104 | });
105 | });
106 |
107 |     it("returns dictionary entries with matching hash for requested locale when cache exists", () => {
108 | vi.mocked(fs.existsSync).mockReturnValue(true);
109 | vi.mocked(fs.readFileSync).mockReturnValue(
110 | toCachedString({
111 | version: 0.1,
112 | files: {
113 | "test.ts": {
114 | entries: {
115 | key1: {
116 | content: {
117 | en: "Hello",
118 | fr: "Bonjour",
119 | },
120 | hash: "123",
121 | },
122 | newKey: {
123 | content: {
124 | en: "New",
125 | fr: "Nouveau",
126 | },
127 | hash: "888",
128 | },
129 | },
130 | },
131 | "somewhere-else.ts": {
132 | entries: {
133 | somethingElse: {
134 | content: {
135 | en: "Something else",
136 | fr: "Autre chose",
137 | },
138 | hash: "222",
139 | },
140 | },
141 | },
142 | },
143 | }),
144 | );
145 |
146 | const dictionary = LCPCache.readLocaleDictionary("en", params);
147 |
148 | expect(dictionary).toEqual({
149 | version: 0.1,
150 | locale: "en",
151 | files: {
152 | "new.ts": {
153 | entries: {
154 |             brandNew: "Something else", // found in somewhere-else.ts under a different key via matching hash
155 | },
156 | },
157 | "test.ts": {
158 | entries: {
159 | key1: "Hello", // found in test.ts under the same key via matching hash
160 | },
161 | },
162 | },
163 | });
164 | });
165 | });
166 |
167 | describe("writeLocaleDictionary", () => {
168 | it("creates new cache when no cache exists", async () => {
169 | vi.mocked(fs.existsSync).mockReturnValue(false);
170 | vi.mocked(fs.writeFileSync);
171 |
172 | const dictionary = {
173 | version: 0.1,
174 | locale: "en",
175 | files: {
176 | "test.ts": {
177 | entries: {
178 | key1: "Hello",
179 | },
180 | },
181 | },
182 | };
183 |
184 | await LCPCache.writeLocaleDictionary(dictionary, params);
185 |
186 | expect(fs.writeFileSync).toHaveBeenCalledWith(
187 | cachePath,
188 | toCachedString({
189 | version: 0.1,
190 | files: {
191 | "test.ts": {
192 | entries: {
193 | key1: {
194 | content: {
195 | en: "Hello",
196 | },
197 | hash: "123",
198 | },
199 | },
200 | },
201 | },
202 | }),
203 | );
204 | });
205 |
206 | it("adds new locale to existing cache", async () => {
207 | vi.mocked(fs.existsSync).mockReturnValue(true);
208 | vi.mocked(fs.readFileSync).mockReturnValue(
209 | toCachedString({
210 | version: 0.1,
211 | files: {
212 | "test.ts": {
213 | entries: {
214 | key1: {
215 | content: {
216 | en: "Hello",
217 | },
218 | hash: "123",
219 | },
220 | },
221 | },
222 | },
223 | }),
224 | );
225 | vi.mocked(fs.writeFileSync);
226 |
227 | const dictionary = {
228 | version: 0.1,
229 | locale: "fr",
230 | files: {
231 | "test.ts": {
232 | entries: {
233 | key1: "Bonjour",
234 | },
235 | },
236 | },
237 | };
238 |
239 | await LCPCache.writeLocaleDictionary(dictionary, params);
240 |
241 | expect(fs.writeFileSync).toHaveBeenCalledWith(
242 | cachePath,
243 | toCachedString({
244 | version: 0.1,
245 | files: {
246 | "test.ts": {
247 | entries: {
248 | key1: {
249 | content: {
250 | en: "Hello",
251 | fr: "Bonjour",
252 | },
253 | hash: "123",
254 | },
255 | },
256 | },
257 | },
258 | }),
259 | );
260 | });
261 |
262 | it("overrides existing locale entries in cache", async () => {
263 | vi.mocked(fs.existsSync).mockReturnValue(true);
264 | vi.mocked(fs.readFileSync).mockReturnValue(
265 | toCachedString({
266 | version: 0.1,
267 | files: {
268 | "test.ts": {
269 | entries: {
270 | key1: {
271 | content: {
272 | en: "Hello",
273 | fr: "Bonjour",
274 | },
275 | hash: "123",
276 | },
277 | },
278 | },
279 | },
280 | }),
281 | );
282 | vi.mocked(fs.writeFileSync);
283 |
284 | const dictionary = {
285 | version: 0.1,
286 | locale: "en",
287 | files: {
288 | "test.ts": {
289 | entries: {
290 | key1: "Hi",
291 | },
292 | },
293 | },
294 | };
295 |
296 | await LCPCache.writeLocaleDictionary(dictionary, params);
297 |
298 | expect(fs.writeFileSync).toHaveBeenCalledWith(
299 | cachePath,
300 | toCachedString({
301 | version: 0.1,
302 | files: {
303 | "test.ts": {
304 | entries: {
305 | key1: {
306 | content: {
307 | en: "Hi",
308 | fr: "Bonjour",
309 | },
310 | hash: "123",
311 | },
312 | },
313 | },
314 | },
315 | }),
316 | );
317 | });
318 |
319 | it("handles different files and entries between cache and dictionary", async () => {
320 | vi.mocked(fs.existsSync).mockReturnValue(true);
321 | vi.mocked(fs.readFileSync).mockReturnValue(
322 | toCachedString({
323 | version: 0.1,
324 | files: {
325 | "old.ts": {
326 | entries: {
327 | oldKey: {
328 | content: {
329 | en: "Old",
330 | fr: "Vieux",
331 | },
332 | hash: "456",
333 | },
334 | },
335 | },
336 | "test.ts": {
337 | entries: {
338 | key1: {
339 | content: {
340 | en: "Hello",
341 | fr: "Bonjour",
342 | },
343 | hash: "123",
344 | },
345 | newKey: {
346 | content: {
347 | en: "New",
348 | fr: "Nouveau",
349 | },
350 | hash: "111",
351 | },
352 | },
353 | },
354 | },
355 | }),
356 | );
357 | vi.mocked(fs.writeFileSync);
358 |
359 | const dictionary = {
360 | version: 0.1,
361 | locale: "en",
362 | files: {
363 | "test.ts": {
364 | entries: {
365 | key1: "Hi",
366 | newKey: "Newer",
367 | },
368 | },
369 | "new.ts": {
370 | entries: {
371 | brandNew: "Brand New",
372 | },
373 | },
374 | },
375 | };
376 |
377 | await LCPCache.writeLocaleDictionary(dictionary, params);
378 |
379 | expect(fs.writeFileSync).toHaveBeenCalledWith(
380 | cachePath,
381 | toCachedString({
382 | version: 0.1,
383 | files: {
384 | "new.ts": {
385 | entries: {
386 | brandNew: {
387 | content: {
388 | en: "Brand New",
389 | },
390 | hash: "222",
391 | },
392 | },
393 | },
394 | "test.ts": {
395 | entries: {
396 | key1: {
397 | content: {
398 | en: "Hi",
399 | fr: "Bonjour",
400 | },
401 | hash: "123",
402 | },
403 | newKey: {
404 | content: {
405 | en: "Newer",
406 | fr: "Nouveau",
407 | },
408 | hash: "111",
409 | },
410 | },
411 | },
412 | },
413 | }),
414 | );
415 | });
416 |
417 | it("formats the cache with prettier", async () => {
418 | vi.mocked(prettier.resolveConfig).mockResolvedValue({});
419 | vi.mocked(prettier.format).mockResolvedValue("formatted");
420 |
421 | const dictionary = {
422 | version: 0.1,
423 | locale: "en",
424 | files: {
425 | "test.ts": {
426 | entries: {
427 | key1: "Hi",
428 | },
429 | },
430 | },
431 | };
432 |
433 | await LCPCache.writeLocaleDictionary(dictionary, params);
434 |
435 | expect(prettier.resolveConfig).toHaveBeenCalledTimes(1);
436 | expect(prettier.format).toHaveBeenCalledTimes(1);
437 | expect(fs.writeFileSync).toHaveBeenCalledWith(cachePath, "formatted");
438 | });
439 | });
440 | });
441 |
```
--------------------------------------------------------------------------------
/packages/cli/src/cli/cmd/run/execute.spec.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, vi, beforeEach } from "vitest";
2 | import pLimit from "p-limit";
3 |
4 | /**
5 | * Tests for the per-file I/O locking mechanism in execute.ts
6 | *
7 | * This tests the critical race condition fix where multiple concurrent tasks
8 | * writing to the same file (e.g., xcode-xcstrings with multiple locales)
9 | * could cause "Cannot convert undefined or null to object" errors.
10 | */
11 | describe("execute.ts - Per-file I/O locking", () => {
12 | describe("getFileIoLimiter", () => {
13 | it("should create separate limiters for different files", () => {
14 | const perFileIoLimiters = new Map();
15 | const getFileIoLimiter = (bucketPathPattern: string) => {
16 | const lockKey = bucketPathPattern;
17 | if (!perFileIoLimiters.has(lockKey)) {
18 | perFileIoLimiters.set(lockKey, pLimit(1));
19 | }
20 | return perFileIoLimiters.get(lockKey)!;
21 | };
22 |
23 | const limiter1 = getFileIoLimiter("example.xcstrings");
24 | const limiter2 = getFileIoLimiter("messages.json");
25 | const limiter3 = getFileIoLimiter("example.xcstrings");
26 |
27 | // Same file should return same limiter instance
28 | expect(limiter1).toBe(limiter3);
29 | // Different files should have different limiters
30 | expect(limiter1).not.toBe(limiter2);
31 | });
32 |
33 | it("should use pattern as-is without manipulation", () => {
34 | const perFileIoLimiters = new Map();
35 | const getFileIoLimiter = (bucketPathPattern: string) => {
36 | const lockKey = bucketPathPattern;
37 | if (!perFileIoLimiters.has(lockKey)) {
38 | perFileIoLimiters.set(lockKey, pLimit(1));
39 | }
40 | return perFileIoLimiters.get(lockKey)!;
41 | };
42 |
43 | // Test various pattern formats
44 | const patterns = [
45 | "example.xcstrings", // Single-file, no locale
46 | "src/[locale]/messages.json", // Multi-file with [locale]
47 | "locales/[locale].json", // Multi-file with [locale]
48 | "[locale]-config.json", // Multi-file starting with [locale]
49 |       "locale-data.json", // Contains the word "locale" but no [locale] placeholder
50 | ];
51 |
52 | const limiters = patterns.map((p) => getFileIoLimiter(p));
53 |
54 | // All should be unique (no patterns accidentally grouped)
55 | const uniqueLimiters = new Set(limiters);
56 | expect(uniqueLimiters.size).toBe(patterns.length);
57 | });
58 | });
59 |
60 | describe("Per-file serialization", () => {
61 | it("should serialize I/O operations for the same file", async () => {
62 | const perFileIoLimiters = new Map();
63 | const getFileIoLimiter = (bucketPathPattern: string) => {
64 | const lockKey = bucketPathPattern;
65 | if (!perFileIoLimiters.has(lockKey)) {
66 | perFileIoLimiters.set(lockKey, pLimit(1));
67 | }
68 | return perFileIoLimiters.get(lockKey)!;
69 | };
70 |
71 | const operations: { id: number; start: number; end: number }[] = [];
72 |
73 | // Simulate 3 concurrent tasks writing to the same file
74 | const tasks = [
75 | { id: 1, file: "example.xcstrings" },
76 | { id: 2, file: "example.xcstrings" },
77 | { id: 3, file: "example.xcstrings" },
78 | ];
79 |
80 | await Promise.all(
81 | tasks.map(async (task) => {
82 | const limiter = getFileIoLimiter(task.file);
83 | await limiter(async () => {
84 | const start = Date.now();
85 | await new Promise((resolve) => setTimeout(resolve, 50));
86 | const end = Date.now();
87 | operations.push({ id: task.id, start, end });
88 | });
89 | }),
90 | );
91 |
92 | // Verify operations were serialized (no overlap)
93 | operations.sort((a, b) => a.start - b.start);
94 | for (let i = 0; i < operations.length - 1; i++) {
95 | const current = operations[i];
96 | const next = operations[i + 1];
97 | // Next operation should start after current ends (serialized)
98 | expect(next.start).toBeGreaterThanOrEqual(current.end);
99 | }
100 | });
101 |
102 | it("should allow concurrent I/O operations for different files", async () => {
103 | const perFileIoLimiters = new Map();
104 | const getFileIoLimiter = (bucketPathPattern: string) => {
105 | const lockKey = bucketPathPattern;
106 | if (!perFileIoLimiters.has(lockKey)) {
107 | perFileIoLimiters.set(lockKey, pLimit(1));
108 | }
109 | return perFileIoLimiters.get(lockKey)!;
110 | };
111 |
112 | const operations: {
113 | id: number;
114 | file: string;
115 | start: number;
116 | end: number;
117 | }[] = [];
118 |
119 | // Simulate concurrent tasks writing to different files
120 | const tasks = [
121 | { id: 1, file: "example.xcstrings" },
122 | { id: 2, file: "messages.json" },
123 | { id: 3, file: "strings.xml" },
124 | ];
125 |
126 | await Promise.all(
127 | tasks.map(async (task) => {
128 | const limiter = getFileIoLimiter(task.file);
129 | await limiter(async () => {
130 | const start = Date.now();
131 | await new Promise((resolve) => setTimeout(resolve, 50));
132 | const end = Date.now();
133 | operations.push({ id: task.id, file: task.file, start, end });
134 | });
135 | }),
136 | );
137 |
138 | // Verify that at least some operations overlapped (ran concurrently)
139 | operations.sort((a, b) => a.start - b.start);
140 | let hasOverlap = false;
141 | for (let i = 0; i < operations.length - 1; i++) {
142 | const current = operations[i];
143 | const next = operations[i + 1];
144 | // If next starts before current ends, they overlapped
145 | if (next.start < current.end) {
146 | hasOverlap = true;
147 | break;
148 | }
149 | }
150 | expect(hasOverlap).toBe(true);
151 | });
152 | });
153 |
154 | describe("Race condition prevention", () => {
155 | it("should prevent concurrent read/write race conditions", async () => {
156 | const perFileIoLimiters = new Map();
157 | const getFileIoLimiter = (bucketPathPattern: string) => {
158 | const lockKey = bucketPathPattern;
159 | if (!perFileIoLimiters.has(lockKey)) {
160 | perFileIoLimiters.set(lockKey, pLimit(1));
161 | }
162 | return perFileIoLimiters.get(lockKey)!;
163 | };
164 |
165 | // Simulate a shared file state
166 | let fileContent: Record<string, string> = {};
167 | const operations: string[] = [];
168 |
169 | // Multiple tasks reading and writing to the same file
170 | const tasks = Array.from({ length: 5 }, (_, i) => ({
171 | id: i + 1,
172 | file: "example.xcstrings",
173 | }));
174 |
175 | await Promise.all(
176 | tasks.map(async (task) => {
177 | const limiter = getFileIoLimiter(task.file);
178 | await limiter(async () => {
179 | // Read
180 | operations.push(
181 | `Task ${task.id}: Read ${JSON.stringify(fileContent)}`,
182 | );
183 | const currentContent = { ...fileContent };
184 |
185 | // Simulate processing
186 | await new Promise((resolve) => setTimeout(resolve, 10));
187 |
188 | // Write
189 | currentContent[`key${task.id}`] = `value${task.id}`;
190 | fileContent = currentContent;
191 | operations.push(
192 | `Task ${task.id}: Write ${JSON.stringify(fileContent)}`,
193 | );
194 | });
195 | }),
196 | );
197 |
198 | // Verify all keys were written (no lost updates)
199 | expect(Object.keys(fileContent).length).toBe(5); // 5 new keys
200 | expect(fileContent).toHaveProperty("key1");
201 | expect(fileContent).toHaveProperty("key2");
202 | expect(fileContent).toHaveProperty("key3");
203 | expect(fileContent).toHaveProperty("key4");
204 | expect(fileContent).toHaveProperty("key5");
205 | });
206 | });
207 |
208 | describe("Hints handling", () => {
209 | it("should not block hints reading unnecessarily", async () => {
210 | const perFileIoLimiters = new Map();
211 | const getFileIoLimiter = (bucketPathPattern: string) => {
212 | const lockKey = bucketPathPattern;
213 | if (!perFileIoLimiters.has(lockKey)) {
214 | perFileIoLimiters.set(lockKey, pLimit(1));
215 | }
216 | return perFileIoLimiters.get(lockKey)!;
217 | };
218 |
219 | const fileIoLimiter = getFileIoLimiter("example.xcstrings");
220 |
221 | // Simulate the actual execution order
222 | const sourceData = await fileIoLimiter(async () => {
223 | // Simulate file read
224 | await new Promise((resolve) => setTimeout(resolve, 10));
225 | return { key1: "value1" };
226 | });
227 |
228 | const hints = await fileIoLimiter(async () => {
229 |         // Hints don't read the file; they only process in-memory data
230 | return { key1: { hint: "hint1" } };
231 | });
232 |
233 | const targetData = await fileIoLimiter(async () => {
234 | // Simulate file read
235 | await new Promise((resolve) => setTimeout(resolve, 10));
236 | return { key1: "translated1" };
237 | });
238 |
239 | // All should complete successfully
240 | expect(sourceData).toEqual({ key1: "value1" });
241 | expect(hints).toEqual({ key1: { hint: "hint1" } });
242 | expect(targetData).toEqual({ key1: "translated1" });
243 | });
244 | });
245 |
246 | describe("Edge cases", () => {
247 | it("should handle empty pattern gracefully", () => {
248 | const perFileIoLimiters = new Map();
249 | const getFileIoLimiter = (bucketPathPattern: string) => {
250 | const lockKey = bucketPathPattern;
251 | if (!perFileIoLimiters.has(lockKey)) {
252 | perFileIoLimiters.set(lockKey, pLimit(1));
253 | }
254 | return perFileIoLimiters.get(lockKey)!;
255 | };
256 |
257 | const limiter1 = getFileIoLimiter("");
258 | const limiter2 = getFileIoLimiter("");
259 |
260 | expect(limiter1).toBe(limiter2);
261 | });
262 |
263 | it("should handle patterns with special characters", () => {
264 | const perFileIoLimiters = new Map();
265 | const getFileIoLimiter = (bucketPathPattern: string) => {
266 | const lockKey = bucketPathPattern;
267 | if (!perFileIoLimiters.has(lockKey)) {
268 | perFileIoLimiters.set(lockKey, pLimit(1));
269 | }
270 | return perFileIoLimiters.get(lockKey)!;
271 | };
272 |
273 | const patterns = [
274 | "file with spaces.json",
275 | "path/with/nested/dirs.json",
276 | "файл-с-unicode.json",
277 | "file-with-[brackets].json",
278 | "file.with.dots.in.name.json",
279 | ];
280 |
281 | patterns.forEach((pattern) => {
282 | expect(() => getFileIoLimiter(pattern)).not.toThrow();
283 | });
284 | });
285 |
286 | it("should maintain separate limiters across many files", () => {
287 | const perFileIoLimiters = new Map();
288 | const getFileIoLimiter = (bucketPathPattern: string) => {
289 | const lockKey = bucketPathPattern;
290 | if (!perFileIoLimiters.has(lockKey)) {
291 | perFileIoLimiters.set(lockKey, pLimit(1));
292 | }
293 | return perFileIoLimiters.get(lockKey)!;
294 | };
295 |
296 | // Create limiters for 100 different files
297 | const limiters = Array.from({ length: 100 }, (_, i) =>
298 | getFileIoLimiter(`file${i}.json`),
299 | );
300 |
301 | // All should be unique
302 | const uniqueLimiters = new Set(limiters);
303 | expect(uniqueLimiters.size).toBe(100);
304 |
305 | // Map should contain 100 entries
306 | expect(perFileIoLimiters.size).toBe(100);
307 | });
308 | });
309 | });
310 |
```
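
Each test above re-creates the same private helper (a `Map` of `pLimit(1)` limiters keyed by the raw bucket path pattern) because the real one is not exported from execute.ts. The sketch below shows that helper plus an illustrative read-modify-write wrapper around it; `saveTranslations` and the `io` callbacks are hypothetical stand-ins, not the actual execute.ts API, but the locking pattern is exactly what the tests exercise:

```typescript
import pLimit from "p-limit";

type Limiter = ReturnType<typeof pLimit>;

// One serializing limiter per bucket path pattern, created lazily on first use.
const perFileIoLimiters = new Map<string, Limiter>();

function getFileIoLimiter(bucketPathPattern: string): Limiter {
  let limiter = perFileIoLimiters.get(bucketPathPattern);
  if (!limiter) {
    limiter = pLimit(1);
    perFileIoLimiters.set(bucketPathPattern, limiter);
  }
  return limiter;
}

// Hypothetical wrapper: two locales targeting the same .xcstrings file can never
// interleave their read and write, while unrelated files still run in parallel.
async function saveTranslations(
  bucketPathPattern: string,
  locale: string,
  translated: Record<string, string>,
  io: {
    read: (pattern: string, locale: string) => Promise<Record<string, string>>;
    write: (
      pattern: string,
      locale: string,
      data: Record<string, string>,
    ) => Promise<void>;
  },
): Promise<void> {
  const limiter = getFileIoLimiter(bucketPathPattern);
  await limiter(async () => {
    const current = await io.read(bucketPathPattern, locale);
    await io.write(bucketPathPattern, locale, { ...current, ...translated });
  });
}
```

Keying on the pattern string as-is (no normalization) is what the "use pattern as-is" test asserts: `locale-data.json` and `locales/[locale].json` must never share a lock just because both mention "locale".
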
--------------------------------------------------------------------------------
/scripts/docs/src/json-schema/parser.ts:
--------------------------------------------------------------------------------
```typescript
1 | import type {
2 | JSONSchemaObject,
3 | PropertyInfo,
4 | SchemaParsingOptions,
5 | } from "./types";
6 |
7 | export function resolveRef(ref: string, root: unknown): unknown {
8 | if (!ref.startsWith("#/")) return undefined;
9 | const pathSegments = ref
10 | .slice(2) // remove "#/"
11 | .split("/")
12 | .map((seg) => decodeURIComponent(seg));
13 |
14 | let current = root;
15 | for (const segment of pathSegments) {
16 | if (current && typeof current === "object" && segment in current) {
17 | current = (current as Record<string, unknown>)[segment];
18 | } else {
19 | return undefined;
20 | }
21 | }
22 | return current;
23 | }
24 |
25 | export function sortPropertyKeys(
26 | keys: string[],
27 | requiredKeys: string[] = [],
28 | customOrder: string[] = [],
29 | ): string[] {
30 | const keySet = new Set(keys);
31 | const requiredSet = new Set(requiredKeys);
32 |
33 | // Start with custom ordered keys that exist in the properties
34 | const orderedKeys: string[] = [];
35 | for (const key of customOrder) {
36 | if (keySet.has(key)) {
37 | orderedKeys.push(key);
38 | keySet.delete(key);
39 | }
40 | }
41 |
42 | // Handle remaining keys - separate into required and optional
43 | const remainingKeys = Array.from(keySet);
44 | const remainingRequired: string[] = [];
45 | const remainingOptional: string[] = [];
46 |
47 | for (const key of remainingKeys) {
48 | if (requiredSet.has(key)) {
49 | remainingRequired.push(key);
50 | } else {
51 | remainingOptional.push(key);
52 | }
53 | }
54 |
55 | // Sort alphabetically within each group
56 | remainingRequired.sort((a, b) => a.localeCompare(b));
57 | remainingOptional.sort((a, b) => a.localeCompare(b));
58 |
59 | return [...orderedKeys, ...remainingRequired, ...remainingOptional];
60 | }
61 |
62 | export function inferType(schema: unknown, root: unknown): string {
63 | if (!schema || typeof schema !== "object") return "unknown";
64 |
65 | const schemaObj = schema as JSONSchemaObject;
66 |
67 | // Handle $ref at the root level
68 | if (schemaObj.$ref) {
69 | return inferTypeFromRef(schemaObj.$ref, root);
70 | }
71 |
72 | // Handle type property
73 | if (schemaObj.type) {
74 | return inferTypeFromType(schemaObj, root);
75 | }
76 |
77 | // Handle union types (anyOf) at the top level
78 | if (Array.isArray(schemaObj.anyOf)) {
79 | return inferTypeFromAnyOf(schemaObj.anyOf, root);
80 | }
81 |
82 | return "unknown";
83 | }
84 |
85 | function inferTypeFromRef(ref: string, root: unknown): string {
86 | const resolved = resolveRef(ref, root);
87 | if (resolved) {
88 | return inferType(resolved, root);
89 | }
90 | return String(ref).split("/").pop() || "unknown";
91 | }
92 |
93 | function inferTypeFromType(schemaObj: JSONSchemaObject, root: unknown): string {
94 | // Handle array of types
95 | if (Array.isArray(schemaObj.type)) {
96 | return schemaObj.type.join(" | ");
97 | }
98 |
99 | if (schemaObj.type === "array") {
100 | return inferTypeFromArray(schemaObj, root);
101 | }
102 |
103 | return String(schemaObj.type);
104 | }
105 |
106 | function inferTypeFromArray(
107 | schemaObj: JSONSchemaObject,
108 | root: unknown,
109 | ): string {
110 | const items = schemaObj.items;
111 | if (!items || typeof items !== "object") {
112 | return "array";
113 | }
114 |
115 | const itemsObj = items as JSONSchemaObject;
116 |
117 | // Array with $ref items
118 | if (itemsObj.$ref) {
119 | return `array of ${inferTypeFromRef(itemsObj.$ref, root)}`;
120 | }
121 |
122 | // Array with anyOf union types
123 | if (Array.isArray(itemsObj.anyOf)) {
124 | const types = itemsObj.anyOf.map((item) => inferType(item, root));
125 | return `array of ${types.join(" | ")}`;
126 | }
127 |
128 | // Array with direct type(s)
129 | if (itemsObj.type) {
130 | if (Array.isArray(itemsObj.type)) {
131 | return `array of ${itemsObj.type.join(" | ")}`;
132 | }
133 | return `array of ${itemsObj.type}`;
134 | }
135 |
136 | // Array of object or unknown
137 | return `array of ${inferType(items, root)}`;
138 | }
139 |
140 | function inferTypeFromAnyOf(anyOfArr: unknown[], root: unknown): string {
141 | const types = anyOfArr.map((item) => inferType(item, root));
142 | return types.join(" | ");
143 | }
144 |
145 | function extractAllowedValues(schema: JSONSchemaObject): unknown[] | undefined {
146 | if (!Array.isArray(schema.enum)) return undefined;
147 | return Array.from(new Set(schema.enum)).sort((a, b) =>
148 | String(a).localeCompare(String(b)),
149 | );
150 | }
151 |
152 | function extractAllowedKeys(schema: JSONSchemaObject): string[] | undefined {
153 | if (
154 | !schema.propertyNames ||
155 | typeof schema.propertyNames !== "object" ||
156 | !Array.isArray(schema.propertyNames.enum)
157 | ) {
158 | return undefined;
159 | }
160 | const allowedKeys = schema.propertyNames.enum as string[];
161 | if (allowedKeys.length === 0) return undefined;
162 | return Array.from(new Set(allowedKeys)).sort((a, b) => a.localeCompare(b));
163 | }
164 |
165 | export function parseProperty(
166 | name: string,
167 | schema: unknown,
168 | required: boolean,
169 | options: SchemaParsingOptions = {},
170 | ): PropertyInfo[] {
171 | if (!schema || typeof schema !== "object") return [];
172 |
173 | const { parentPath = "", rootSchema = schema } = options;
174 | const schemaObj = schema as JSONSchemaObject;
175 | const fullPath = parentPath ? `${parentPath}.${name}` : name;
176 |
177 | const description = schemaObj.markdownDescription ?? schemaObj.description;
178 |
179 | const property: PropertyInfo = {
180 | name,
181 | fullPath,
182 | type: inferType(schema, rootSchema),
183 | required,
184 | description,
185 | defaultValue: schemaObj.default,
186 | allowedValues: extractAllowedValues(schemaObj),
187 | allowedKeys: extractAllowedKeys(schemaObj),
188 | };
189 |
190 | const result: PropertyInfo[] = [property];
191 |
192 | // Add children for nested properties
193 | const children = parseNestedProperties(schema, fullPath, rootSchema);
194 | if (children.length > 0) {
195 | property.children = children;
196 | }
197 |
198 | return result;
199 | }
200 |
201 | function parseNestedProperties(
202 | schema: unknown,
203 | fullPath: string,
204 | rootSchema: unknown,
205 | ): PropertyInfo[] {
206 | if (!schema || typeof schema !== "object") return [];
207 |
208 | const schemaObj = schema as JSONSchemaObject;
209 | const children: PropertyInfo[] = [];
210 |
211 | // Recurse into nested properties for objects
212 | if (schemaObj.type === "object") {
213 | if (schemaObj.properties && typeof schemaObj.properties === "object") {
214 | const properties = schemaObj.properties;
215 | const nestedRequired = Array.isArray(schemaObj.required)
216 | ? schemaObj.required
217 | : [];
218 | const sortedKeys = sortPropertyKeys(
219 | Object.keys(properties),
220 | nestedRequired,
221 | );
222 | for (const key of sortedKeys) {
223 | children.push(
224 | ...parseProperty(key, properties[key], nestedRequired.includes(key), {
225 | parentPath: fullPath,
226 | rootSchema,
227 | }),
228 | );
229 | }
230 | }
231 |
232 | // Handle schemas that use `additionalProperties`
233 | if (
234 | schemaObj.additionalProperties &&
235 | typeof schemaObj.additionalProperties === "object"
236 | ) {
237 | children.push(
238 | ...parseProperty("*", schemaObj.additionalProperties, false, {
239 | parentPath: fullPath,
240 | rootSchema,
241 | }),
242 | );
243 | }
244 | }
245 |
246 | // Recurse into items for arrays of objects
247 | if (schemaObj.type === "array" && schemaObj.items) {
248 | const items = schemaObj.items as JSONSchemaObject;
249 | const itemSchema = items.$ref
250 | ? resolveRef(items.$ref, rootSchema) || items
251 | : items;
252 |
253 | // Handle union types in array items (anyOf)
254 | if (Array.isArray(items.anyOf)) {
255 | items.anyOf.forEach((unionItem) => {
256 | let resolvedItem = unionItem;
257 | if (unionItem && typeof unionItem === "object") {
258 | const unionItemObj = unionItem as JSONSchemaObject;
259 | if (unionItemObj.$ref) {
260 | resolvedItem =
261 | resolveRef(unionItemObj.$ref, rootSchema) || unionItem;
262 | }
263 | }
264 |
265 | if (
266 | resolvedItem &&
267 | typeof resolvedItem === "object" &&
268 | ((resolvedItem as JSONSchemaObject).type === "object" ||
269 | (resolvedItem as JSONSchemaObject).properties)
270 | ) {
271 | const resolvedItemObj = resolvedItem as JSONSchemaObject;
272 | const nestedRequired = Array.isArray(resolvedItemObj.required)
273 | ? resolvedItemObj.required
274 | : [];
275 | const properties = resolvedItemObj.properties || {};
276 | const sortedKeys = sortPropertyKeys(
277 | Object.keys(properties),
278 | nestedRequired,
279 | );
280 | for (const key of sortedKeys) {
281 | children.push(
282 | ...parseProperty(
283 | key,
284 | properties[key],
285 | nestedRequired.includes(key),
286 | {
287 | parentPath: `${fullPath}.*`,
288 | rootSchema,
289 | },
290 | ),
291 | );
292 | }
293 | }
294 | });
295 | } else if (
296 | itemSchema &&
297 | typeof itemSchema === "object" &&
298 | ((itemSchema as JSONSchemaObject).type === "object" ||
299 | (itemSchema as JSONSchemaObject).properties)
300 | ) {
301 | // Handle regular object items (non-union)
302 | const itemSchemaObj = itemSchema as JSONSchemaObject;
303 | const nestedRequired = Array.isArray(itemSchemaObj.required)
304 | ? itemSchemaObj.required
305 | : [];
306 | const properties = itemSchemaObj.properties || {};
307 | const sortedKeys = sortPropertyKeys(
308 | Object.keys(properties),
309 | nestedRequired,
310 | );
311 | for (const key of sortedKeys) {
312 | children.push(
313 | ...parseProperty(key, properties[key], nestedRequired.includes(key), {
314 | parentPath: `${fullPath}.*`,
315 | rootSchema,
316 | }),
317 | );
318 | }
319 |
320 | // Handle additionalProperties inside array items if present
321 | if (
322 | itemSchemaObj.additionalProperties &&
323 | typeof itemSchemaObj.additionalProperties === "object"
324 | ) {
325 | children.push(
326 | ...parseProperty("*", itemSchemaObj.additionalProperties, false, {
327 | parentPath: `${fullPath}.*`,
328 | rootSchema,
329 | }),
330 | );
331 | }
332 | }
333 | }
334 |
335 | return children;
336 | }
337 |
338 | export function parseSchema(
339 | schema: unknown,
340 | options: SchemaParsingOptions = {},
341 | ): PropertyInfo[] {
342 | if (!schema || typeof schema !== "object") {
343 | return [];
344 | }
345 |
346 | const schemaObj = schema as JSONSchemaObject;
347 | const { customOrder = [] } = options;
348 | const rootRef = schemaObj.$ref as string | undefined;
349 | const rootName: string = rootRef
350 | ? (rootRef.split("/").pop() ?? "I18nConfig")
351 | : "I18nConfig";
352 |
353 | let rootSchema: unknown;
354 | if (
355 | rootRef &&
356 | schemaObj.definitions &&
357 | typeof schemaObj.definitions === "object"
358 | ) {
359 | const definitions = schemaObj.definitions as Record<string, unknown>;
360 | rootSchema = definitions[rootName];
361 | } else {
362 | rootSchema = schema;
363 | }
364 |
365 | if (!rootSchema || typeof rootSchema !== "object") {
366 | console.log(`Could not find root schema: ${rootName}`);
367 | return [];
368 | }
369 |
370 | const rootSchemaObj = rootSchema as JSONSchemaObject;
371 | const required = Array.isArray(rootSchemaObj.required)
372 | ? rootSchemaObj.required
373 | : [];
374 |
375 | if (
376 | !rootSchemaObj.properties ||
377 | typeof rootSchemaObj.properties !== "object"
378 | ) {
379 | return [];
380 | }
381 |
382 | const properties = rootSchemaObj.properties;
383 | const sortedKeys = sortPropertyKeys(
384 | Object.keys(properties),
385 | required,
386 | customOrder,
387 | );
388 | const result: PropertyInfo[] = [];
389 |
390 | for (const key of sortedKeys) {
391 | result.push(
392 | ...parseProperty(key, properties[key], required.includes(key), {
393 | rootSchema: schema,
394 | }),
395 | );
396 | }
397 |
398 | return result;
399 | }
400 |
```
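
For orientation, here is a small usage sketch of `parseSchema` against the kind of document it expects (a top-level `$ref` pointing into `definitions`); the example schema and the `customOrder` value are made up for illustration:

```typescript
import { parseSchema } from "./parser";

// Illustrative input: root $ref into definitions, one required and one optional key.
const schema = {
  $ref: "#/definitions/I18nConfig",
  definitions: {
    I18nConfig: {
      type: "object",
      required: ["locale"],
      properties: {
        locale: {
          type: "object",
          required: ["source", "targets"],
          properties: {
            source: { type: "string", description: "Source locale code." },
            targets: {
              type: "array",
              items: { type: "string" },
              description: "Target locale codes.",
            },
          },
        },
        version: { type: "number", default: 1.8 },
      },
    },
  },
};

const properties = parseSchema(schema, { customOrder: ["version"] });
// Custom-ordered keys come first, then the remaining required keys alphabetically:
// properties[0] -> { name: "version", type: "number", required: false, defaultValue: 1.8, ... }
// properties[1] -> { name: "locale", type: "object", required: true,
//                    children: [source ("string"), targets ("array of string")] }
```
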