From 23fe0f0c36975c623586beec81364fb26e85b973 Mon Sep 17 00:00:00 2001 From: jorenbroekema Date: Thu, 8 Aug 2024 09:25:01 +0200 Subject: [PATCH] feat: add tokenMap properties and convertTokenData util --- .changeset/ninety-adults-sing.md | 5 + .../__snapshots__/customFormats.test.snap.js | 80 ++++++++---- __perf_tests__/basic.test.js | 3 +- __tests__/StyleDictionary.test.js | 4 +- .../createPropertyFormatter.test.js | 8 +- __tests__/filterTokens.test.js | 19 ++- __tests__/formats/all.test.js | 8 +- __tests__/formats/androidCompose.test.js | 6 +- __tests__/formats/androidResources.test.js | 11 +- __tests__/formats/es6Constants.test.js | 4 +- __tests__/formats/es6Module.test.js | 15 ++- __tests__/formats/es6ModuleMinify.test.js | 4 +- __tests__/formats/javascriptModule.test.js | 4 +- .../formats/javascriptModuleFlat.test.js | 4 +- __tests__/formats/javascriptObject.test.js | 4 +- __tests__/formats/javascriptUmd.test.js | 4 +- __tests__/formats/json.test.js | 15 ++- __tests__/formats/jsonFlat.test.js | 7 +- __tests__/formats/jsonNested.test.js | 4 +- __tests__/formats/lessIcons.test.js | 4 +- __tests__/formats/lessVariables.test.js | 4 +- __tests__/formats/scssIcons.test.js | 4 +- __tests__/formats/scssMaps.test.js | 6 +- __tests__/formats/scssVariables.test.js | 11 +- __tests__/formats/stylusVariable.test.js | 4 +- __tests__/formats/swiftFile.test.js | 10 +- .../formats/typeScriptEs6Declarations.test.js | 6 +- .../typeScriptModuleDeclarations.test.js | 4 +- __tests__/utils/convertTokenData.test.js | 110 ++++++++++++++++ __tests__/utils/flattenTokens.test.js | 64 ++++++++- .../getting-started/using_the_npm_module.md | 2 +- .../docs/reference/Hooks/Formats/index.md | 26 ++-- .../docs/reference/Utils/format-helpers.md | 27 ++-- .../src/content/docs/reference/Utils/index.md | 2 +- .../content/docs/reference/Utils/tokens.md | 105 ++++++++++++++- docs/src/content/docs/reference/api.md | 3 +- lib/StyleDictionary.js | 78 +++++++---- lib/filterTokens.js | 5 + 
lib/utils/convertToTokenObject.js | 43 ++++++ lib/utils/convertTokenData.js | 123 ++++++++++++++++++ lib/utils/flattenTokens.js | 44 ++++--- lib/utils/index.js | 2 + types/Config.ts | 1 + types/DesignToken.ts | 6 + 44 files changed, 725 insertions(+), 178 deletions(-) create mode 100644 .changeset/ninety-adults-sing.md create mode 100644 __tests__/utils/convertTokenData.test.js create mode 100644 lib/utils/convertToTokenObject.js create mode 100644 lib/utils/convertTokenData.js diff --git a/.changeset/ninety-adults-sing.md b/.changeset/ninety-adults-sing.md new file mode 100644 index 000000000..0077c8990 --- /dev/null +++ b/.changeset/ninety-adults-sing.md @@ -0,0 +1,5 @@ +--- +'style-dictionary': minor +--- + +Add `tokenMap` properties to Dictionary, which is a JavaScript Map structure of the tokens, which makes it easy to iterate as well as access tokens. Also add `convertTokenData` utility that allows you to seamlessly convert between Map, Object or Array of tokens, and deprecate the `flattenTokens` utility in favor of that one. 
diff --git a/__integration__/__snapshots__/customFormats.test.snap.js b/__integration__/__snapshots__/customFormats.test.snap.js index d2ca28e1c..69ec2448c 100644 --- a/__integration__/__snapshots__/customFormats.test.snap.js +++ b/__integration__/__snapshots__/customFormats.test.snap.js @@ -114,7 +114,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "small" - ] + ], + "key": "{size.padding.small}" }, { "value": "1rem", @@ -135,7 +136,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "medium" - ] + ], + "key": "{size.padding.medium}" }, { "value": "1rem", @@ -156,7 +158,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "large" - ] + ], + "key": "{size.padding.large}" }, { "value": "1rem", @@ -177,9 +180,11 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "xl" - ] + ], + "key": "{size.padding.xl}" } ], + "tokenMap": {}, "unfilteredTokens": { "size": { "padding": { @@ -290,7 +295,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "small" - ] + ], + "key": "{size.padding.small}" }, { "value": "1rem", @@ -311,7 +317,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "medium" - ] + ], + "key": "{size.padding.medium}" }, { "value": "1rem", @@ -332,7 +339,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "large" - ] + ], + "key": "{size.padding.large}" }, { "value": "1rem", @@ -353,9 +361,11 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "xl" - ] + ], + "key": "{size.padding.xl}" } - ] + ], + "unfilteredTokenMap": {} }, "allTokens": [ { @@ -377,7 +387,8 @@ snapshots["integration custom formats inline custom with new args should match s 
"size", "padding", "small" - ] + ], + "key": "{size.padding.small}" }, { "value": "1rem", @@ -398,7 +409,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "medium" - ] + ], + "key": "{size.padding.medium}" }, { "value": "1rem", @@ -419,7 +431,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "large" - ] + ], + "key": "{size.padding.large}" }, { "value": "1rem", @@ -440,7 +453,8 @@ snapshots["integration custom formats inline custom with new args should match s "size", "padding", "xl" - ] + ], + "key": "{size.padding.xl}" } ], "tokens": { @@ -1075,7 +1089,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "small" - ] + ], + "key": "{size.padding.small}" }, { "value": "1rem", @@ -1096,7 +1111,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "medium" - ] + ], + "key": "{size.padding.medium}" }, { "value": "1rem", @@ -1117,7 +1133,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "large" - ] + ], + "key": "{size.padding.large}" }, { "value": "1rem", @@ -1138,9 +1155,11 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "xl" - ] + ], + "key": "{size.padding.xl}" } ], + "tokenMap": {}, "unfilteredTokens": { "size": { "padding": { @@ -1251,7 +1270,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "small" - ] + ], + "key": "{size.padding.small}" }, { "value": "1rem", @@ -1272,7 +1292,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "medium" - ] + ], + "key": "{size.padding.medium}" }, { "value": "1rem", @@ -1293,7 +1314,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "large" - ] + ], + 
"key": "{size.padding.large}" }, { "value": "1rem", @@ -1314,9 +1336,11 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "xl" - ] + ], + "key": "{size.padding.xl}" } - ] + ], + "unfilteredTokenMap": {} }, "allTokens": [ { @@ -1338,7 +1362,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "small" - ] + ], + "key": "{size.padding.small}" }, { "value": "1rem", @@ -1359,7 +1384,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "medium" - ] + ], + "key": "{size.padding.medium}" }, { "value": "1rem", @@ -1380,7 +1406,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "large" - ] + ], + "key": "{size.padding.large}" }, { "value": "1rem", @@ -1401,7 +1428,8 @@ snapshots["integration custom formats register custom format with new args shoul "size", "padding", "xl" - ] + ], + "key": "{size.padding.xl}" } ], "tokens": { diff --git a/__perf_tests__/basic.test.js b/__perf_tests__/basic.test.js index c4055512d..644c753df 100644 --- a/__perf_tests__/basic.test.js +++ b/__perf_tests__/basic.test.js @@ -108,7 +108,8 @@ describe('cliBuildWithJsConfig', () => { expect(end - start).to.be.below(70); }); - // TODO: aim for <1000ms (maybe a bit more margin due to CI CPUs being slower) + // TODO: this should be way lower in the future when transform/resolve are using tokenMap + // and refs are getting cached it('should run tons of refs within 2750ms', async () => { // 9000 tokens, 6000 refs // (first layer is raw values, other 2 layers are refs to previous layer) diff --git a/__tests__/StyleDictionary.test.js b/__tests__/StyleDictionary.test.js index 220177b4b..f5c6033a7 100644 --- a/__tests__/StyleDictionary.test.js +++ b/__tests__/StyleDictionary.test.js @@ -17,7 +17,7 @@ import chalk from 'chalk'; import { fileToJSON, clearOutput, fileExists, clearSDMeta } from 
'./__helpers.js'; import { resolve } from '../lib/resolve.js'; import GroupMessages from '../lib/utils/groupMessages.js'; -import flattenTokens from '../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../lib/utils/convertTokenData.js'; import { stripMeta } from '../lib/utils/stripMeta.js'; import formats from '../lib/common/formats.js'; import { restore, stubMethod } from 'hanbi'; @@ -1145,7 +1145,7 @@ ${dictionary.allTokens.map((tok) => ` ${tok.name}: "${tok.value}";`).join('\n') }, { tokens: tokens, - allTokens: flattenTokens(tokens), + allTokens: convertTokenData(tokens, { output: 'array' }), }, ); await expect(output).to.matchSnapshot(); diff --git a/__tests__/common/formatHelpers/createPropertyFormatter.test.js b/__tests__/common/formatHelpers/createPropertyFormatter.test.js index 7086504d8..057bf443b 100644 --- a/__tests__/common/formatHelpers/createPropertyFormatter.test.js +++ b/__tests__/common/formatHelpers/createPropertyFormatter.test.js @@ -12,7 +12,7 @@ */ import { expect } from 'chai'; import createPropertyFormatter from '../../../lib/common/formatHelpers/createPropertyFormatter.js'; -import flattenTokens from '../../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../../lib/utils/convertTokenData.js'; import { outputReferencesFilter } from '../../../lib/utils/references/outputReferencesFilter.js'; const dictionary = { @@ -257,7 +257,7 @@ describe('common', () => { }, }; const tokens = { ...unfilteredTokens }; - const allTokens = flattenTokens(tokens); + const allTokens = convertTokenData(tokens, { output: 'array' }); const propFormatter = createPropertyFormatter({ dictionary: { tokens, @@ -317,7 +317,7 @@ describe('common', () => { }; const tokens = { ...unfilteredTokens }; delete tokens.foo; - const allTokens = flattenTokens(tokens); + const allTokens = convertTokenData(tokens, { output: 'array' }); const propFormatter = createPropertyFormatter({ dictionary: { tokens, @@ -378,7 +378,7 @@ describe('common', () => 
{ }; const tokens = { ...unfilteredTokens }; delete tokens.foo; - const allTokens = flattenTokens(tokens, true); + const allTokens = convertTokenData(tokens, { output: 'array', usesDtcg: true }); const propFormatter = createPropertyFormatter({ dictionary: { tokens, diff --git a/__tests__/filterTokens.test.js b/__tests__/filterTokens.test.js index c643b7f8e..bd1836a35 100644 --- a/__tests__/filterTokens.test.js +++ b/__tests__/filterTokens.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import filterTokens from '../lib/filterTokens.js'; import { clearOutput } from './__helpers.js'; -import flattenTokens from '../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../lib/utils/convertTokenData.js'; const colorRed = { value: '#FF0000', @@ -123,7 +123,7 @@ const random_meta_tokens = { const random_meta_dictionary = { tokens: random_meta_tokens, - allTokens: flattenTokens(random_meta_tokens), + allTokens: convertTokenData(random_meta_tokens, { output: 'array' }), }; const falsy_values = { @@ -133,12 +133,12 @@ const falsy_values = { const dictionary = { tokens, - allTokens: flattenTokens(tokens), + allTokens: convertTokenData(tokens, { output: 'array' }), }; const falsy_dictionary = { tokens: falsy_values, - allTokens: flattenTokens(falsy_values), + allTokens: convertTokenData(falsy_values, { output: 'array' }), }; describe('filterTokens', () => { @@ -161,7 +161,10 @@ describe('filterTokens', () => { expect(token).to.not.equal(colorRed); expect(token).not.to.not.equal(colorBlue); }); - expect(filteredDictionary.allTokens).to.eql([sizeSmall, sizeLarge]); + expect(filteredDictionary.allTokens).to.eql([ + { ...sizeSmall, key: '{size.small}' }, + { ...sizeLarge, key: '{size.large}' }, + ]); expect(filteredDictionary.tokens).to.have.property('size'); expect(filteredDictionary.tokens).to.not.have.property('color'); }); @@ -173,7 +176,7 @@ describe('filterTokens', () => { filteredDictionary.allTokens.forEach((token) => { 
expect(token).to.not.equal(not_kept); }); - expect(filteredDictionary.allTokens).to.eql([kept]); + expect(filteredDictionary.allTokens).to.eql([{ ...kept, key: '{kept}' }]); expect(filteredDictionary.tokens).to.have.property('kept'); expect(filteredDictionary.tokens).to.not.have.property('not_kept'); }); @@ -184,7 +187,9 @@ describe('filterTokens', () => { }; const filteredDictionary = await filterTokens(random_meta_dictionary, filter); - expect(filteredDictionary.allTokens).to.eql([random_meta_tokens.foo.bar]); + expect(filteredDictionary.allTokens).to.eql([ + { ...random_meta_tokens.foo.bar, key: '{foo.bar}' }, + ]); expect(filteredDictionary.tokens).to.have.nested.property('foo.bar'); expect(filteredDictionary.tokens).to.not.have.property('qux'); }); diff --git a/__tests__/formats/all.test.js b/__tests__/formats/all.test.js index 983b8aac5..de2233866 100644 --- a/__tests__/formats/all.test.js +++ b/__tests__/formats/all.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; import { deepmerge } from '../../lib/utils/deepmerge.js'; const file = { @@ -45,7 +45,7 @@ describe('formats', async () => { it('should match ' + key + ' snapshot', async () => { const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -65,7 +65,7 @@ describe('formats', async () => { }); const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file: _file, platform: {}, }), @@ -78,7 +78,7 @@ describe('formats', async () => { it('should return ' + 
key + ' as a string', async () => { const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/androidCompose.test.js b/__tests__/formats/androidCompose.test.js index 64197edb5..173a579eb 100644 --- a/__tests__/formats/androidCompose.test.js +++ b/__tests__/formats/androidCompose.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const tokens = { size: { @@ -75,7 +75,7 @@ describe('formats', () => { it('should match default snapshot', async () => { const f = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -101,7 +101,7 @@ describe('formats', () => { }; const f = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/androidResources.test.js b/__tests__/formats/androidResources.test.js index fdb01385f..a85564f27 100644 --- a/__tests__/formats/androidResources.test.js +++ b/__tests__/formats/androidResources.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const tokens = { size: { @@ -100,7 +100,7 @@ describe('formats', () => { it('should 
match default snapshot', async () => { const f = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -114,7 +114,7 @@ describe('formats', () => { const file = { options: { resourceType: 'dimen' } }; const f = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -136,7 +136,10 @@ describe('formats', () => { }; const f = await format( createFormatArgs({ - dictionary: { tokens: customTokens, allTokens: flattenTokens(customTokens) }, + dictionary: { + tokens: customTokens, + allTokens: convertTokenData(customTokens, { output: 'array' }), + }, file, platform: {}, }), diff --git a/__tests__/formats/es6Constants.test.js b/__tests__/formats/es6Constants.test.js index 0a2c50aeb..b2440228f 100644 --- a/__tests__/formats/es6Constants.test.js +++ b/__tests__/formats/es6Constants.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -44,7 +44,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/es6Module.test.js b/__tests__/formats/es6Module.test.js index b6738174a..4296b91fa 100644 --- a/__tests__/formats/es6Module.test.js +++ b/__tests__/formats/es6Module.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; 
import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -89,7 +89,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -103,7 +103,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, options: { @@ -120,7 +120,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, options: { @@ -139,7 +139,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, options: { @@ -158,7 +158,10 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens: DTCGTokens, allTokens: flattenTokens(DTCGTokens) }, + dictionary: { + tokens: DTCGTokens, + allTokens: convertTokenData(DTCGTokens, { output: 'array', usesDtcg: true }), + }, file, platform: {}, options: { diff --git a/__tests__/formats/es6ModuleMinify.test.js b/__tests__/formats/es6ModuleMinify.test.js index d9ffe0497..6fc78aa3c 100644 --- a/__tests__/formats/es6ModuleMinify.test.js +++ b/__tests__/formats/es6ModuleMinify.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from 
'../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -40,7 +40,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/javascriptModule.test.js b/__tests__/formats/javascriptModule.test.js index 8d5037fff..ef205c51d 100644 --- a/__tests__/formats/javascriptModule.test.js +++ b/__tests__/formats/javascriptModule.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -37,7 +37,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/javascriptModuleFlat.test.js b/__tests__/formats/javascriptModuleFlat.test.js index 77eb342fb..80a0d01aa 100644 --- a/__tests__/formats/javascriptModuleFlat.test.js +++ b/__tests__/formats/javascriptModuleFlat.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -40,7 +40,7 @@ describe('formats', () => { await expect( await format( 
createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/javascriptObject.test.js b/__tests__/formats/javascriptObject.test.js index e46ee869d..8a0aa249f 100644 --- a/__tests__/formats/javascriptObject.test.js +++ b/__tests__/formats/javascriptObject.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -35,7 +35,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/javascriptUmd.test.js b/__tests__/formats/javascriptUmd.test.js index 74d4a2fbb..f46d1096e 100644 --- a/__tests__/formats/javascriptUmd.test.js +++ b/__tests__/formats/javascriptUmd.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -37,7 +37,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/json.test.js b/__tests__/formats/json.test.js index 87bfd94a3..9fb918fd5 100644 --- 
a/__tests__/formats/json.test.js +++ b/__tests__/formats/json.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -86,7 +86,7 @@ describe('formats', () => { await expect( format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -100,7 +100,7 @@ describe('formats', () => { await expect( format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, options: { @@ -117,7 +117,7 @@ describe('formats', () => { await expect( format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, options: { @@ -136,7 +136,7 @@ describe('formats', () => { await expect( format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, options: { @@ -155,7 +155,10 @@ describe('formats', () => { await expect( format( createFormatArgs({ - dictionary: { tokens: DTCGTokens, allTokens: flattenTokens(DTCGTokens) }, + dictionary: { + tokens: DTCGTokens, + allTokens: convertTokenData(DTCGTokens, { output: 'array' }), + }, file, platform: {}, options: { diff --git a/__tests__/formats/jsonFlat.test.js b/__tests__/formats/jsonFlat.test.js index fa2333da2..86ed035b8 100644 --- a/__tests__/formats/jsonFlat.test.js +++ b/__tests__/formats/jsonFlat.test.js @@ -13,7 +13,7 @@ import { 
expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const colorTokenName = 'color-base-red-400'; const colorTokenValue = '#EF5350'; @@ -48,7 +48,10 @@ describe('formats', () => { await expect( format( createFormatArgs({ - dictionary: { tokens: colorTokens, allTokens: flattenTokens(colorTokens) }, + dictionary: { + tokens: colorTokens, + allTokens: convertTokenData(colorTokens, { output: 'array' }), + }, file, platform: {}, }), diff --git a/__tests__/formats/jsonNested.test.js b/__tests__/formats/jsonNested.test.js index 2030c06f4..3b0fcb576 100644 --- a/__tests__/formats/jsonNested.test.js +++ b/__tests__/formats/jsonNested.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: 'output/', @@ -43,7 +43,7 @@ describe('formats', function () { await expect( format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/lessIcons.test.js b/__tests__/formats/lessIcons.test.js index a691247a8..707df30ed 100644 --- a/__tests__/formats/lessIcons.test.js +++ b/__tests__/formats/lessIcons.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; import { isNode } 
from '../../lib/utils/isNode.js'; const file = { @@ -61,7 +61,7 @@ describe('formats', () => { it('should have a valid less syntax and match snapshot', async () => { const result = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform, }), diff --git a/__tests__/formats/lessVariables.test.js b/__tests__/formats/lessVariables.test.js index ffa9454f0..048f4e931 100644 --- a/__tests__/formats/lessVariables.test.js +++ b/__tests__/formats/lessVariables.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; import { isNode } from '../../lib/utils/isNode.js'; const file = { @@ -49,7 +49,7 @@ describe('formats', () => { it('should have a valid less syntax and match snapshot', async () => { const result = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/scssIcons.test.js b/__tests__/formats/scssIcons.test.js index c60e90ab6..2f4dc20c5 100644 --- a/__tests__/formats/scssIcons.test.js +++ b/__tests__/formats/scssIcons.test.js @@ -14,7 +14,7 @@ import { expect } from 'chai'; import { compileString } from 'sass'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -61,7 +61,7 @@ describe('formats', () => { it('should have a valid scss syntax and match snapshot', async 
() => { const result = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform, }), diff --git a/__tests__/formats/scssMaps.test.js b/__tests__/formats/scssMaps.test.js index 833f6f772..2f8c90136 100644 --- a/__tests__/formats/scssMaps.test.js +++ b/__tests__/formats/scssMaps.test.js @@ -14,7 +14,7 @@ import { expect } from 'chai'; import { compileString } from 'sass'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const tokens = { size: { @@ -86,7 +86,7 @@ describe('formats', () => { const format = formats[key].bind(file); const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -122,7 +122,7 @@ describe('formats', () => { }; const result = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/scssVariables.test.js b/__tests__/formats/scssVariables.test.js index 5dbedf8d5..34217904c 100644 --- a/__tests__/formats/scssVariables.test.js +++ b/__tests__/formats/scssVariables.test.js @@ -14,7 +14,7 @@ import { expect } from 'chai'; import { compileString } from 'sass'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -49,7 +49,7 @@ describe('formats', 
() => { it('should have a valid scss syntax and match snapshot', async () => { const result = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -62,10 +62,13 @@ describe('formats', () => { }); it('should optionally use !default', async () => { - const themeableDictionary = { tokens, allTokens: flattenTokens(tokens) }; + const themeableDictionary = { + tokens, + allTokens: convertTokenData(tokens, { output: 'array' }), + }; const formattedScss = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/stylusVariable.test.js b/__tests__/formats/stylusVariable.test.js index 6e868aa07..61072c981 100644 --- a/__tests__/formats/stylusVariable.test.js +++ b/__tests__/formats/stylusVariable.test.js @@ -14,7 +14,7 @@ import { expect } from 'chai'; // import stylus from 'stylus'; see comment in test below import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -49,7 +49,7 @@ describe('formats', () => { it('should have a valid stylus syntax and match snapshot', async () => { const result = format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/swiftFile.test.js b/__tests__/formats/swiftFile.test.js index 2fb4899c4..71ef8bfa5 100644 --- a/__tests__/formats/swiftFile.test.js +++ b/__tests__/formats/swiftFile.test.js @@ -13,7 +13,7 @@ import { expect 
} from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const originalFile = { destination: '__output/', @@ -54,7 +54,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -69,7 +69,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -84,7 +84,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -99,7 +99,7 @@ describe('formats', () => { await expect( await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/formats/typeScriptEs6Declarations.test.js b/__tests__/formats/typeScriptEs6Declarations.test.js index 9d990b0c0..5e6590cc9 100644 --- a/__tests__/formats/typeScriptEs6Declarations.test.js +++ b/__tests__/formats/typeScriptEs6Declarations.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -43,7 +43,7 @@ 
describe('formats', () => { it('should be a valid TS file', async () => { const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), @@ -68,7 +68,7 @@ describe('formats', () => { const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file: customFile, platform: {}, }), diff --git a/__tests__/formats/typeScriptModuleDeclarations.test.js b/__tests__/formats/typeScriptModuleDeclarations.test.js index 3072a63ee..c0a4a5dc4 100644 --- a/__tests__/formats/typeScriptModuleDeclarations.test.js +++ b/__tests__/formats/typeScriptModuleDeclarations.test.js @@ -13,7 +13,7 @@ import { expect } from 'chai'; import formats from '../../lib/common/formats.js'; import createFormatArgs from '../../lib/utils/createFormatArgs.js'; -import flattenTokens from '../../lib/utils/flattenTokens.js'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; const file = { destination: '__output/', @@ -34,7 +34,7 @@ describe('formats', () => { it('should be a valid TS file', async () => { const output = await format( createFormatArgs({ - dictionary: { tokens, allTokens: flattenTokens(tokens) }, + dictionary: { tokens, allTokens: convertTokenData(tokens, { output: 'array' }) }, file, platform: {}, }), diff --git a/__tests__/utils/convertTokenData.test.js b/__tests__/utils/convertTokenData.test.js new file mode 100644 index 000000000..29162e2fa --- /dev/null +++ b/__tests__/utils/convertTokenData.test.js @@ -0,0 +1,110 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. 
A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +import { expect } from 'chai'; +import { convertTokenData } from '../../lib/utils/convertTokenData.js'; +import { convertToDTCG } from '../../lib/utils/convertToDTCG.js'; + +const data = { + normal: { + object: { + colors: { + red: { + 500: { + value: '#f00', + foo: 'bar', + // key is usually only added by the convertTokenData + // and not part of the input, but for the sake of testing, + // we put it here initialiy + key: '{colors.red.500}', + }, + }, + green: { + 500: { + value: '#0f0', + foo: 'baz', + key: '{colors.green.500}', + }, + }, + blue: { + 500: { + value: '#00f', + foo: 'qux', + key: '{colors.blue.500}', + }, + }, + }, + }, + map: new Map([ + ['{colors.red.500}', { value: '#f00', foo: 'bar', key: '{colors.red.500}' }], + ['{colors.green.500}', { value: '#0f0', foo: 'baz', key: '{colors.green.500}' }], + ['{colors.blue.500}', { value: '#00f', foo: 'qux', key: '{colors.blue.500}' }], + ]), + array: [ + { key: '{colors.red.500}', value: '#f00', foo: 'bar' }, + { key: '{colors.green.500}', value: '#0f0', foo: 'baz' }, + { key: '{colors.blue.500}', value: '#00f', foo: 'qux' }, + ], + }, +}; + +// Bit ugly but converting the above data to DTCG style +const copyNormal = structuredClone(data.normal); +data.DTCG = {}; + +// object +data.DTCG.object = convertToDTCG(copyNormal.object, { applyTypesToGroup: false }); + +// map +data.DTCG.map = new Map(); +Array.from(copyNormal.map.entries()).forEach(([key, val]) => { + data.DTCG.map.set(key, { foo: val.foo, $value: val.value, key: val.key }); +}); + +// array +data.DTCG.array = copyNormal.array.map((item) => ({ + key: item.key, + foo: item.foo, + 
$value: item.value, +})); + +describe('utils', () => { + describe('convertTokenData', () => { + // Use the test below here if you need to debug a specific conversion, which is a bit harder to do + // in the dynamically generated tests below it. + // it.only(`should convert object to map for DTCG tokens`, () => { + // expect(convertTokenData(data['DTCG']['object'], { output: 'map', usesDtcg: true })).to.eql( + // data['DTCG']['map'], + // ); + // }); + + // The below produces: (mode * outputs * inputs) matrix -> (2 * 3 * 3) = 18 tests + // For mode "normal" and mode "DTCG" tokens + Object.keys(data).forEach((mode) => { + describe(`${mode} tests`, () => { + // For outputs array map and object + Object.keys(data[mode]).forEach((output) => { + describe(`convert to ${output}`, () => { + // Verify each input (array map and object) produces the correct result + Object.keys(data[mode]).forEach((input) => { + it(`should convert ${input} to ${output}`, () => { + expect( + convertTokenData(data[mode][input], { output, usesDtcg: mode === 'DTCG' }), + ).to.eql(data[mode][output]); + }); + }); + }); + }); + }); + }); + }); +}); diff --git a/__tests__/utils/flattenTokens.test.js b/__tests__/utils/flattenTokens.test.js index b236d8f37..d4305dd86 100644 --- a/__tests__/utils/flattenTokens.test.js +++ b/__tests__/utils/flattenTokens.test.js @@ -40,7 +40,10 @@ describe('utils', () => { }, }; - const expected_ret = [tokens.black, tokens.white]; + const expected_ret = [ + { key: '{black}', ...tokens.black }, + { key: '{white}', ...tokens.white }, + ]; const sortedExpectedRet = expected_ret.sort(sortBy('value')); const ret = flattenTokens(tokens); @@ -60,7 +63,10 @@ describe('utils', () => { }, }; - const expected_ret = [tokens.color.black, tokens.color.white]; + const expected_ret = [ + { key: '{color.black}', ...tokens.color.black }, + { key: '{color.white}', ...tokens.color.white }, + ]; const sortedExpectedRet = expected_ret.sort(sortBy('value')); const ret = flattenTokens(tokens); 
@@ -85,11 +91,63 @@ describe('utils', () => { }; const ret = flattenTokens(tokens, { usesDtcg: true }); - const expected_ret = [tokens.color.black, tokens.color.white]; + const expected_ret = [ + { key: '{color.black}', ...tokens.color.black }, + { key: '{color.white}', ...tokens.color.white }, + ]; + const sortedExpectedRet = expected_ret.sort(sortBy('value')); const sortedRet = ret.sort(sortBy('value')); expect(sortedRet).to.eql(sortedExpectedRet); }); + + it('should pass a key prop to flattened tokens matching the ancestor tree', () => { + const tokens = { + dimension: { + scale: { + value: '2', + type: 'sizing', + }, + sm: { + value: '{dimension.xs} * {dimension.scale}', + type: 'sizing', + }, + foo: { + bar: { + baz: { + value: '2', + type: 'sizing', + }, + }, + qux: { value: '2', type: 'sizing' }, + }, + lg: { + value: '{dimension.md} * {dimension.scale}', + type: 'sizing', + }, + }, + spacing: { + sm: { + value: '{dimension.sm}', + type: 'spacing', + }, + lg: { + value: '{dimension.lg}', + type: 'spacing', + }, + }, + }; + const ret = flattenTokens(tokens); + expect(ret.map((r) => r.key)).to.eql([ + '{dimension.scale}', + '{dimension.sm}', + '{dimension.foo.bar.baz}', + '{dimension.foo.qux}', + '{dimension.lg}', + '{spacing.sm}', + '{spacing.lg}', + ]); + }); }); }); diff --git a/docs/src/content/docs/getting-started/using_the_npm_module.md b/docs/src/content/docs/getting-started/using_the_npm_module.md index 986e9af1e..0cf8ec964 100644 --- a/docs/src/content/docs/getting-started/using_the_npm_module.md +++ b/docs/src/content/docs/getting-started/using_the_npm_module.md @@ -106,7 +106,7 @@ The [multi-brand-multi-platform example](https://github.com/amzn/style-dictionar There is also a utils entrypoint on the NPM module that contains helper utils. 
```js title="import-utils.js" -import { flattenTokens } from 'style-dictionary/utils'; +import { convertTokenData } from 'style-dictionary/utils'; ``` For more details, [read the utils docs](/reference/utils) diff --git a/docs/src/content/docs/reference/Hooks/Formats/index.md b/docs/src/content/docs/reference/Hooks/Formats/index.md index a31e98f8d..b0a38617f 100644 --- a/docs/src/content/docs/reference/Hooks/Formats/index.md +++ b/docs/src/content/docs/reference/Hooks/Formats/index.md @@ -156,7 +156,7 @@ export default { options: { // Look here 👇 outputReferences: (token, { dictionary, usesDtcg }) => { - // `dictionary` contains `allTokens`, `tokens` and `unfilteredTokens` props + // `dictionary` contains `allTokens`, `tokens`, `tokenMap`, `unfilteredTokens`, `unfilteredAllTokens` and `unfilteredTokenMap` props // `usesDtcg` tells you whether the Design Token Community Group spec is used with $ prefixes ($value, $type etc.) // return true or false }, @@ -272,17 +272,19 @@ You might be wondering why the return type of a format function is `unknown`. [More information about this here](#custom-return-types) ::: -| Param | Type | Description | -| ------------------------------------- | -------------------- | ----------------------------------------------------------------------------------------------------- | -| `args` | `Object` | A single argument to support named parameters and destructuring. | -| `args.dictionary` | `Dictionary` | Transformed Dictionary object containing allTokens, tokens and unfilteredTokens. | -| `args.dictionary.allTokens` | `TransformedToken[]` | Flattened array of all tokens, easiest to loop over and export to a flat format. | -| `args.dictionary.tokens` | `TransformedTokens` | All tokens, still in unflattened object format. | -| `args.dictionary.unfilteredAllTokens` | `TransformedToken[]` | Flattened array of all tokens, including tokens that were filtered out by filters. 
| -| `args.dictionary.unfilteredTokens` | `TransformedTokens` | All tokens, still in unflattened object format, including tokens that were filtered out by filters. | -| `args.platform` | `Platform` | [Platform config](/reference/config#platform) | -| `args.file` | `File` | [File config](/reference/config#file) | -| `args.options` | `Object` | Merged object with SD [Config](/reference/config#properties) & [FormatOptions](#format-configuration) | +| Param | Type | Description | +| ------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------- | +| `args` | `Object` | A single argument to support named parameters and destructuring. | +| `args.dictionary` | `Dictionary` | Transformed Dictionary object containing allTokens, tokens and unfilteredTokens. | +| `args.dictionary.allTokens` | `TransformedToken[]` | Flattened array of all tokens, easiest to loop over and export to a flat format. | +| `args.dictionary.tokens` | `TransformedTokens` | All tokens, still in unflattened object format. | +| `args.dictionary.tokenMap` | `Record` | All tokens as a JavaScript Map that's keyed, making it easy to access a single token as well as iterate through them. | +| `args.dictionary.unfilteredAllTokens` | `TransformedToken[]` | Flattened array of all tokens, including tokens that were filtered out by filters. | +| `args.dictionary.unfilteredTokens` | `TransformedTokens` | All tokens, still in unflattened object format, including tokens that were filtered out by filters. | +| `args.dictionary.unfilteredTokenMap` | `TransformedTokens` | All tokens as a JavaScript Map, including tokens that were filtered out by filters. 
| +| `args.platform` | `Platform` | [Platform config](/reference/config#platform) | +| `args.file` | `File` | [File config](/reference/config#file) | +| `args.options` | `Object` | Merged object with SD [Config](/reference/config#properties) & [FormatOptions](#format-configuration) | Example: diff --git a/docs/src/content/docs/reference/Utils/format-helpers.md b/docs/src/content/docs/reference/Utils/format-helpers.md index c403dde6f..330eee8e0 100644 --- a/docs/src/content/docs/reference/Utils/format-helpers.md +++ b/docs/src/content/docs/reference/Utils/format-helpers.md @@ -34,18 +34,21 @@ to use as the function on `dictionary.allTokens.map`. The formatting is configurable either by supplying a `format` option or a `formatting` object which uses: prefix, indentation, separator, suffix, and commentStyle. -| Param | Type | Description | -| ------------------------------------- | ------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `options` | `Object` | A single argument to support named parameters and destructuring. | -| `options.outputReferences` | `boolean \| OutputReferencesFunction` | Whether or not to output references. You will want to pass this from the `options` object sent to the format function. Also allows passing a function to conditionally output references on a per token basis. | -| `options.outputReferenceFallbacks` | `boolean` | Whether or not to output css variable fallback values when using output references. You will want to pass this from the `options` object sent to the format function. 
| -| `options.dictionary` | `Dictionary` | Transformed Dictionary object containing allTokens, tokens and unfilteredTokens. | -| `options.dictionary.allTokens` | `TransformedToken[]` | Flattened array of all tokens, easiest to loop over and export to a flat format. | -| `options.dictionary.tokens` | `TransformedTokens` | All tokens, still in unflattened object format. | -| `options.dictionary.unfilteredTokens` | `TransformedTokens` | All tokens, still in unflattened object format, including tokens that were filtered out by filters. | -| `options.format` | `string` | Available formats are: 'css', 'sass', 'less', and 'stylus'. If you want to customize the format and can't use one of those predefined formats, use the `formatting` option | -| `options.formatting` | `FormattingOptions` | Custom formatting properties that define parts of a declaration line in code. The configurable strings are: `prefix`, `indentation`, `separator`, `suffix`, `lineSeparator`, `fileHeaderTimestamp`, `header`, `footer`, `commentStyle` and `commentPosition`. Those are used to generate a line like this: `${indentation}${prefix}${token.name}${separator} ${prop.value}${suffix}`. The remaining formatting options are used for the fileHeader helper. | -| `options.themeable` | `boolean` | Whether tokens should default to being themeable. Defaults to false. 
| +| Param | Type | Description | +| ---------------------------------------- | ------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `options` | `Object` | A single argument to support named parameters and destructuring. | +| `options.outputReferences` | `boolean \| OutputReferencesFunction` | Whether or not to output references. You will want to pass this from the `options` object sent to the format function. Also allows passing a function to conditionally output references on a per token basis. | +| `options.outputReferenceFallbacks` | `boolean` | Whether or not to output css variable fallback values when using output references. You will want to pass this from the `options` object sent to the format function. | +| `options.dictionary` | `Dictionary` | Transformed Dictionary object containing allTokens, tokens and unfilteredTokens. | +| `options.dictionary.tokens` | `TransformedTokens` | All tokens, still in unflattened object format. | +| `options.dictionary.allTokens` | `TransformedToken[]` | Flattened array of all tokens, easiest to loop over and export to a flat format. | +| `options.dictionary.tokenMap` | `Record` | All tokens as JavaScript. Map | +| `options.dictionary.unfilteredTokens` | `TransformedTokens` | All tokens, still in unflattened object format, including tokens that were filtered out by filters. | +| `options.dictionary.unfilteredAllTokens` | `TransformedToken[]` | Flattened array of all tokens, easiest to loop over and export to a flat, including tokens that were filtered out by filters. 
| +| `options.dictionary.unfilteredTokenMap` | `Record` | All tokens as JavaScript Map, still in unflattened object format, including tokens that were filtered out by filters. | +| `options.format` | `string` | Available formats are: 'css', 'sass', 'less', and 'stylus'. If you want to customize the format and can't use one of those predefined formats, use the `formatting` option | +| `options.formatting` | `FormattingOptions` | Custom formatting properties that define parts of a declaration line in code. The configurable strings are: `prefix`, `indentation`, `separator`, `suffix`, `lineSeparator`, `fileHeaderTimestamp`, `header`, `footer`, `commentStyle` and `commentPosition`. Those are used to generate a line like this: `${indentation}${prefix}${token.name}${separator} ${prop.value}${suffix}`. The remaining formatting options are used for the fileHeader helper. | +| `options.themeable` | `boolean` | Whether tokens should default to being themeable. Defaults to false. | Example: diff --git a/docs/src/content/docs/reference/Utils/index.md b/docs/src/content/docs/reference/Utils/index.md index 8213d3365..24590425d 100644 --- a/docs/src/content/docs/reference/Utils/index.md +++ b/docs/src/content/docs/reference/Utils/index.md @@ -15,7 +15,7 @@ npm install -D style-dictionary ## Usage ```js title="script.js" -import { flattenTokens, resolveReferences, typeDtcgDelegate } from 'style-dictionary/utils'; +import { convertTokenData, resolveReferences, typeDtcgDelegate } from 'style-dictionary/utils'; ``` Check out the next pages for more details about the specific categories of utility functions. diff --git a/docs/src/content/docs/reference/Utils/tokens.md b/docs/src/content/docs/reference/Utils/tokens.md index a56966115..b1c75da18 100644 --- a/docs/src/content/docs/reference/Utils/tokens.md +++ b/docs/src/content/docs/reference/Utils/tokens.md @@ -2,10 +2,14 @@ title: Tokens --- -These utilities have to do with processing/formatting tokens object. 
+These utilities have to do with processing/formatting tokens. ## flattenTokens +:::caution +Deprecated in favor of [`convertTokenData`](/reference/utils/tokens#convertTokenData), see below +::: + Flatten dictionary tokens object to an array of flattened tokens. :::note @@ -38,13 +42,14 @@ const sd = new StyleDictionary({ }, }, }); +await sd.hasInitialized; -const flat = flattenTokens(sd); +const flat = flattenTokens(sd.tokens); /** * [ - * { value: '#000', type: 'color', name: 'colors-black' }, - * { value: '2px', type: 'dimension', name: 'spacing-2' }, - * { value: 'solid {spacing.2} {colors.black}', name: 'border' } + * { key: '{colors.black}', value: '#000', type: 'color', name: 'colors-black' }, + * { key: '{spacing.2}', value: '2px', type: 'dimension', name: 'spacing-2' }, + * { key: '{border}', value: 'solid {spacing.2} {colors.black}', name: 'border' } * ] */ ``` @@ -53,6 +58,96 @@ const flat = flattenTokens(sd); You can pass a second argument `usesDtcg`, if set to true, the flattenTokens utility will assume DTCG syntax (`$value` props). ::: +The key is added so that it is easy to transform the flattened array back to a nested object if needed later, by using the [convertTokenData](/reference/utils/tokens#convertTokenData) utility. + +## convertTokenData + +Convert tokens from one data structure to another. + +Available data structures: + +- `Array` (available as `allTokens` on `dictionary`) -> easy to iterate e.g. for outputting flat formats +- `Object` (available as `tokens` on `dictionary`) -> similar to JSON input format e.g. 
DTCG format, useful for outputting nested / deep formats such as JSON +- `Map` (available as `tokenMap` on `dictionary`) -> easy to iterate & access, optimal for token processing and will be used internally in Style Dictionary in the future + +> All 3 structures can be converted to one another + +| Param | Type | Description | +| ------------------ | ------------------------------- | ------------------------------------------------------------ | +| `tokens` | `Tokens \| Token[] \| TokenMap` | The input tokens data as either `Object`, `Array` or `Map`. | +| `options` | `Object` | Options object, with multiple properties. | +| `options.usesDtcg` | `boolean` | Whether the input data uses DTCG syntax, `false` by default. | +| `options.output` | `'object' \| 'array' \| 'map'` | Output data format | + +We are currently considering making the `Map` structure the de-facto standard in a future v5, making the `Object`/`Array` versions available only through this utility. This is to optimize the library's base functionality. + +This utility auto-detects the input data type and allows you to specify the desired output data type. +You can optionally pass `usesDtcg` flag as well if you use DTCG format, this is necessary for converting from Object to `Map`/`Array`, since we need to know whether to use the `$value` or `value` to identify tokens in the `Object`. 
+ +```javascript title="build-tokens.js" +import StyleDictionary from 'style-dictionary'; +import { convertTokenData } from 'style-dictionary/utils'; + +const sd = new StyleDictionary({ + tokens: { + colors: { + black: { + value: '#000', + type: 'color', + name: 'colors-black', + }, + }, + spacing: { + 2: { + value: '2px', + type: 'dimension', + name: 'spacing-2', + }, + }, + border: { + value: 'solid {spacing.2} {colors.black}', + name: 'border', + }, + }, +}); +await sd.hasInitialized; + +const flatArray = convertTokenData(sd.tokens, { output: 'array' }); +/** + * [ + * { key: '{colors.black}', value: '#000', type: 'color', name: 'colors-black' }, + * { key: '{spacing.2}', value: '2px', type: 'dimension', name: 'spacing-2' }, + * { key: '{border}', value: 'solid {spacing.2} {colors.black}', name: 'border' } + * ] + */ + +/** + * Using the flatArray as input here is cheaper than using sd.tokens, since in order for + * it to convert a tokens Object to a Map, it would first flatten it to an Array. 
+ * + * However, you definitely CAN use the sd.tokens as input as well + */ +const map = convertTokenData(flatArray, { output: 'map' }); +/** + * Map(3): { + * '{colors.black}' => { value: '#000', type: 'color', name: 'colors-black' }, + * '{spacing.2}' => { value: '2px', type: 'dimension', name: 'spacing-2' }, + * '{border}' => { value: 'solid {spacing.2} {colors.black}', name: 'border' } + * } + */ +const borderToken = map.get('{border}'); // easy to access a token since it's keyed + +/** + * Same as above, you could use `sd.tokens` or `map` as inputs as well + * `sd.tokens` is cheapest since it's already an object and just returns it, no conversion happens + * `array` is just slightly cheaper than map since map needs to call .values() Iterator to iterate + */ +const object = convertTokenData(flatArray, { output: 'object' }); +/** + * Same as original tokens input, we basically went full circle + */ +``` + ## stripMeta Allows you to strip meta data from design tokens, useful if you want to output clean nested formats. diff --git a/docs/src/content/docs/reference/api.md b/docs/src/content/docs/reference/api.md index a562ba9e4..d232d98ad 100644 --- a/docs/src/content/docs/reference/api.md +++ b/docs/src/content/docs/reference/api.md @@ -365,6 +365,7 @@ type getPlatform = ( dictionary: { tokens: DesignTokens; allTokens: DesignToken[]; + tokenMap: Map; }; }>; ``` @@ -376,7 +377,7 @@ Wrapper around [`exportPlatform`](#exportplatform), returns a bit more data. Returns an object with `platformConfig` and `dictionary` properties: - `platformConfig` a processed version of the user config for the platform -- `dictionary` an object with `tokens` after transformations and reference resolutions, and an `allTokens` property which is a flattened (Array) version of that. +- `dictionary` an object with `tokens` after transformations and reference resolutions, and an `allTokens` property which is a flattened (Array) version of that. 
There's also a `tokenMap` property which combines best of both worlds, it is a JavaScript Map that's keyed, making it easy to access a single token as well as iterate through them. This is useful if you want to use a Style Dictionary in JS build tools like Webpack. diff --git a/lib/StyleDictionary.js b/lib/StyleDictionary.js index 0a2f68c86..52ed5119e 100644 --- a/lib/StyleDictionary.js +++ b/lib/StyleDictionary.js @@ -21,13 +21,13 @@ import deepExtend from './utils/deepExtend.js'; import resolveObject from './utils/resolveObject.js'; import getName from './utils/references/getName.js'; import GroupMessages, { verbosityInfo } from './utils/groupMessages.js'; -import flattenTokens from './utils/flattenTokens.js'; import { detectDtcgSyntax } from './utils/detectDtcgSyntax.js'; import { preprocess } from './utils/preprocess.js'; import { typeDtcgDelegate } from './utils/typeDtcgDelegate.js'; import createFormatArgs from './utils/createFormatArgs.js'; import { deepmerge } from './utils/deepmerge.js'; import { expandTokens } from './utils/expandObjectTokens.js'; +import { convertTokenData } from './utils/convertTokenData.js'; import { resolve } from './resolve.js'; import { Register } from './Register.js'; @@ -112,8 +112,10 @@ export default class StyleDictionary extends Register { this.options = {}; /** @type {PreprocessedTokens} */ this.tokens = {}; - /** @type {PreprocessedTokens[]} */ + /** @type {Token[]} */ this.allTokens = []; + /** @type {Map} */ + this.tokenMap = new Map(); /** @type {boolean | undefined} */ this.usesDtcg = undefined; /** @type {LogConfig} */ @@ -302,7 +304,8 @@ export default class StyleDictionary extends Register { const result = await combineJSON( this.source, true, - /** @param {Token} prop */ + // TODO: abstract into its own reusable interface, since it is used in deepExtend util as well + /** @param {{ target: Tokens, copy: Tokens, path: string[], key: string }} prop */ function Collision(prop) { GroupMessages.add( 
PROPERTY_VALUE_COLLISIONS, @@ -361,7 +364,9 @@ export default class StyleDictionary extends Register { preprocessedTokens = expandTokens(preprocessedTokens, this.options); } this.tokens = preprocessedTokens; - this.allTokens = flattenTokens(/** @type {PreprocessedTokens} */ (this.tokens), this.usesDtcg); + this.allTokens = convertTokenData(this.tokens, { output: 'array', usesDtcg: this.usesDtcg }); + this.tokenMap = convertTokenData(this.allTokens, { output: 'map', usesDtcg: this.usesDtcg }); + this.hasInitializedResolve(null); // For chaining @@ -436,21 +441,34 @@ export default class StyleDictionary extends Register { await this.hasInitialized; const platformConfig = this.getPlatformConfig(platform); - let platformProcessedTokens = /** @type {PreprocessedTokens} */ (this.tokens); - - platformProcessedTokens = await preprocess( - platformProcessedTokens, - platformConfig.preprocessors, - this.hooks.preprocessors, - platformConfig, - ); + /** @type {PreprocessedTokens | TransformedTokens} */ + let tokens = this.tokens; + /** @type {Map | Map} */ + let tokenMap = this.tokenMap; + /** @type {Token[] | TransformedTokens[]} */ + let allTokens = /** */ this.allTokens; + + // let tokensChanged = false; + if (platformConfig.preprocessors && platformConfig.preprocessors.length > 0) { + tokens = await preprocess( + tokens, + platformConfig.preprocessors, + this.hooks.preprocessors, + platformConfig, + ); + // tokensChanged = true; + } if (this.shouldRunExpansion(platformConfig.expand)) { - platformProcessedTokens = expandTokens(platformProcessedTokens, this.options, platformConfig); + tokens = expandTokens(tokens, this.options, platformConfig); + // tokensChanged = true; } - let exportableResult = /** @type {PreprocessedTokens|TransformedTokens} */ ( - platformProcessedTokens - ); + // TODO: re-enable below when Map is source of truth for transforming/resolving + + // // re-convert to Map when preprocessors/expand have ran on the tokens object + // if (tokensChanged) { + // 
tokenMap = convertTokenData(tokens, { output: 'map', usesDtcg: this.usesDtcg }); + // } /** * @type {string[]} @@ -510,8 +528,9 @@ export default class StyleDictionary extends Register { // values like "1px solid {color.border.base}" we want to // transform the original value (color.border.base) before // replacing that value in the string. - const transformed = await transformObject( - exportableResult, + // TODO: use tokenMap instead and refactor to flat transform util, this will be much cheaper/faster + tokens = await transformObject( + tokens, platformConfig, this.options, transformationContext, @@ -524,7 +543,8 @@ export default class StyleDictionary extends Register { const ignorePathsToResolve = deferredPropValueTransforms.map((p) => getName([p, this.usesDtcg ? '$value' : 'value']), ); - exportableResult = resolveObject(transformed, { + // TODO: use tokenMap instead and refactor to flat resolve util, this will be much cheaper/faster + tokens = resolveObject(tokens, { ignorePaths: ignorePathsToResolve, usesDtcg: this.usesDtcg, }); @@ -540,7 +560,8 @@ export default class StyleDictionary extends Register { // if we didn't resolve any deferred references then we have a circular reference // the resolveObject method will find the circular references // we do this in case there are multiple circular references - resolveObject(transformed, { + // TODO: use tokenMap instead and refactor to flat resolve util, this will be much cheaper/faster + resolveObject(tokens, { usesDtcg: this.usesDtcg, }); finished = true; @@ -586,7 +607,17 @@ export default class StyleDictionary extends Register { console.log(chalk.rgb(255, 140, 0).bold(err)); } } - return { tokens: exportableResult, allTokens: flattenTokens(exportableResult, this.usesDtcg) }; + + // TODO: When the transform / resolve have applied to the tokenMap instead of tokens, use the map and convert back to array / tokens obj + allTokens = convertTokenData(tokens, { output: 'array', usesDtcg: this.usesDtcg }); + 
tokenMap = convertTokenData(allTokens, { output: 'map', usesDtcg: this.usesDtcg }); + // TODO: re-enable below when the Map is the source of truth while transforming/resolving + // tokens = convertTokenData(allTokens, { output: 'object', usesDtcg: this.usesDtcg }); + return { + tokens, + allTokens: /** @type {TransformedToken[]} */ (allTokens), + tokenMap: /** @type {Map} */ (tokenMap), + }; } /** @@ -652,12 +683,13 @@ export default class StyleDictionary extends Register { const filteredTokens = await filterTokens(dictionary, filter, this.options); const filteredDictionary = { - ...dictionary, tokens: filteredTokens.tokens, allTokens: filteredTokens.allTokens, + tokenMap: filteredTokens.tokenMap, // keep the unfiltered tokens object for reference resolution unfilteredTokens: dictionary.tokens, - unfilteredAllTokens: flattenTokens(dictionary.tokens, this.usesDtcg), + unfilteredAllTokens: dictionary.allTokens, + unfilteredTokenMap: dictionary.tokenMap, }; // if tokens object is empty, return without creating a file diff --git a/lib/filterTokens.js b/lib/filterTokens.js index 738254656..d91d77736 100644 --- a/lib/filterTokens.js +++ b/lib/filterTokens.js @@ -11,6 +11,7 @@ * and limitations under the License. */ import isPlainObject from 'is-plain-obj'; +import { convertTokenData } from './utils/convertTokenData.js'; /** * @typedef {import('../types/DesignToken.d.ts').Dictionary} Dictionary @@ -96,10 +97,14 @@ export default async function filterTokens(dictionary, filter, options = {}) { } else { const allTokens = await asyncFilter(dictionary.allTokens ?? 
[], filter, options); const tokens = await filterTokenObject(dictionary.tokens, filter, options); + const tokenMap = /** @type {Map} */ ( + convertTokenData(allTokens, { output: 'map' }) + ); return { allTokens, tokens, + tokenMap, }; } } diff --git a/lib/utils/convertToTokenObject.js b/lib/utils/convertToTokenObject.js new file mode 100644 index 000000000..9c6f6b494 --- /dev/null +++ b/lib/utils/convertToTokenObject.js @@ -0,0 +1,43 @@ +/** + * @typedef {import('../../types/DesignToken.ts').DesignToken} Token + * @typedef {import('../../types/DesignToken.ts').TransformedToken} TransformedToken + * @typedef {import('../../types/DesignToken.ts').DesignTokens} Tokens + * @typedef {import('../../types/DesignToken.ts').TransformedTokens} TransformedTokens + */ + +/** + * Map(0) => { key: 'colors.red.500', value: '#ff0000' } + * + * to + * + * { + * colors: { + * red: { + * 500: { + * value: '#ff0000' + * } + * } + * } + * } + * + * @param {Map} tokenMap + * @returns {TransformedTokens} + */ +export function convertToTokenObject(tokenMap) { + const obj = /** @type {TransformedTokens} */ ({}); + Array.from(tokenMap).forEach(([key, token]) => { + const keyArr = key.replace('{', '').replace('}', '').split('.'); + let slice = obj; + keyArr.forEach((k, i, arr) => { + if (slice[k] === undefined) { + slice[k] = {}; + } + // end + if (i === arr.length - 1) { + slice[k] = token; + } + slice = slice[k]; + }); + }); + return obj; +} diff --git a/lib/utils/convertTokenData.js b/lib/utils/convertTokenData.js new file mode 100644 index 000000000..138dbfec1 --- /dev/null +++ b/lib/utils/convertTokenData.js @@ -0,0 +1,123 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +import flattenTokens from './flattenTokens.js'; + +/** + * @typedef {import('../../types/DesignToken').DesignTokens} DesignTokens + * @typedef {import('../../types/DesignToken').DesignToken} DesignToken + * @typedef {import('../../types/DesignToken').TransformedTokens} TransformedTokens + * @typedef {import('../../types/DesignToken').TransformedToken} TransformedToken + * @typedef {DesignToken|TransformedToken} Token + * @typedef {DesignTokens|TransformedTokens} Tokens + * @typedef {Map} TokenMap + * @typedef {{'map': TokenMap, 'object': Tokens, 'array': Token[]}} TypeMap + */ + +/** + * @template {Token} T + * @param {Array} flattenedTokens + * @returns {Map} + */ +function convertToTokenMap(flattenedTokens) { + return new Map( + flattenedTokens.map((t) => { + return [/** @type {string} */ (t.key), t]; + }), + ); +} + +/** + * @param {Token[]} tokenArray + * @returns + */ +function convertToTokenObject(tokenArray) { + const obj = /** @type {TransformedTokens} */ ({}); + tokenArray.forEach((token) => { + const { key } = token; + const keyArr = /** @type {string} */ (key).replace('{', '').replace('}', '').split('.'); + let slice = obj; + keyArr.forEach((k, i, arr) => { + if (slice[k] === undefined) { + slice[k] = {}; + } + // end + if (i === arr.length - 1) { + slice[k] = token; + } + slice = slice[k]; + }); + }); + return obj; +} + +/** + * Allows converting your tokens to one of the 3 data structures available: + * 'map' -> JavaScript Map + * 'object' -> JavaScript Object + * 'array' -> JavaScript Array + * + * The input format is automatically detected + * + * The return type depends on the "output" prop input, hence the use of a generic / type mapping + * @template {keyof TypeMap} T + * @param {Tokens | Token[] | TokenMap} tokens + * 
@param {{output: T, usesDtcg?: boolean}} options + * @return {TypeMap[T]} + */ +export function convertTokenData(tokens, options) { + const { usesDtcg, output } = options; + + /** @type {keyof TypeMap} */ + const input = Array.isArray(tokens) ? 'array' : tokens instanceof Map ? 'map' : 'object'; + + switch (output) { + case 'array': { + if (input === 'object') { + // adds "key" prop + return /** @type {TypeMap[T]} */ (flattenTokens(/** @type {Tokens} */ (tokens), usesDtcg)); + } else if (input === 'map') { + // we assume map is always keyed with "key" prop + return /** @type {TypeMap[T]} */ ( + Array.from(/** @type {TokenMap} */ (tokens)).map(([key, value]) => ({ + key, + ...value, + })) + ); + } + break; + } + case 'map': { + if (input === 'object') { + // adds "key" prop + const flattened = flattenTokens(/** @type {Tokens} */ (tokens), usesDtcg); + return /** @type {TypeMap[T]} */ (convertToTokenMap(flattened)); + } else if (input === 'array') { + // we assume that if it's an array, this array was created by flattenTokens which adds the "key" props + return /** @type {TypeMap[T]} */ (convertToTokenMap(/** @type {Token[]} */ (tokens))); + } + break; + } + case 'object': { + if (input === 'map') { + // we assume map is always keyed with "key" prop + const arr = Array.from(/** @type {TokenMap} */ (tokens).values()); + return /** @type {TypeMap[T]} */ (convertToTokenObject(arr)); + } else if (input === 'array') { + // we assume that if it's an array, this array was created by flattenTokens which adds the "key" props + return /** @type {TypeMap[T]} */ (convertToTokenObject(/** @type {Token[]} */ (tokens))); + } + break; + } + } + return /** @type {TypeMap[T]} */ (tokens); +} diff --git a/lib/utils/flattenTokens.js b/lib/utils/flattenTokens.js index 83972707d..8681cd78e 100644 --- a/lib/utils/flattenTokens.js +++ b/lib/utils/flattenTokens.js @@ -14,38 +14,50 @@ import isPlainObject from 'is-plain-obj'; /** - * @typedef 
{import('../../types/DesignToken.d.ts').TransformedTokens} Tokens - * @typedef {import('../../types/DesignToken.d.ts').TransformedToken} Token + * @typedef {import('../../types/DesignToken.ts').DesignToken} Token + * @typedef {import('../../types/DesignToken.ts').DesignTokens} Tokens + * @typedef {import('../../types/DesignToken.ts').TransformedToken} TransformedToken + * @typedef {import('../../types/DesignToken.ts').TransformedTokens} TransformedTokens */ /** * @private - * @param {Tokens} slice - The plain object you want flattened into an array. + * @template {Token | TransformedToken} T + * @template {Tokens | TransformedTokens} U + * @param {U} slice - The plain object you want flattened into an array. * @param {boolean} [usesDtcg] - Whether or not tokens are using DTCG syntax. - * @param {Token[]} [to_ret] - Tokens array. This function is recursive therefore this is what gets passed along. - * @return {Token[]} + * @param {Array} [result] - Tokens array. This function is recursive therefore this is what gets passed along. + * @param {string[]} [keyMemo] - Memo of keys we're traversing in order of depth + * @return {Array} */ -function _flattenTokens(slice, usesDtcg, to_ret = []) { - for (let name in slice) { - if (Object.hasOwn(slice, name)) { - // TODO: this is a bit fragile and arbitrary to stop when we get to a 'value' property. - if (isPlainObject(slice[name]) && Object.hasOwn(slice[name], `${usesDtcg ? '$' : ''}value`)) { - to_ret.push(/** @type {Token} */ (slice[name])); - } else if (isPlainObject(slice[name])) { - _flattenTokens(slice[name], usesDtcg, to_ret); +function _flattenTokens(slice, usesDtcg, result = [], keyMemo = []) { + for (let key in slice) { + if (Object.hasOwn(slice, key)) { + // Stop either when we encounter a "value" prop or if we find that every prop is not an object, meaning we cannot traverse any further + if (isPlainObject(slice[key]) && Object.hasOwn(slice[key], `${usesDtcg ? 
'$' : ''}value`)) { + result.push({ + .../** @type {T} */ (slice[key]), + // this keeps track of the ancestor keys of the token e.g. 'colors.red.500' + key: `{${[...keyMemo, key].join('.')}}`, + }); + } else if (isPlainObject(slice[key])) { + // pass the current slice key to the end of the memo onto the next recursive call + _flattenTokens(slice[key], usesDtcg, result, keyMemo.concat(key)); } } } - return to_ret; + return result; } /** * Takes an plain javascript object and will make a flat array of all the leaf nodes. * A leaf node in this context has a 'value' property. Potentially refactor this to * be more generic. - * @param {Tokens} tokens - The plain object you want flattened into an array. + * @template {Token | TransformedToken} T + * @template {Tokens | TransformedTokens} U + * @param {U} tokens - The plain object you want flattened into an array. * @param {boolean} [usesDtcg] - Whether or not tokens are using DTCG syntax. - * @return {Token[]} + * @return {Array} */ export default function flattenTokens(tokens, usesDtcg = false) { return _flattenTokens(tokens, usesDtcg); diff --git a/lib/utils/index.js b/lib/utils/index.js index eec211937..3d18ce199 100644 --- a/lib/utils/index.js +++ b/lib/utils/index.js @@ -17,6 +17,7 @@ import { resolveReferences } from './references/resolveReferences.js'; import { outputReferencesFilter } from './references/outputReferencesFilter.js'; import { outputReferencesTransformed } from './references/outputReferencesTransformed.js'; import flattenTokens from './flattenTokens.js'; +import { convertTokenData } from './convertTokenData.js'; import { typeDtcgDelegate } from './typeDtcgDelegate.js'; import { convertToDTCG, convertJSONToDTCG, convertZIPToDTCG } from './convertToDTCG.js'; import { stripMeta } from './stripMeta.js'; @@ -34,5 +35,6 @@ export { convertJSONToDTCG, convertZIPToDTCG, stripMeta, + convertTokenData, }; export * from '../common/formatHelpers/index.js'; diff --git a/types/Config.ts b/types/Config.ts index 
da2afbfca..aa9557167 100644 --- a/types/Config.ts +++ b/types/Config.ts @@ -93,6 +93,7 @@ export interface PlatformConfig extends RegexOptions { transformGroup?: string; transforms?: string[] | Omit[]; expand?: ExpandConfig; + preprocessors?: string[]; prefix?: string; buildPath?: string; files?: File[]; diff --git a/types/DesignToken.ts b/types/DesignToken.ts index 8bafa069a..30dfe8f93 100644 --- a/types/DesignToken.ts +++ b/types/DesignToken.ts @@ -25,6 +25,10 @@ export interface DesignToken { comment?: string; themeable?: boolean; attributes?: Record; + /** + * When flattening tokens, DesignToken is given a key that matches the original ancestor tree e.g. `{colors.red.500}` + */ + key?: string; [key: string]: any; } @@ -71,6 +75,8 @@ export interface TransformedTokens { export interface Dictionary { tokens: TransformedTokens; allTokens: TransformedToken[]; + tokenMap: Map; unfilteredTokens?: TransformedTokens; unfilteredAllTokens?: TransformedToken[]; + unfilteredTokenMap?: Map; }