Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add tokenMap props and convertTokenData utility #1397

Merged
merged 2 commits into from
Dec 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/ninety-adults-sing.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'style-dictionary': minor
---

Add `tokenMap` properties to Dictionary, which is a JavaScript Map structure of the tokens, which makes it easy to iterate as well as access tokens. Also add `convertTokenData` utility that allows you to seamlessly convert between Map, Object or Array of tokens, and deprecate the `flattenTokens` utility in favor of that one.
80 changes: 54 additions & 26 deletions __integration__/__snapshots__/customFormats.test.snap.js
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"small"
]
],
"key": "{size.padding.small}"
},
{
"value": "1rem",
Expand All @@ -135,7 +136,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"medium"
]
],
"key": "{size.padding.medium}"
},
{
"value": "1rem",
Expand All @@ -156,7 +158,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"large"
]
],
"key": "{size.padding.large}"
},
{
"value": "1rem",
Expand All @@ -177,9 +180,11 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"xl"
]
],
"key": "{size.padding.xl}"
}
],
"tokenMap": {},
"unfilteredTokens": {
"size": {
"padding": {
Expand Down Expand Up @@ -290,7 +295,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"small"
]
],
"key": "{size.padding.small}"
},
{
"value": "1rem",
Expand All @@ -311,7 +317,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"medium"
]
],
"key": "{size.padding.medium}"
},
{
"value": "1rem",
Expand All @@ -332,7 +339,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"large"
]
],
"key": "{size.padding.large}"
},
{
"value": "1rem",
Expand All @@ -353,9 +361,11 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"xl"
]
],
"key": "{size.padding.xl}"
}
]
],
"unfilteredTokenMap": {}
},
"allTokens": [
{
Expand All @@ -377,7 +387,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"small"
]
],
"key": "{size.padding.small}"
},
{
"value": "1rem",
Expand All @@ -398,7 +409,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"medium"
]
],
"key": "{size.padding.medium}"
},
{
"value": "1rem",
Expand All @@ -419,7 +431,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"large"
]
],
"key": "{size.padding.large}"
},
{
"value": "1rem",
Expand All @@ -440,7 +453,8 @@ snapshots["integration custom formats inline custom with new args should match s
"size",
"padding",
"xl"
]
],
"key": "{size.padding.xl}"
}
],
"tokens": {
Expand Down Expand Up @@ -1075,7 +1089,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"small"
]
],
"key": "{size.padding.small}"
},
{
"value": "1rem",
Expand All @@ -1096,7 +1111,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"medium"
]
],
"key": "{size.padding.medium}"
},
{
"value": "1rem",
Expand All @@ -1117,7 +1133,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"large"
]
],
"key": "{size.padding.large}"
},
{
"value": "1rem",
Expand All @@ -1138,9 +1155,11 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"xl"
]
],
"key": "{size.padding.xl}"
}
],
"tokenMap": {},
"unfilteredTokens": {
"size": {
"padding": {
Expand Down Expand Up @@ -1251,7 +1270,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"small"
]
],
"key": "{size.padding.small}"
},
{
"value": "1rem",
Expand All @@ -1272,7 +1292,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"medium"
]
],
"key": "{size.padding.medium}"
},
{
"value": "1rem",
Expand All @@ -1293,7 +1314,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"large"
]
],
"key": "{size.padding.large}"
},
{
"value": "1rem",
Expand All @@ -1314,9 +1336,11 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"xl"
]
],
"key": "{size.padding.xl}"
}
]
],
"unfilteredTokenMap": {}
},
"allTokens": [
{
Expand All @@ -1338,7 +1362,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"small"
]
],
"key": "{size.padding.small}"
},
{
"value": "1rem",
Expand All @@ -1359,7 +1384,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"medium"
]
],
"key": "{size.padding.medium}"
},
{
"value": "1rem",
Expand All @@ -1380,7 +1406,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"large"
]
],
"key": "{size.padding.large}"
},
{
"value": "1rem",
Expand All @@ -1401,7 +1428,8 @@ snapshots["integration custom formats register custom format with new args shoul
"size",
"padding",
"xl"
]
],
"key": "{size.padding.xl}"
}
],
"tokens": {
Expand Down
3 changes: 2 additions & 1 deletion __perf_tests__/basic.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,8 @@ describe('cliBuildWithJsConfig', () => {
expect(end - start).to.be.below(70);
});

// TODO: aim for <1000ms (maybe a bit more margin due to CI CPUs being slower)
// TODO: this should be way lower in the future when transform/resolve are using tokenMap
// and refs are getting cached
it('should run tons of refs within 2750ms', async () => {
// 9000 tokens, 6000 refs
// (first layer is raw values, other 2 layers are refs to previous layer)
Expand Down
4 changes: 2 additions & 2 deletions __tests__/StyleDictionary.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import chalk from 'chalk';
import { fileToJSON, clearOutput, fileExists, clearSDMeta } from './__helpers.js';
import { resolve } from '../lib/resolve.js';
import GroupMessages from '../lib/utils/groupMessages.js';
import flattenTokens from '../lib/utils/flattenTokens.js';
import { convertTokenData } from '../lib/utils/convertTokenData.js';
import { stripMeta } from '../lib/utils/stripMeta.js';
import formats from '../lib/common/formats.js';
import { restore, stubMethod } from 'hanbi';
Expand Down Expand Up @@ -1145,7 +1145,7 @@ ${dictionary.allTokens.map((tok) => ` ${tok.name}: "${tok.value}";`).join('\n')
},
{
tokens: tokens,
allTokens: flattenTokens(tokens),
allTokens: convertTokenData(tokens, { output: 'array' }),
},
);
await expect(output).to.matchSnapshot();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
*/
import { expect } from 'chai';
import createPropertyFormatter from '../../../lib/common/formatHelpers/createPropertyFormatter.js';
import flattenTokens from '../../../lib/utils/flattenTokens.js';
import { convertTokenData } from '../../../lib/utils/convertTokenData.js';
import { outputReferencesFilter } from '../../../lib/utils/references/outputReferencesFilter.js';

const dictionary = {
Expand Down Expand Up @@ -257,7 +257,7 @@ describe('common', () => {
},
};
const tokens = { ...unfilteredTokens };
const allTokens = flattenTokens(tokens);
const allTokens = convertTokenData(tokens, { output: 'array' });
const propFormatter = createPropertyFormatter({
dictionary: {
tokens,
Expand Down Expand Up @@ -317,7 +317,7 @@ describe('common', () => {
};
const tokens = { ...unfilteredTokens };
delete tokens.foo;
const allTokens = flattenTokens(tokens);
const allTokens = convertTokenData(tokens, { output: 'array' });
const propFormatter = createPropertyFormatter({
dictionary: {
tokens,
Expand Down Expand Up @@ -378,7 +378,7 @@ describe('common', () => {
};
const tokens = { ...unfilteredTokens };
delete tokens.foo;
const allTokens = flattenTokens(tokens, true);
const allTokens = convertTokenData(tokens, { output: 'array', usesDtcg: true });
const propFormatter = createPropertyFormatter({
dictionary: {
tokens,
Expand Down
19 changes: 12 additions & 7 deletions __tests__/filterTokens.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import { expect } from 'chai';
import filterTokens from '../lib/filterTokens.js';
import { clearOutput } from './__helpers.js';
import flattenTokens from '../lib/utils/flattenTokens.js';
import { convertTokenData } from '../lib/utils/convertTokenData.js';

const colorRed = {
value: '#FF0000',
Expand Down Expand Up @@ -123,7 +123,7 @@ const random_meta_tokens = {

const random_meta_dictionary = {
tokens: random_meta_tokens,
allTokens: flattenTokens(random_meta_tokens),
allTokens: convertTokenData(random_meta_tokens, { output: 'array' }),
};

const falsy_values = {
Expand All @@ -133,12 +133,12 @@ const falsy_values = {

const dictionary = {
tokens,
allTokens: flattenTokens(tokens),
allTokens: convertTokenData(tokens, { output: 'array' }),
};

const falsy_dictionary = {
tokens: falsy_values,
allTokens: flattenTokens(falsy_values),
allTokens: convertTokenData(falsy_values, { output: 'array' }),
};

describe('filterTokens', () => {
Expand All @@ -161,7 +161,10 @@ describe('filterTokens', () => {
expect(token).to.not.equal(colorRed);
expect(token).not.to.not.equal(colorBlue);
});
expect(filteredDictionary.allTokens).to.eql([sizeSmall, sizeLarge]);
expect(filteredDictionary.allTokens).to.eql([
{ ...sizeSmall, key: '{size.small}' },
{ ...sizeLarge, key: '{size.large}' },
]);
expect(filteredDictionary.tokens).to.have.property('size');
expect(filteredDictionary.tokens).to.not.have.property('color');
});
Expand All @@ -173,7 +176,7 @@ describe('filterTokens', () => {
filteredDictionary.allTokens.forEach((token) => {
expect(token).to.not.equal(not_kept);
});
expect(filteredDictionary.allTokens).to.eql([kept]);
expect(filteredDictionary.allTokens).to.eql([{ ...kept, key: '{kept}' }]);
expect(filteredDictionary.tokens).to.have.property('kept');
expect(filteredDictionary.tokens).to.not.have.property('not_kept');
});
Expand All @@ -184,7 +187,9 @@ describe('filterTokens', () => {
};

const filteredDictionary = await filterTokens(random_meta_dictionary, filter);
expect(filteredDictionary.allTokens).to.eql([random_meta_tokens.foo.bar]);
expect(filteredDictionary.allTokens).to.eql([
{ ...random_meta_tokens.foo.bar, key: '{foo.bar}' },
]);
expect(filteredDictionary.tokens).to.have.nested.property('foo.bar');
expect(filteredDictionary.tokens).to.not.have.property('qux');
});
Expand Down
Loading