@pandacss/token-dictionary 0.35.0 → 0.36.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +3 -1
- package/dist/index.d.ts +3 -1
- package/dist/index.js +70 -37
- package/dist/index.mjs +71 -37
- package/package.json +4 -4
package/dist/index.d.mts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import * as _pandacss_types from '@pandacss/types';
|
|
2
|
-
import { Tokens, SemanticTokens } from '@pandacss/types';
|
|
2
|
+
import { Tokens, SemanticTokens, ThemeVariantsMap } from '@pandacss/types';
|
|
3
3
|
import { CssVarOptions, CssVar } from '@pandacss/shared';
|
|
4
4
|
|
|
5
5
|
type TokenStatus = 'deprecated' | 'experimental' | 'new';
|
|
@@ -9,6 +9,7 @@ interface ExtensionData {
|
|
|
9
9
|
references?: TokenReferences;
|
|
10
10
|
condition?: string;
|
|
11
11
|
conditions?: TokenConditions;
|
|
12
|
+
theme?: string;
|
|
12
13
|
}
|
|
13
14
|
interface TokenConditions {
|
|
14
15
|
[key: string]: string;
|
|
@@ -97,6 +98,7 @@ interface TokenDictionaryOptions {
|
|
|
97
98
|
tokens?: Tokens;
|
|
98
99
|
semanticTokens?: SemanticTokens;
|
|
99
100
|
breakpoints?: Record<string, string>;
|
|
101
|
+
themes?: ThemeVariantsMap | undefined;
|
|
100
102
|
prefix?: string;
|
|
101
103
|
hash?: boolean;
|
|
102
104
|
}
|
package/dist/index.d.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import * as _pandacss_types from '@pandacss/types';
|
|
2
|
-
import { Tokens, SemanticTokens } from '@pandacss/types';
|
|
2
|
+
import { Tokens, SemanticTokens, ThemeVariantsMap } from '@pandacss/types';
|
|
3
3
|
import { CssVarOptions, CssVar } from '@pandacss/shared';
|
|
4
4
|
|
|
5
5
|
type TokenStatus = 'deprecated' | 'experimental' | 'new';
|
|
@@ -9,6 +9,7 @@ interface ExtensionData {
|
|
|
9
9
|
references?: TokenReferences;
|
|
10
10
|
condition?: string;
|
|
11
11
|
conditions?: TokenConditions;
|
|
12
|
+
theme?: string;
|
|
12
13
|
}
|
|
13
14
|
interface TokenConditions {
|
|
14
15
|
[key: string]: string;
|
|
@@ -97,6 +98,7 @@ interface TokenDictionaryOptions {
|
|
|
97
98
|
tokens?: Tokens;
|
|
98
99
|
semanticTokens?: SemanticTokens;
|
|
99
100
|
breakpoints?: Record<string, string>;
|
|
101
|
+
themes?: ThemeVariantsMap | undefined;
|
|
100
102
|
prefix?: string;
|
|
101
103
|
hash?: boolean;
|
|
102
104
|
}
|
package/dist/index.js
CHANGED
|
@@ -592,6 +592,8 @@ var transformColorMix = {
|
|
|
592
592
|
return token.extensions.category === "colors" && token.value.includes("/");
|
|
593
593
|
},
|
|
594
594
|
transform(token, dict) {
|
|
595
|
+
if (!token.value.includes("/"))
|
|
596
|
+
return token;
|
|
595
597
|
return expandReferences(token.value, (path) => {
|
|
596
598
|
const tokenFn = (tokenPath) => {
|
|
597
599
|
const token2 = dict.getByName(tokenPath);
|
|
@@ -747,7 +749,7 @@ var TokenDictionary = class {
|
|
|
747
749
|
formatTokenName = (path) => path.join(".");
|
|
748
750
|
formatCssVar = (path, options) => (0, import_shared5.cssVar)(path.join("-"), options);
|
|
749
751
|
registerTokens() {
|
|
750
|
-
const { tokens = {}, semanticTokens = {}, breakpoints } = this.options;
|
|
752
|
+
const { tokens = {}, semanticTokens = {}, breakpoints, themes = {} } = this.options;
|
|
751
753
|
const breakpointTokens = expandBreakpoints(breakpoints);
|
|
752
754
|
const computedTokens = (0, import_shared5.compact)({
|
|
753
755
|
...tokens,
|
|
@@ -757,22 +759,50 @@ var TokenDictionary = class {
|
|
|
757
759
|
...breakpointTokens.sizes
|
|
758
760
|
}
|
|
759
761
|
});
|
|
762
|
+
const processToken = (token, path) => {
|
|
763
|
+
const isDefault = path.includes("DEFAULT");
|
|
764
|
+
path = filterDefault(path);
|
|
765
|
+
assertTokenFormat(token);
|
|
766
|
+
const category = path[0];
|
|
767
|
+
const name = this.formatTokenName(path);
|
|
768
|
+
const node = new Token({ ...token, name, path });
|
|
769
|
+
node.setExtensions({
|
|
770
|
+
category,
|
|
771
|
+
prop: this.formatTokenName(path.slice(1))
|
|
772
|
+
});
|
|
773
|
+
if (isDefault) {
|
|
774
|
+
node.setExtensions({ isDefault });
|
|
775
|
+
}
|
|
776
|
+
return node;
|
|
777
|
+
};
|
|
778
|
+
const processSemantic = (token, path) => {
|
|
779
|
+
const isDefault = path.includes("DEFAULT");
|
|
780
|
+
path = filterDefault(path);
|
|
781
|
+
assertTokenFormat(token);
|
|
782
|
+
const category = path[0];
|
|
783
|
+
const name = this.formatTokenName(path);
|
|
784
|
+
const normalizedToken = (0, import_shared5.isString)(token.value) || isCompositeTokenValue(token.value) ? { value: { base: token.value } } : token;
|
|
785
|
+
const { value, ...restData } = normalizedToken;
|
|
786
|
+
const node = new Token({
|
|
787
|
+
...restData,
|
|
788
|
+
name,
|
|
789
|
+
value: value.base || "",
|
|
790
|
+
path
|
|
791
|
+
});
|
|
792
|
+
node.setExtensions({
|
|
793
|
+
category,
|
|
794
|
+
conditions: value,
|
|
795
|
+
prop: this.formatTokenName(path.slice(1))
|
|
796
|
+
});
|
|
797
|
+
if (isDefault) {
|
|
798
|
+
node.setExtensions({ isDefault });
|
|
799
|
+
}
|
|
800
|
+
return node;
|
|
801
|
+
};
|
|
760
802
|
(0, import_shared5.walkObject)(
|
|
761
803
|
computedTokens,
|
|
762
804
|
(token, path) => {
|
|
763
|
-
const isDefault = path.includes("DEFAULT");
|
|
764
|
-
path = filterDefault(path);
|
|
765
|
-
assertTokenFormat(token);
|
|
766
|
-
const category = path[0];
|
|
767
|
-
const name = this.formatTokenName(path);
|
|
768
|
-
const node = new Token({ ...token, name, path });
|
|
769
|
-
node.setExtensions({
|
|
770
|
-
category,
|
|
771
|
-
prop: this.formatTokenName(path.slice(1))
|
|
772
|
-
});
|
|
773
|
-
if (isDefault) {
|
|
774
|
-
node.setExtensions({ isDefault });
|
|
775
|
-
}
|
|
805
|
+
const node = processToken(token, path);
|
|
776
806
|
this.registerToken(node);
|
|
777
807
|
},
|
|
778
808
|
{ stop: isToken }
|
|
@@ -780,31 +810,34 @@ var TokenDictionary = class {
|
|
|
780
810
|
(0, import_shared5.walkObject)(
|
|
781
811
|
semanticTokens,
|
|
782
812
|
(token, path) => {
|
|
783
|
-
const isDefault = path.includes("DEFAULT");
|
|
784
|
-
path = filterDefault(path);
|
|
785
|
-
assertTokenFormat(token);
|
|
786
|
-
const category = path[0];
|
|
787
|
-
const name = this.formatTokenName(path);
|
|
788
|
-
const normalizedToken = (0, import_shared5.isString)(token.value) || isCompositeTokenValue(token.value) ? { value: { base: token.value } } : token;
|
|
789
|
-
const { value, ...restData } = normalizedToken;
|
|
790
|
-
const node = new Token({
|
|
791
|
-
...restData,
|
|
792
|
-
name,
|
|
793
|
-
value: value.base || "",
|
|
794
|
-
path
|
|
795
|
-
});
|
|
796
|
-
node.setExtensions({
|
|
797
|
-
category,
|
|
798
|
-
conditions: value,
|
|
799
|
-
prop: this.formatTokenName(path.slice(1))
|
|
800
|
-
});
|
|
801
|
-
if (isDefault) {
|
|
802
|
-
node.setExtensions({ isDefault });
|
|
803
|
-
}
|
|
813
|
+
const node = processSemantic(token, path);
|
|
804
814
|
this.registerToken(node);
|
|
805
815
|
},
|
|
806
816
|
{ stop: isToken }
|
|
807
817
|
);
|
|
818
|
+
Object.entries(themes).forEach(([theme, themeVariant]) => {
|
|
819
|
+
const condName = "_theme" + (0, import_shared5.capitalize)(theme);
|
|
820
|
+
(0, import_shared5.walkObject)(
|
|
821
|
+
themeVariant.tokens ?? {},
|
|
822
|
+
(token, path) => {
|
|
823
|
+
const themeToken = { value: { [condName]: token.value } };
|
|
824
|
+
const node = processSemantic(themeToken, path);
|
|
825
|
+
node.setExtensions({ theme, isVirtual: true });
|
|
826
|
+
this.registerToken(node);
|
|
827
|
+
},
|
|
828
|
+
{ stop: isToken }
|
|
829
|
+
);
|
|
830
|
+
(0, import_shared5.walkObject)(
|
|
831
|
+
themeVariant.semanticTokens ?? {},
|
|
832
|
+
(token, path) => {
|
|
833
|
+
const themeToken = { value: { [condName]: token.value } };
|
|
834
|
+
const node = processSemantic(themeToken, path);
|
|
835
|
+
node.setExtensions({ theme, isSemantic: true, isVirtual: true });
|
|
836
|
+
this.registerToken(node);
|
|
837
|
+
},
|
|
838
|
+
{ stop: isToken }
|
|
839
|
+
);
|
|
840
|
+
});
|
|
808
841
|
return this;
|
|
809
842
|
}
|
|
810
843
|
registerToken = (token, transformPhase) => {
|
|
@@ -1095,8 +1128,8 @@ var TokenDictionaryView = class {
|
|
|
1095
1128
|
flat.set(token.name, value);
|
|
1096
1129
|
}
|
|
1097
1130
|
processVars(token, group) {
|
|
1098
|
-
const { condition, isNegative, isVirtual, var: varName } = token.extensions;
|
|
1099
|
-
if (isNegative || isVirtual || !condition)
|
|
1131
|
+
const { condition, isNegative, isVirtual, var: varName, theme } = token.extensions;
|
|
1132
|
+
if (isNegative || !theme && isVirtual || !condition)
|
|
1100
1133
|
return;
|
|
1101
1134
|
if (!group.has(condition))
|
|
1102
1135
|
group.set(condition, /* @__PURE__ */ new Map());
|
package/dist/index.mjs
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
// src/dictionary.ts
|
|
2
2
|
import {
|
|
3
|
+
capitalize,
|
|
3
4
|
compact,
|
|
4
5
|
cssVar,
|
|
5
6
|
isString as isString2,
|
|
@@ -572,6 +573,8 @@ var transformColorMix = {
|
|
|
572
573
|
return token.extensions.category === "colors" && token.value.includes("/");
|
|
573
574
|
},
|
|
574
575
|
transform(token, dict) {
|
|
576
|
+
if (!token.value.includes("/"))
|
|
577
|
+
return token;
|
|
575
578
|
return expandReferences(token.value, (path) => {
|
|
576
579
|
const tokenFn = (tokenPath) => {
|
|
577
580
|
const token2 = dict.getByName(tokenPath);
|
|
@@ -727,7 +730,7 @@ var TokenDictionary = class {
|
|
|
727
730
|
formatTokenName = (path) => path.join(".");
|
|
728
731
|
formatCssVar = (path, options) => cssVar(path.join("-"), options);
|
|
729
732
|
registerTokens() {
|
|
730
|
-
const { tokens = {}, semanticTokens = {}, breakpoints } = this.options;
|
|
733
|
+
const { tokens = {}, semanticTokens = {}, breakpoints, themes = {} } = this.options;
|
|
731
734
|
const breakpointTokens = expandBreakpoints(breakpoints);
|
|
732
735
|
const computedTokens = compact({
|
|
733
736
|
...tokens,
|
|
@@ -737,22 +740,50 @@ var TokenDictionary = class {
|
|
|
737
740
|
...breakpointTokens.sizes
|
|
738
741
|
}
|
|
739
742
|
});
|
|
743
|
+
const processToken = (token, path) => {
|
|
744
|
+
const isDefault = path.includes("DEFAULT");
|
|
745
|
+
path = filterDefault(path);
|
|
746
|
+
assertTokenFormat(token);
|
|
747
|
+
const category = path[0];
|
|
748
|
+
const name = this.formatTokenName(path);
|
|
749
|
+
const node = new Token({ ...token, name, path });
|
|
750
|
+
node.setExtensions({
|
|
751
|
+
category,
|
|
752
|
+
prop: this.formatTokenName(path.slice(1))
|
|
753
|
+
});
|
|
754
|
+
if (isDefault) {
|
|
755
|
+
node.setExtensions({ isDefault });
|
|
756
|
+
}
|
|
757
|
+
return node;
|
|
758
|
+
};
|
|
759
|
+
const processSemantic = (token, path) => {
|
|
760
|
+
const isDefault = path.includes("DEFAULT");
|
|
761
|
+
path = filterDefault(path);
|
|
762
|
+
assertTokenFormat(token);
|
|
763
|
+
const category = path[0];
|
|
764
|
+
const name = this.formatTokenName(path);
|
|
765
|
+
const normalizedToken = isString2(token.value) || isCompositeTokenValue(token.value) ? { value: { base: token.value } } : token;
|
|
766
|
+
const { value, ...restData } = normalizedToken;
|
|
767
|
+
const node = new Token({
|
|
768
|
+
...restData,
|
|
769
|
+
name,
|
|
770
|
+
value: value.base || "",
|
|
771
|
+
path
|
|
772
|
+
});
|
|
773
|
+
node.setExtensions({
|
|
774
|
+
category,
|
|
775
|
+
conditions: value,
|
|
776
|
+
prop: this.formatTokenName(path.slice(1))
|
|
777
|
+
});
|
|
778
|
+
if (isDefault) {
|
|
779
|
+
node.setExtensions({ isDefault });
|
|
780
|
+
}
|
|
781
|
+
return node;
|
|
782
|
+
};
|
|
740
783
|
walkObject2(
|
|
741
784
|
computedTokens,
|
|
742
785
|
(token, path) => {
|
|
743
|
-
const isDefault = path.includes("DEFAULT");
|
|
744
|
-
path = filterDefault(path);
|
|
745
|
-
assertTokenFormat(token);
|
|
746
|
-
const category = path[0];
|
|
747
|
-
const name = this.formatTokenName(path);
|
|
748
|
-
const node = new Token({ ...token, name, path });
|
|
749
|
-
node.setExtensions({
|
|
750
|
-
category,
|
|
751
|
-
prop: this.formatTokenName(path.slice(1))
|
|
752
|
-
});
|
|
753
|
-
if (isDefault) {
|
|
754
|
-
node.setExtensions({ isDefault });
|
|
755
|
-
}
|
|
786
|
+
const node = processToken(token, path);
|
|
756
787
|
this.registerToken(node);
|
|
757
788
|
},
|
|
758
789
|
{ stop: isToken }
|
|
@@ -760,31 +791,34 @@ var TokenDictionary = class {
|
|
|
760
791
|
walkObject2(
|
|
761
792
|
semanticTokens,
|
|
762
793
|
(token, path) => {
|
|
763
|
-
const isDefault = path.includes("DEFAULT");
|
|
764
|
-
path = filterDefault(path);
|
|
765
|
-
assertTokenFormat(token);
|
|
766
|
-
const category = path[0];
|
|
767
|
-
const name = this.formatTokenName(path);
|
|
768
|
-
const normalizedToken = isString2(token.value) || isCompositeTokenValue(token.value) ? { value: { base: token.value } } : token;
|
|
769
|
-
const { value, ...restData } = normalizedToken;
|
|
770
|
-
const node = new Token({
|
|
771
|
-
...restData,
|
|
772
|
-
name,
|
|
773
|
-
value: value.base || "",
|
|
774
|
-
path
|
|
775
|
-
});
|
|
776
|
-
node.setExtensions({
|
|
777
|
-
category,
|
|
778
|
-
conditions: value,
|
|
779
|
-
prop: this.formatTokenName(path.slice(1))
|
|
780
|
-
});
|
|
781
|
-
if (isDefault) {
|
|
782
|
-
node.setExtensions({ isDefault });
|
|
783
|
-
}
|
|
794
|
+
const node = processSemantic(token, path);
|
|
784
795
|
this.registerToken(node);
|
|
785
796
|
},
|
|
786
797
|
{ stop: isToken }
|
|
787
798
|
);
|
|
799
|
+
Object.entries(themes).forEach(([theme, themeVariant]) => {
|
|
800
|
+
const condName = "_theme" + capitalize(theme);
|
|
801
|
+
walkObject2(
|
|
802
|
+
themeVariant.tokens ?? {},
|
|
803
|
+
(token, path) => {
|
|
804
|
+
const themeToken = { value: { [condName]: token.value } };
|
|
805
|
+
const node = processSemantic(themeToken, path);
|
|
806
|
+
node.setExtensions({ theme, isVirtual: true });
|
|
807
|
+
this.registerToken(node);
|
|
808
|
+
},
|
|
809
|
+
{ stop: isToken }
|
|
810
|
+
);
|
|
811
|
+
walkObject2(
|
|
812
|
+
themeVariant.semanticTokens ?? {},
|
|
813
|
+
(token, path) => {
|
|
814
|
+
const themeToken = { value: { [condName]: token.value } };
|
|
815
|
+
const node = processSemantic(themeToken, path);
|
|
816
|
+
node.setExtensions({ theme, isSemantic: true, isVirtual: true });
|
|
817
|
+
this.registerToken(node);
|
|
818
|
+
},
|
|
819
|
+
{ stop: isToken }
|
|
820
|
+
);
|
|
821
|
+
});
|
|
788
822
|
return this;
|
|
789
823
|
}
|
|
790
824
|
registerToken = (token, transformPhase) => {
|
|
@@ -1075,8 +1109,8 @@ var TokenDictionaryView = class {
|
|
|
1075
1109
|
flat.set(token.name, value);
|
|
1076
1110
|
}
|
|
1077
1111
|
processVars(token, group) {
|
|
1078
|
-
const { condition, isNegative, isVirtual, var: varName } = token.extensions;
|
|
1079
|
-
if (isNegative || isVirtual || !condition)
|
|
1112
|
+
const { condition, isNegative, isVirtual, var: varName, theme } = token.extensions;
|
|
1113
|
+
if (isNegative || !theme && isVirtual || !condition)
|
|
1080
1114
|
return;
|
|
1081
1115
|
if (!group.has(condition))
|
|
1082
1116
|
group.set(condition, /* @__PURE__ */ new Map());
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@pandacss/token-dictionary",
|
|
3
|
-
"version": "0.35.0",
|
|
3
|
+
"version": "0.36.1",
|
|
4
4
|
"description": "Common error messages for css panda",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"module": "dist/index.mjs",
|
|
@@ -33,9 +33,9 @@
|
|
|
33
33
|
],
|
|
34
34
|
"dependencies": {
|
|
35
35
|
"ts-pattern": "5.0.8",
|
|
36
|
-
"@pandacss/logger": "^0.35.0",
|
|
37
|
-
"@pandacss/shared": "0.35.0",
|
|
38
|
-
"@pandacss/types": "0.35.0"
|
|
36
|
+
"@pandacss/logger": "^0.36.1",
|
|
37
|
+
"@pandacss/shared": "0.36.1",
|
|
38
|
+
"@pandacss/types": "0.36.1"
|
|
39
39
|
},
|
|
40
40
|
"scripts": {
|
|
41
41
|
"build": "tsup src/index.ts --format=esm,cjs --dts",
|