@xyo-network/crypto-nft-collection-diviner-score-plugin 2.75.0 → 2.75.3

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Files changed (132)
  1. package/dist/browser/Diviner.d.cts +11 -0
  2. package/dist/browser/Diviner.d.cts.map +1 -0
  3. package/dist/browser/Diviner.js +4 -95
  4. package/dist/browser/Diviner.js.map +1 -1
  5. package/dist/browser/Plugin.d.cts +50 -0
  6. package/dist/browser/Plugin.d.cts.map +1 -0
  7. package/dist/browser/Plugin.js +2 -127
  8. package/dist/browser/Plugin.js.map +1 -1
  9. package/dist/browser/index.d.cts +6 -0
  10. package/dist/browser/index.d.cts.map +1 -0
  11. package/dist/browser/index.js +3 -142
  12. package/dist/browser/index.js.map +1 -1
  13. package/dist/browser/lib/index.d.cts +2 -0
  14. package/dist/browser/lib/index.d.cts.map +1 -0
  15. package/dist/browser/lib/index.js +1 -91
  16. package/dist/browser/lib/index.js.map +1 -1
  17. package/dist/browser/lib/rating/analyzeNftCollection.d.cts +9 -0
  18. package/dist/browser/lib/rating/analyzeNftCollection.d.cts.map +1 -0
  19. package/dist/browser/lib/rating/analyzeNftCollection.js +2 -77
  20. package/dist/browser/lib/rating/analyzeNftCollection.js.map +1 -1
  21. package/dist/browser/lib/rating/criteria/index.d.cts +6 -0
  22. package/dist/browser/lib/rating/criteria/index.d.cts.map +1 -0
  23. package/dist/browser/lib/rating/criteria/index.js +2 -71
  24. package/dist/browser/lib/rating/criteria/index.js.map +1 -1
  25. package/dist/browser/lib/rating/criteria/scoring/index.d.cts +3 -0
  26. package/dist/browser/lib/rating/criteria/scoring/index.d.cts.map +1 -0
  27. package/dist/browser/lib/rating/criteria/scoring/index.js +2 -73
  28. package/dist/browser/lib/rating/criteria/scoring/index.js.map +1 -1
  29. package/dist/browser/lib/rating/criteria/scoring/metadata/index.d.cts +2 -0
  30. package/dist/browser/lib/rating/criteria/scoring/metadata/index.d.cts.map +1 -0
  31. package/dist/browser/lib/rating/criteria/scoring/metadata/index.js +1 -52
  32. package/dist/browser/lib/rating/criteria/scoring/metadata/index.js.map +1 -1
  33. package/dist/browser/lib/rating/criteria/scoring/metadata/metadata.d.cts +9 -0
  34. package/dist/browser/lib/rating/criteria/scoring/metadata/metadata.d.cts.map +1 -0
  35. package/dist/browser/lib/rating/criteria/scoring/metadata/metadata.js +4 -43
  36. package/dist/browser/lib/rating/criteria/scoring/metadata/metadata.js.map +1 -1
  37. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.d.cts +4 -0
  38. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.d.cts.map +1 -0
  39. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.js +2 -3
  40. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.js.map +1 -1
  41. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.d.cts +24 -0
  42. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.d.cts.map +1 -0
  43. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.js +6 -7
  44. package/dist/browser/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.js.map +1 -1
  45. package/dist/browser/lib/rating/criteria/scoring/total.d.cts +16 -0
  46. package/dist/browser/lib/rating/criteria/scoring/total.d.cts.map +1 -0
  47. package/dist/browser/lib/rating/criteria/scoring/total.js +8 -9
  48. package/dist/browser/lib/rating/criteria/scoring/total.js.map +1 -1
  49. package/dist/browser/lib/rating/index.d.cts +2 -0
  50. package/dist/browser/lib/rating/index.d.cts.map +1 -0
  51. package/dist/browser/lib/rating/index.js +1 -91
  52. package/dist/browser/lib/rating/index.js.map +1 -1
  53. package/dist/docs.json +21843 -0
  54. package/dist/node/Diviner.d.cts +11 -0
  55. package/dist/node/Diviner.d.cts.map +1 -0
  56. package/dist/node/Diviner.js +98 -6
  57. package/dist/node/Diviner.js.map +1 -1
  58. package/dist/node/Diviner.mjs +96 -5
  59. package/dist/node/Diviner.mjs.map +1 -1
  60. package/dist/node/Plugin.d.cts +50 -0
  61. package/dist/node/Plugin.d.cts.map +1 -0
  62. package/dist/node/Plugin.js +125 -3
  63. package/dist/node/Plugin.js.map +1 -1
  64. package/dist/node/Plugin.mjs +127 -2
  65. package/dist/node/Plugin.mjs.map +1 -1
  66. package/dist/node/index.d.cts +6 -0
  67. package/dist/node/index.d.cts.map +1 -0
  68. package/dist/node/index.js +145 -8
  69. package/dist/node/index.js.map +1 -1
  70. package/dist/node/index.mjs +142 -3
  71. package/dist/node/index.mjs.map +1 -1
  72. package/dist/node/lib/index.d.cts +2 -0
  73. package/dist/node/lib/index.d.cts.map +1 -0
  74. package/dist/node/lib/index.js +99 -3
  75. package/dist/node/lib/index.js.map +1 -1
  76. package/dist/node/lib/index.mjs +91 -1
  77. package/dist/node/lib/index.mjs.map +1 -1
  78. package/dist/node/lib/rating/analyzeNftCollection.d.cts +9 -0
  79. package/dist/node/lib/rating/analyzeNftCollection.d.cts.map +1 -0
  80. package/dist/node/lib/rating/analyzeNftCollection.js +81 -3
  81. package/dist/node/lib/rating/analyzeNftCollection.js.map +1 -1
  82. package/dist/node/lib/rating/analyzeNftCollection.mjs +77 -2
  83. package/dist/node/lib/rating/analyzeNftCollection.mjs.map +1 -1
  84. package/dist/node/lib/rating/criteria/index.d.cts +6 -0
  85. package/dist/node/lib/rating/criteria/index.d.cts.map +1 -0
  86. package/dist/node/lib/rating/criteria/index.js +76 -4
  87. package/dist/node/lib/rating/criteria/index.js.map +1 -1
  88. package/dist/node/lib/rating/criteria/index.mjs +71 -2
  89. package/dist/node/lib/rating/criteria/index.mjs.map +1 -1
  90. package/dist/node/lib/rating/criteria/scoring/index.d.cts +3 -0
  91. package/dist/node/lib/rating/criteria/scoring/index.d.cts.map +1 -0
  92. package/dist/node/lib/rating/criteria/scoring/index.js +83 -5
  93. package/dist/node/lib/rating/criteria/scoring/index.js.map +1 -1
  94. package/dist/node/lib/rating/criteria/scoring/index.mjs +73 -2
  95. package/dist/node/lib/rating/criteria/scoring/index.mjs.map +1 -1
  96. package/dist/node/lib/rating/criteria/scoring/metadata/index.d.cts +2 -0
  97. package/dist/node/lib/rating/criteria/scoring/metadata/index.d.cts.map +1 -0
  98. package/dist/node/lib/rating/criteria/scoring/metadata/index.js +61 -3
  99. package/dist/node/lib/rating/criteria/scoring/metadata/index.js.map +1 -1
  100. package/dist/node/lib/rating/criteria/scoring/metadata/index.mjs +52 -1
  101. package/dist/node/lib/rating/criteria/scoring/metadata/index.mjs.map +1 -1
  102. package/dist/node/lib/rating/criteria/scoring/metadata/metadata.d.cts +9 -0
  103. package/dist/node/lib/rating/criteria/scoring/metadata/metadata.d.cts.map +1 -0
  104. package/dist/node/lib/rating/criteria/scoring/metadata/metadata.js +48 -6
  105. package/dist/node/lib/rating/criteria/scoring/metadata/metadata.js.map +1 -1
  106. package/dist/node/lib/rating/criteria/scoring/metadata/metadata.mjs +43 -4
  107. package/dist/node/lib/rating/criteria/scoring/metadata/metadata.mjs.map +1 -1
  108. package/dist/node/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.d.cts +4 -0
  109. package/dist/node/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.d.cts.map +1 -0
  110. package/dist/node/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.js +4 -2
  111. package/dist/node/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.js.map +1 -1
  112. package/dist/node/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.mjs +3 -2
  113. package/dist/node/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.mjs.map +1 -1
  114. package/dist/node/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.d.cts +24 -0
  115. package/dist/node/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.d.cts.map +1 -0
  116. package/dist/node/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.js +8 -6
  117. package/dist/node/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.js.map +1 -1
  118. package/dist/node/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.mjs +7 -6
  119. package/dist/node/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.mjs.map +1 -1
  120. package/dist/node/lib/rating/criteria/scoring/total.d.cts +16 -0
  121. package/dist/node/lib/rating/criteria/scoring/total.d.cts.map +1 -0
  122. package/dist/node/lib/rating/criteria/scoring/total.js +10 -8
  123. package/dist/node/lib/rating/criteria/scoring/total.js.map +1 -1
  124. package/dist/node/lib/rating/criteria/scoring/total.mjs +9 -8
  125. package/dist/node/lib/rating/criteria/scoring/total.mjs.map +1 -1
  126. package/dist/node/lib/rating/index.d.cts +2 -0
  127. package/dist/node/lib/rating/index.d.cts.map +1 -0
  128. package/dist/node/lib/rating/index.js +99 -3
  129. package/dist/node/lib/rating/index.js.map +1 -1
  130. package/dist/node/lib/rating/index.mjs +91 -1
  131. package/dist/node/lib/rating/index.mjs.map +1 -1
  132. package/package.json +14 -14
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/metadata.ts"],"sourcesContent":["import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { WeightedScoringCriteria } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoreIndividualAttributes } from './scoreIndividualAttributes'\nimport { scoreTotalAttributes } from './scoreTotalAttributes'\n\nexport const attributeScoringCriteria: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n 'Metadata Attributes Individual': { score: scoreIndividualAttributes, weight: 2 },\n 'Metadata Attributes Total': { score: scoreTotalAttributes, weight: 2 },\n}\n\nexport const scoreMetadata: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n ...attributeScoringCriteria,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,uCAA0C;AAC1C,kCAAqC;AAE9B,MAAM,2BAA0F;AAAA,EACrG,kCAAkC,EAAE,OAAO,4DAA2B,QAAQ,EAAE;AAAA,EAChF,6BAA6B,EAAE,OAAO,kDAAsB,QAAQ,EAAE;AACxE;AAEO,MAAM,gBAA+E;AAAA,EAC1F,GAAG;AACL;","names":[]}
+ {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/metadata.ts","../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts","../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts"],"sourcesContent":["import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { WeightedScoringCriteria } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoreIndividualAttributes } from './scoreIndividualAttributes'\nimport { scoreTotalAttributes } from './scoreTotalAttributes'\n\nexport const attributeScoringCriteria: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n 'Metadata Attributes Individual': { score: scoreIndividualAttributes, weight: 2 },\n 'Metadata Attributes Total': { score: scoreTotalAttributes, weight: 2 },\n}\n\nexport const scoreMetadata: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n ...attributeScoringCriteria,\n}\n","import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n","import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian 
distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,oCAAiC;AAEjC,IAAM,WAAW;AAEV,IAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,aAAO,yCAAU,OAAO,QAAQ;AAClC;;;AClBA,IAAAA,iCAAiC;AAEjC,IAAMC,YAAW;AAKjB,IAAM,YAAY;AAKlB,IAAM,eAAe;AASrB,IAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,IAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,IAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,aAAO,0CAAU,OAAOA,SAAQ;AAClC;;;AF1DO,IAAM,2BAA0F;AAAA,EACrG,kCAAkC,EAAE,OAAO,2BAA2B,QAAQ,EAAE;AAAA,EAChF,6BAA6B,EAAE,OAAO,sBAAsB,QAAQ,EAAE;AACxE;AAEO,IAAM,gBAA+E;AAAA,EAC1F,GAAG;AACL;","names":["import_crypto_nft_score_model","maxScore"]}
@@ -1,10 +1,49 @@
- import { scoreIndividualAttributes } from "./scoreIndividualAttributes";
- import { scoreTotalAttributes } from "./scoreTotalAttributes";
- const attributeScoringCriteria = {
+ // src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts
+ import { normalize } from "@xyo-network/crypto-nft-score-model";
+ var maxScore = 10;
+ var scoreIndividualAttributes = (info) => {
+ const { attributes } = info.metrics.metadata;
+ const entries = Object.entries(attributes);
+ if (entries.length === 0)
+ return [0, maxScore];
+ const scores = Object.entries(attributes).map(([_trait, { values }]) => {
+ return Object.entries(values).map(([_traitValue, metrics]) => {
+ const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore);
+ return [rarity, maxScore];
+ });
+ }).flat();
+ const total = scores.reduce(([a, b], [c, d]) => [a + c, b + d], [0, 0]);
+ return normalize(total, maxScore);
+ };
+
+ // src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts
+ import { normalize as normalize2 } from "@xyo-network/crypto-nft-score-model";
+ var maxScore2 = 10;
+ var defaultMu = 0.15;
+ var defaultSigma = 0.1;
+ var gaussianProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
+ const sqrtTwoPi = Math.sqrt(2 * Math.PI);
+ const denominator = sigma * sqrtTwoPi;
+ const power = -0.5 * Math.pow((x - mu) / sigma, 2);
+ return 1 / denominator * Math.exp(power);
+ };
+ var maxProbabilityDensity = gaussianProbabilityDensity(defaultMu);
+ var scoreTotalAttributes = (info) => {
+ const { attributes } = info.metrics.metadata;
+ const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {
+ return acc * metrics.binomial.p;
+ }, 1);
+ const probabilityDensity = gaussianProbabilityDensity(jointProbability);
+ const score = [probabilityDensity, maxProbabilityDensity];
+ return normalize2(score, maxScore2);
+ };
+
+ // src/lib/rating/criteria/scoring/metadata/metadata.ts
+ var attributeScoringCriteria = {
  "Metadata Attributes Individual": { score: scoreIndividualAttributes, weight: 2 },
  "Metadata Attributes Total": { score: scoreTotalAttributes, weight: 2 }
  };
- const scoreMetadata = {
+ var scoreMetadata = {
  ...attributeScoringCriteria
  };
  export {
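
The bundled output above inlines the individual-attribute scorer: every trait value's binomial probability p becomes a rarity of Math.min(Math.round((1 - p) * maxScore), maxScore), the per-value [earned, possible] pairs are summed, and the sum is normalized to a 0-10 score. A minimal TypeScript sketch of that flow, using hypothetical trait data and a stand-in normalize (the real helper comes from @xyo-network/crypto-nft-score-model and its exact behavior is assumed here):

type Score = [number, number]
type TraitValueMetrics = { binomial: { p: number } }

// Stand-in for normalize() from @xyo-network/crypto-nft-score-model: assumed to rescale
// an [earned, possible] pair onto a 0..maxScore range.
const normalize = ([earned, possible]: Score, maxScore: number): Score =>
  [possible === 0 ? 0 : (earned / possible) * maxScore, maxScore]

const maxScore = 10

// Hypothetical trait metrics shaped like the attributes field used in the bundled code
const attributes: Record<string, { values: Record<string, TraitValueMetrics> }> = {
  Background: { values: { Gold: { binomial: { p: 0.02 } }, Blue: { binomial: { p: 0.98 } } } },
  Hat: { values: { Crown: { binomial: { p: 0.1 } } } },
}

// Rarity per trait value: rarer values (low p) earn up to 10 of 10 possible points
const scores = Object.values(attributes).flatMap(({ values }) =>
  Object.values(values).map<Score>(({ binomial }) =>
    [Math.min(Math.round((1 - binomial.p) * maxScore), maxScore), maxScore]))

const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])
console.log(normalize(total, maxScore)) // Gold ≈ 10, Blue ≈ 0, Crown = 9 → roughly [6.33, 10]

With the hypothetical data above, the two rare values (Gold and Crown) pull the collection to roughly 6.3 out of 10.
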
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/metadata.ts"],"sourcesContent":["import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { WeightedScoringCriteria } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoreIndividualAttributes } from './scoreIndividualAttributes'\nimport { scoreTotalAttributes } from './scoreTotalAttributes'\n\nexport const attributeScoringCriteria: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n 'Metadata Attributes Individual': { score: scoreIndividualAttributes, weight: 2 },\n 'Metadata Attributes Total': { score: scoreTotalAttributes, weight: 2 },\n}\n\nexport const scoreMetadata: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n ...attributeScoringCriteria,\n}\n"],"mappings":"AAGA,SAAS,iCAAiC;AAC1C,SAAS,4BAA4B;AAE9B,MAAM,2BAA0F;AAAA,EACrG,kCAAkC,EAAE,OAAO,2BAA2B,QAAQ,EAAE;AAAA,EAChF,6BAA6B,EAAE,OAAO,sBAAsB,QAAQ,EAAE;AACxE;AAEO,MAAM,gBAA+E;AAAA,EAC1F,GAAG;AACL;","names":[]}
+ {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts","../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts","../../../../../../../src/lib/rating/criteria/scoring/metadata/metadata.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n","import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, 
maxScore)\n}\n","import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { WeightedScoringCriteria } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoreIndividualAttributes } from './scoreIndividualAttributes'\nimport { scoreTotalAttributes } from './scoreTotalAttributes'\n\nexport const attributeScoringCriteria: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n 'Metadata Attributes Individual': { score: scoreIndividualAttributes, weight: 2 },\n 'Metadata Attributes Total': { score: scoreTotalAttributes, weight: 2 },\n}\n\nexport const scoreMetadata: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n ...attributeScoringCriteria,\n}\n"],"mappings":";AACA,SAAS,iBAAwB;AAEjC,IAAM,WAAW;AAEV,IAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,SAAO,UAAU,OAAO,QAAQ;AAClC;;;AClBA,SAAS,aAAAA,kBAAwB;AAEjC,IAAMC,YAAW;AAKjB,IAAM,YAAY;AAKlB,IAAM,eAAe;AASrB,IAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,IAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,IAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,SAAOD,WAAU,OAAOC,SAAQ;AAClC;;;AC1DO,IAAM,2BAA0F;AAAA,EACrG,kCAAkC,EAAE,OAAO,2BAA2B,QAAQ,EAAE;AAAA,EAChF,6BAA6B,EAAE,OAAO,sBAAsB,QAAQ,EAAE;AACxE;AAEO,IAAM,gBAA+E;AAAA,EAC1F,GAAG;AACL;","names":["normalize","maxScore"]}
@@ -0,0 +1,4 @@
+ import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin';
+ import { Score } from '@xyo-network/crypto-nft-score-model';
+ export declare const scoreIndividualAttributes: (info: NftCollectionAttributeMetrics) => Score;
+ //# sourceMappingURL=scoreIndividualAttributes.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"scoreIndividualAttributes.d.ts","sourceRoot":"","sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,6BAA6B,EAAE,MAAM,mDAAmD,CAAA;AACjG,OAAO,EAAa,KAAK,EAAE,MAAM,qCAAqC,CAAA;AAItE,eAAO,MAAM,yBAAyB,SAAU,6BAA6B,KAAG,KAc/E,CAAA"}
@@ -16,14 +16,16 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts
  var scoreIndividualAttributes_exports = {};
  __export(scoreIndividualAttributes_exports, {
  scoreIndividualAttributes: () => scoreIndividualAttributes
  });
  module.exports = __toCommonJS(scoreIndividualAttributes_exports);
  var import_crypto_nft_score_model = require("@xyo-network/crypto-nft-score-model");
- const maxScore = 10;
- const scoreIndividualAttributes = (info) => {
+ var maxScore = 10;
+ var scoreIndividualAttributes = (info) => {
  const { attributes } = info.metrics.metadata;
  const entries = Object.entries(attributes);
  if (entries.length === 0)
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oCAAiC;AAEjC,MAAM,WAAW;AAEV,MAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,aAAO,yCAAU,OAAO,QAAQ;AAClC;","names":[]}
+ {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oCAAiC;AAEjC,IAAM,WAAW;AAEV,IAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,aAAO,yCAAU,OAAO,QAAQ;AAClC;","names":[]}
@@ -1,6 +1,7 @@
+ // src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts
  import { normalize } from "@xyo-network/crypto-nft-score-model";
- const maxScore = 10;
- const scoreIndividualAttributes = (info) => {
+ var maxScore = 10;
+ var scoreIndividualAttributes = (info) => {
  const { attributes } = info.metrics.metadata;
  const entries = Object.entries(attributes);
  if (entries.length === 0)
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n"],"mappings":"AACA,SAAS,iBAAwB;AAEjC,MAAM,WAAW;AAEV,MAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,SAAO,UAAU,OAAO,QAAQ;AAClC;","names":[]}
+ {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n"],"mappings":";AACA,SAAS,iBAAwB;AAEjC,IAAM,WAAW;AAEV,IAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,SAAO,UAAU,OAAO,QAAQ;AAClC;","names":[]}
@@ -0,0 +1,24 @@
+ import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin';
+ import { Score } from '@xyo-network/crypto-nft-score-model';
+ /**
+ * We're working on some assumptions here:
+ *
+ * - If you have a 100% chance of getting a trait, everyone get's a trophy
+ * - If you have a 50% chance of getting a trait, it's not rare
+ * - If you have a 0% chance of getting a trait, it's not fun
+ *
+ * So we're looking for something Pareto-ish (somewhere between
+ * 80/20 or 90/10) as that's a good & sustainable model for the
+ * distribution of many traits in real life.
+ * However, we also don't want to maximally reward collections
+ * that have a lot of single attributes distributed uniformly
+ * (basically a 0% trait probably) as that's perfectly entropic
+ * but not very interesting (some overlap is desirable).
+ * So we're using a Gaussian distribution to model the
+ * probability density of the joint probability of all traits
+ * centered around 15%.
+ * @param info
+ * @returns
+ */
+ export declare const scoreTotalAttributes: (info: NftCollectionAttributeMetrics) => Score;
+ //# sourceMappingURL=scoreTotalAttributes.d.ts.map
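
Concretely, the new scoreTotalAttributes (shown in the bundled hunks elsewhere in this diff) multiplies every trait's binomial probability into one joint probability and scores it against a Gaussian with mu = 0.15 and sigma = 0.1, normalized by the density at the peak. A standalone TypeScript sketch of that curve; dividing by the peak and scaling to 10 approximates what normalize([density, maxDensity], 10) is assumed to do:

// Gaussian probability density with the defaults from the diff (mu = 0.15, sigma = 0.1)
const gaussianPdf = (x: number, mu = 0.15, sigma = 0.1): number =>
  Math.exp(-0.5 * ((x - mu) / sigma) ** 2) / (sigma * Math.sqrt(2 * Math.PI))

// For a Gaussian, the peak sits at the mean, so it serves as the score's denominator
const peak = gaussianPdf(0.15)

// A joint probability of 0.15 earns the full 10; values far from 15% fall away quickly
for (const jointProbability of [0.15, 0.3, 0.5, 1]) {
  console.log(jointProbability, ((gaussianPdf(jointProbability) / peak) * 10).toFixed(2))
}
// prints ≈ 10.00, 3.25, 0.02, 0.00

The sigma of 0.1 keeps the reward tightly focused around the 15% target described in the comment above, so a collection whose traits are all common (joint probability near 1) earns essentially nothing.
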
@@ -0,0 +1 @@
+ {"version":3,"file":"scoreTotalAttributes.d.ts","sourceRoot":"","sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,6BAA6B,EAAE,MAAM,mDAAmD,CAAA;AACjG,OAAO,EAAa,KAAK,EAAE,MAAM,qCAAqC,CAAA;AAiCtE;;;;;;;;;;;;;;;;;;;GAmBG;AACH,eAAO,MAAM,oBAAoB,SAAU,6BAA6B,KAAG,KAU1E,CAAA"}
@@ -16,23 +16,25 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts
  var scoreTotalAttributes_exports = {};
  __export(scoreTotalAttributes_exports, {
  scoreTotalAttributes: () => scoreTotalAttributes
  });
  module.exports = __toCommonJS(scoreTotalAttributes_exports);
  var import_crypto_nft_score_model = require("@xyo-network/crypto-nft-score-model");
- const maxScore = 10;
- const defaultMu = 0.15;
- const defaultSigma = 0.1;
- const gaussianProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
+ var maxScore = 10;
+ var defaultMu = 0.15;
+ var defaultSigma = 0.1;
+ var gaussianProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
  const sqrtTwoPi = Math.sqrt(2 * Math.PI);
  const denominator = sigma * sqrtTwoPi;
  const power = -0.5 * Math.pow((x - mu) / sigma, 2);
  return 1 / denominator * Math.exp(power);
  };
- const maxProbabilityDensity = gaussianProbabilityDensity(defaultMu);
- const scoreTotalAttributes = (info) => {
+ var maxProbabilityDensity = gaussianProbabilityDensity(defaultMu);
+ var scoreTotalAttributes = (info) => {
  const { attributes } = info.metrics.metadata;
  const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {
  return acc * metrics.binomial.p;
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oCAAiC;AAEjC,MAAM,WAAW;AAKjB,MAAM,YAAY;AAKlB,MAAM,eAAe;AASrB,MAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,MAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,MAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,aAAO,yCAAU,OAAO,QAAQ;AAClC;","names":[]}
+ {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oCAAiC;AAEjC,IAAM,WAAW;AAKjB,IAAM,YAAY;AAKlB,IAAM,eAAe;AASrB,IAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,IAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,IAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,aAAO,yCAAU,OAAO,QAAQ;AAClC;","names":[]}
@@ -1,15 +1,16 @@
+ // src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts
  import { normalize } from "@xyo-network/crypto-nft-score-model";
- const maxScore = 10;
- const defaultMu = 0.15;
- const defaultSigma = 0.1;
- const gaussianProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
+ var maxScore = 10;
+ var defaultMu = 0.15;
+ var defaultSigma = 0.1;
+ var gaussianProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
  const sqrtTwoPi = Math.sqrt(2 * Math.PI);
  const denominator = sigma * sqrtTwoPi;
  const power = -0.5 * Math.pow((x - mu) / sigma, 2);
  return 1 / denominator * Math.exp(power);
  };
- const maxProbabilityDensity = gaussianProbabilityDensity(defaultMu);
- const scoreTotalAttributes = (info) => {
+ var maxProbabilityDensity = gaussianProbabilityDensity(defaultMu);
+ var scoreTotalAttributes = (info) => {
  const { attributes } = info.metrics.metadata;
  const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {
  return acc * metrics.binomial.p;
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":"AACA,SAAS,iBAAwB;AAEjC,MAAM,WAAW;AAKjB,MAAM,YAAY;AAKlB,MAAM,eAAe;AASrB,MAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,MAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,MAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,SAAO,UAAU,OAAO,QAAQ;AAClC;","names":[]}
+ {"version":3,"sources":["../../../../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts"],"sourcesContent":["import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";AACA,SAAS,iBAAwB;AAEjC,IAAM,WAAW;AAKjB,IAAM,YAAY;AAKlB,IAAM,eAAe;AASrB,IAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,IAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,IAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,SAAO,UAAU,OAAO,QAAQ;AAClC;","names":[]}
@@ -0,0 +1,16 @@
+ import { NftCollectionCount } from '@xyo-network/crypto-nft-collection-payload-plugin';
+ import { Score } from '@xyo-network/crypto-nft-score-model';
+ /**
+ * We're working on some assumptions here:
+ * - If there's < 1000 NFTs in your collection it starts becoming too niche
+ * - If there's > 20,000 NFTs in your collection it starts becoming too broad
+ * So there's a sweet spot somewhere between 2000 and 10,000
+ * where a collection has enough NFTs to be interesting, but
+ * not so many that it's teetering on a diluted money grab.
+ * To model that we're using a log-normal distribution optimized
+ * to maximally reward collections in the aforementioned range
+ * @param nft
+ * @returns
+ */
+ export declare const scoreTotal: (nft: NftCollectionCount) => Score;
+ //# sourceMappingURL=total.d.ts.map
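
For the collection-size score, the bundled total.js/total.mjs hunks below show the mechanics: the "magic" median 81030839.8217352 with sigma = 3 places the log-normal mode near 10,000, and a collection's total supply is scored by its density relative to that peak. A standalone TypeScript sketch, again approximating normalize as density / peak * 10:

// Log-normal density using the constants from the diff; the mode lands near 10,000 NFTs
const median = 81030839.8217352
const mu = Math.log(median)
const sigma = 3
const mode = Math.exp(mu - sigma ** 2) // ≈ 10,000

const logNormalPdf = (x: number): number =>
  x <= 0 ? 0 : Math.exp(-0.5 * ((Math.log(x) - mu) / sigma) ** 2) / (x * sigma * Math.sqrt(2 * Math.PI))

// For a log-normal, the peak of the density is at the mode
const peak = logNormalPdf(mode)

// Totals near 10,000 score close to 10; very small or very large collections taper off
for (const total of [100, 1000, 10000, 20000, 1000000]) {
  console.log(total, ((logNormalPdf(total) / peak) * 10).toFixed(2))
}
// prints roughly 3.1, 7.4, 10.0, 9.7, 3.1 for the totals above

The Excel-solved median constant exists only to pin the mode of this distribution at 10,000, the sweet spot the comment above describes; everything else follows from the standard log-normal density.
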
@@ -0,0 +1 @@
+ {"version":3,"file":"total.d.ts","sourceRoot":"","sources":["../../../../../../src/lib/rating/criteria/scoring/total.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mDAAmD,CAAA;AACtF,OAAO,EAAa,KAAK,EAAE,MAAM,qCAAqC,CAAA;AA+BtE;;;;;;;;;;;GAWG;AACH,eAAO,MAAM,UAAU,QAAS,kBAAkB,KAAG,KAIpD,CAAA"}
@@ -16,25 +16,27 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/lib/rating/criteria/scoring/total.ts
  var total_exports = {};
  __export(total_exports, {
  scoreTotal: () => scoreTotal
  });
  module.exports = __toCommonJS(total_exports);
  var import_crypto_nft_score_model = require("@xyo-network/crypto-nft-score-model");
- const median = 810308398217352e-7;
- const defaultMu = Math.log(median);
- const defaultSigma = 3;
- const mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2));
- const logNormalProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
+ var median = 810308398217352e-7;
+ var defaultMu = Math.log(median);
+ var defaultSigma = 3;
+ var mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2));
+ var logNormalProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
  if (x <= 0)
  return 0;
  const logX = Math.log(x);
  return 1 / (x * sigma * Math.sqrt(2 * Math.PI)) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2));
  };
- const maxProbabilityDensity = logNormalProbabilityDensity(mode);
- const maxScore = 10;
- const scoreTotal = (nft) => {
+ var maxProbabilityDensity = logNormalProbabilityDensity(mode);
+ var maxScore = 10;
+ var scoreTotal = (nft) => {
  const density = logNormalProbabilityDensity(nft.total);
  const score = [density, maxProbabilityDensity];
  return (0, import_crypto_nft_score_model.normalize)(score, maxScore);
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../src/lib/rating/criteria/scoring/total.ts"],"sourcesContent":["import { NftCollectionCount } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\n/**\n * This \"magic\" value was obtained using Solver in Excel\n * to find the median, with mu/sigma fixed, which maximizes\n * the distribution (the mode for lognormal) at 10,000\n */\nconst median = 81030839.8217352\nconst defaultMu = Math.log(median)\nconst defaultSigma = 3\nconst mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2))\n/**\n * Calculates the log-normal probability density\n * @param x the value at which you want to calculate the probability density\n * @param mu mean of the associated normal distribution\n * @param sigma standard deviation of the associated normal distribution\n * @returns\n */\nconst logNormalProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n if (x <= 0) return 0\n const logX = Math.log(x)\n return (1 / (x * sigma * Math.sqrt(2 * Math.PI))) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2))\n}\n\n/**\n * For a lognormal distribution, the peak of the distribution is the mode\n */\nconst maxProbabilityDensity = logNormalProbabilityDensity(mode)\n\nconst maxScore = 10\n\n/**\n * We're working on some assumptions here:\n * - If there's < 1000 NFTs in your collection it starts becoming too niche\n * - If there's > 20,000 NFTs in your collection it starts becoming too broad\n * So there's a sweet spot somewhere between 2000 and 10,000\n * where a collection has enough NFTs to be interesting, but\n * not so many that it's teetering on a diluted money grab.\n * To model that we're using a log-normal distribution optimized\n * to maximally reward collections in the aforementioned range\n * @param nft\n * @returns\n */\nexport const scoreTotal = (nft: NftCollectionCount): Score => {\n const density = logNormalProbabilityDensity(nft.total)\n const score: Score = [density, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oCAAiC;AAOjC,MAAM,SAAS;AACf,MAAM,YAAY,KAAK,IAAI,MAAM;AACjC,MAAM,eAAe;AACrB,MAAM,OAAO,KAAK,IAAI,YAAY,KAAK,IAAI,cAAc,CAAC,CAAC;AAQ3D,MAAM,8BAA8B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC/G,MAAI,KAAK;AAAG,WAAO;AACnB,QAAM,OAAO,KAAK,IAAI,CAAC;AACvB,SAAQ,KAAK,IAAI,QAAQ,KAAK,KAAK,IAAI,KAAK,EAAE,KAAM,KAAK,IAAI,OAAO,KAAK,KAAK,OAAO,MAAM,OAAO,CAAC,CAAC;AACtG;AAKA,MAAM,wBAAwB,4BAA4B,IAAI;AAE9D,MAAM,WAAW;AAcV,MAAM,aAAa,CAAC,QAAmC;AAC5D,QAAM,UAAU,4BAA4B,IAAI,KAAK;AACrD,QAAM,QAAe,CAAC,SAAS,qBAAqB;AACpD,aAAO,yCAAU,OAAO,QAAQ;AAClC;","names":[]}
+ {"version":3,"sources":["../../../../../../src/lib/rating/criteria/scoring/total.ts"],"sourcesContent":["import { NftCollectionCount } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\n/**\n * This \"magic\" value was obtained using Solver in Excel\n * to find the median, with mu/sigma fixed, which maximizes\n * the distribution (the mode for lognormal) at 10,000\n */\nconst median = 81030839.8217352\nconst defaultMu = Math.log(median)\nconst defaultSigma = 3\nconst mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2))\n/**\n * Calculates the log-normal probability density\n * @param x the value at which you want to calculate the probability density\n * @param mu mean of the associated normal distribution\n * @param sigma standard deviation of the associated normal distribution\n * @returns\n */\nconst logNormalProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n if (x <= 0) return 0\n const logX = Math.log(x)\n return (1 / (x * sigma * Math.sqrt(2 * Math.PI))) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2))\n}\n\n/**\n * For a lognormal distribution, the peak of the distribution is the mode\n */\nconst maxProbabilityDensity = logNormalProbabilityDensity(mode)\n\nconst maxScore = 10\n\n/**\n * We're working on some assumptions here:\n * - If there's < 1000 NFTs in your collection it starts becoming too niche\n * - If there's > 20,000 NFTs in your collection it starts becoming too broad\n * So there's a sweet spot somewhere between 2000 and 10,000\n * where a collection has enough NFTs to be interesting, but\n * not so many that it's teetering on a diluted money grab.\n * To model that we're using a log-normal distribution optimized\n * to maximally reward collections in the aforementioned range\n * @param nft\n * @returns\n */\nexport const scoreTotal = (nft: NftCollectionCount): Score => {\n const density = logNormalProbabilityDensity(nft.total)\n const score: Score = [density, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oCAAiC;AAOjC,IAAM,SAAS;AACf,IAAM,YAAY,KAAK,IAAI,MAAM;AACjC,IAAM,eAAe;AACrB,IAAM,OAAO,KAAK,IAAI,YAAY,KAAK,IAAI,cAAc,CAAC,CAAC;AAQ3D,IAAM,8BAA8B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC/G,MAAI,KAAK;AAAG,WAAO;AACnB,QAAM,OAAO,KAAK,IAAI,CAAC;AACvB,SAAQ,KAAK,IAAI,QAAQ,KAAK,KAAK,IAAI,KAAK,EAAE,KAAM,KAAK,IAAI,OAAO,KAAK,KAAK,OAAO,MAAM,OAAO,CAAC,CAAC;AACtG;AAKA,IAAM,wBAAwB,4BAA4B,IAAI;AAE9D,IAAM,WAAW;AAcV,IAAM,aAAa,CAAC,QAAmC;AAC5D,QAAM,UAAU,4BAA4B,IAAI,KAAK;AACrD,QAAM,QAAe,CAAC,SAAS,qBAAqB;AACpD,aAAO,yCAAU,OAAO,QAAQ;AAClC;","names":[]}
@@ -1,17 +1,18 @@
+ // src/lib/rating/criteria/scoring/total.ts
  import { normalize } from "@xyo-network/crypto-nft-score-model";
- const median = 810308398217352e-7;
- const defaultMu = Math.log(median);
- const defaultSigma = 3;
- const mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2));
- const logNormalProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
+ var median = 810308398217352e-7;
+ var defaultMu = Math.log(median);
+ var defaultSigma = 3;
+ var mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2));
+ var logNormalProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
  if (x <= 0)
  return 0;
  const logX = Math.log(x);
  return 1 / (x * sigma * Math.sqrt(2 * Math.PI)) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2));
  };
- const maxProbabilityDensity = logNormalProbabilityDensity(mode);
- const maxScore = 10;
- const scoreTotal = (nft) => {
+ var maxProbabilityDensity = logNormalProbabilityDensity(mode);
+ var maxScore = 10;
+ var scoreTotal = (nft) => {
  const density = logNormalProbabilityDensity(nft.total);
  const score = [density, maxProbabilityDensity];
  return normalize(score, maxScore);
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../../src/lib/rating/criteria/scoring/total.ts"],"sourcesContent":["import { NftCollectionCount } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\n/**\n * This \"magic\" value was obtained using Solver in Excel\n * to find the median, with mu/sigma fixed, which maximizes\n * the distribution (the mode for lognormal) at 10,000\n */\nconst median = 81030839.8217352\nconst defaultMu = Math.log(median)\nconst defaultSigma = 3\nconst mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2))\n/**\n * Calculates the log-normal probability density\n * @param x the value at which you want to calculate the probability density\n * @param mu mean of the associated normal distribution\n * @param sigma standard deviation of the associated normal distribution\n * @returns\n */\nconst logNormalProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n if (x <= 0) return 0\n const logX = Math.log(x)\n return (1 / (x * sigma * Math.sqrt(2 * Math.PI))) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2))\n}\n\n/**\n * For a lognormal distribution, the peak of the distribution is the mode\n */\nconst maxProbabilityDensity = logNormalProbabilityDensity(mode)\n\nconst maxScore = 10\n\n/**\n * We're working on some assumptions here:\n * - If there's < 1000 NFTs in your collection it starts becoming too niche\n * - If there's > 20,000 NFTs in your collection it starts becoming too broad\n * So there's a sweet spot somewhere between 2000 and 10,000\n * where a collection has enough NFTs to be interesting, but\n * not so many that it's teetering on a diluted money grab.\n * To model that we're using a log-normal distribution optimized\n * to maximally reward collections in the aforementioned range\n * @param nft\n * @returns\n */\nexport const scoreTotal = (nft: NftCollectionCount): Score => {\n const density = logNormalProbabilityDensity(nft.total)\n const score: Score = [density, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":"AACA,SAAS,iBAAwB;AAOjC,MAAM,SAAS;AACf,MAAM,YAAY,KAAK,IAAI,MAAM;AACjC,MAAM,eAAe;AACrB,MAAM,OAAO,KAAK,IAAI,YAAY,KAAK,IAAI,cAAc,CAAC,CAAC;AAQ3D,MAAM,8BAA8B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC/G,MAAI,KAAK;AAAG,WAAO;AACnB,QAAM,OAAO,KAAK,IAAI,CAAC;AACvB,SAAQ,KAAK,IAAI,QAAQ,KAAK,KAAK,IAAI,KAAK,EAAE,KAAM,KAAK,IAAI,OAAO,KAAK,KAAK,OAAO,MAAM,OAAO,CAAC,CAAC;AACtG;AAKA,MAAM,wBAAwB,4BAA4B,IAAI;AAE9D,MAAM,WAAW;AAcV,MAAM,aAAa,CAAC,QAAmC;AAC5D,QAAM,UAAU,4BAA4B,IAAI,KAAK;AACrD,QAAM,QAAe,CAAC,SAAS,qBAAqB;AACpD,SAAO,UAAU,OAAO,QAAQ;AAClC;","names":[]}
+ {"version":3,"sources":["../../../../../../src/lib/rating/criteria/scoring/total.ts"],"sourcesContent":["import { NftCollectionCount } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\n/**\n * This \"magic\" value was obtained using Solver in Excel\n * to find the median, with mu/sigma fixed, which maximizes\n * the distribution (the mode for lognormal) at 10,000\n */\nconst median = 81030839.8217352\nconst defaultMu = Math.log(median)\nconst defaultSigma = 3\nconst mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2))\n/**\n * Calculates the log-normal probability density\n * @param x the value at which you want to calculate the probability density\n * @param mu mean of the associated normal distribution\n * @param sigma standard deviation of the associated normal distribution\n * @returns\n */\nconst logNormalProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n if (x <= 0) return 0\n const logX = Math.log(x)\n return (1 / (x * sigma * Math.sqrt(2 * Math.PI))) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2))\n}\n\n/**\n * For a lognormal distribution, the peak of the distribution is the mode\n */\nconst maxProbabilityDensity = logNormalProbabilityDensity(mode)\n\nconst maxScore = 10\n\n/**\n * We're working on some assumptions here:\n * - If there's < 1000 NFTs in your collection it starts becoming too niche\n * - If there's > 20,000 NFTs in your collection it starts becoming too broad\n * So there's a sweet spot somewhere between 2000 and 10,000\n * where a collection has enough NFTs to be interesting, but\n * not so many that it's teetering on a diluted money grab.\n * To model that we're using a log-normal distribution optimized\n * to maximally reward collections in the aforementioned range\n * @param nft\n * @returns\n */\nexport const scoreTotal = (nft: NftCollectionCount): Score => {\n const density = logNormalProbabilityDensity(nft.total)\n const score: Score = [density, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n"],"mappings":";AACA,SAAS,iBAAwB;AAOjC,IAAM,SAAS;AACf,IAAM,YAAY,KAAK,IAAI,MAAM;AACjC,IAAM,eAAe;AACrB,IAAM,OAAO,KAAK,IAAI,YAAY,KAAK,IAAI,cAAc,CAAC,CAAC;AAQ3D,IAAM,8BAA8B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC/G,MAAI,KAAK;AAAG,WAAO;AACnB,QAAM,OAAO,KAAK,IAAI,CAAC;AACvB,SAAQ,KAAK,IAAI,QAAQ,KAAK,KAAK,IAAI,KAAK,EAAE,KAAM,KAAK,IAAI,OAAO,KAAK,KAAK,OAAO,MAAM,OAAO,CAAC,CAAC;AACtG;AAKA,IAAM,wBAAwB,4BAA4B,IAAI;AAE9D,IAAM,WAAW;AAcV,IAAM,aAAa,CAAC,QAAmC;AAC5D,QAAM,UAAU,4BAA4B,IAAI,KAAK;AACrD,QAAM,QAAe,CAAC,SAAS,qBAAqB;AACpD,SAAO,UAAU,OAAO,QAAQ;AAClC;","names":[]}
@@ -0,0 +1,2 @@
+ export * from './analyzeNftCollection';
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/lib/rating/index.ts"],"names":[],"mappings":"AAAA,cAAc,wBAAwB,CAAA"}
@@ -3,6 +3,10 @@ var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
  var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
  for (let key of __getOwnPropNames(from))
@@ -11,13 +15,105 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
- var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/lib/rating/index.ts
  var rating_exports = {};
+ __export(rating_exports, {
+ analyzeNftCollection: () => analyzeNftCollection
+ });
  module.exports = __toCommonJS(rating_exports);
- __reExport(rating_exports, require("./analyzeNftCollection"), module.exports);
+
+ // src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts
+ var import_crypto_nft_score_model = require("@xyo-network/crypto-nft-score-model");
+ var maxScore = 10;
+ var scoreIndividualAttributes = (info) => {
+ const { attributes } = info.metrics.metadata;
+ const entries = Object.entries(attributes);
+ if (entries.length === 0)
+ return [0, maxScore];
+ const scores = Object.entries(attributes).map(([_trait, { values }]) => {
+ return Object.entries(values).map(([_traitValue, metrics]) => {
+ const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore);
+ return [rarity, maxScore];
+ });
+ }).flat();
+ const total = scores.reduce(([a, b], [c, d]) => [a + c, b + d], [0, 0]);
+ return (0, import_crypto_nft_score_model.normalize)(total, maxScore);
+ };
+
+ // src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts
+ var import_crypto_nft_score_model2 = require("@xyo-network/crypto-nft-score-model");
+ var maxScore2 = 10;
+ var defaultMu = 0.15;
+ var defaultSigma = 0.1;
+ var gaussianProbabilityDensity = (x, mu = defaultMu, sigma = defaultSigma) => {
+ const sqrtTwoPi = Math.sqrt(2 * Math.PI);
+ const denominator = sigma * sqrtTwoPi;
+ const power = -0.5 * Math.pow((x - mu) / sigma, 2);
+ return 1 / denominator * Math.exp(power);
+ };
+ var maxProbabilityDensity = gaussianProbabilityDensity(defaultMu);
+ var scoreTotalAttributes = (info) => {
+ const { attributes } = info.metrics.metadata;
+ const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {
+ return acc * metrics.binomial.p;
+ }, 1);
+ const probabilityDensity = gaussianProbabilityDensity(jointProbability);
+ const score = [probabilityDensity, maxProbabilityDensity];
+ return (0, import_crypto_nft_score_model2.normalize)(score, maxScore2);
+ };
+
+ // src/lib/rating/criteria/scoring/metadata/metadata.ts
+ var attributeScoringCriteria = {
+ "Metadata Attributes Individual": { score: scoreIndividualAttributes, weight: 2 },
+ "Metadata Attributes Total": { score: scoreTotalAttributes, weight: 2 }
+ };
+ var scoreMetadata = {
+ ...attributeScoringCriteria
+ };
+
+ // src/lib/rating/criteria/scoring/total.ts
+ var import_crypto_nft_score_model3 = require("@xyo-network/crypto-nft-score-model");
+ var median = 810308398217352e-7;
+ var defaultMu2 = Math.log(median);
+ var defaultSigma2 = 3;
+ var mode = Math.exp(defaultMu2 - Math.pow(defaultSigma2, 2));
+ var logNormalProbabilityDensity = (x, mu = defaultMu2, sigma = defaultSigma2) => {
+ if (x <= 0)
+ return 0;
+ const logX = Math.log(x);
+ return 1 / (x * sigma * Math.sqrt(2 * Math.PI)) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2));
+ };
+ var maxProbabilityDensity2 = logNormalProbabilityDensity(mode);
+ var maxScore3 = 10;
+ var scoreTotal = (nft) => {
+ const density = logNormalProbabilityDensity(nft.total);
+ const score = [density, maxProbabilityDensity2];
+ return (0, import_crypto_nft_score_model3.normalize)(score, maxScore3);
+ };
+
+ // src/lib/rating/criteria/index.ts
+ var scoringCriteria = {
+ ...scoreMetadata,
+ Total: { score: scoreTotal, weight: 2 }
+ };
+
+ // src/lib/rating/analyzeNftCollection.ts
+ var analyzeNftCollection = async (nft) => {
+ const result = Object.fromEntries(
+ await Promise.all(
+ Object.entries(scoringCriteria).map(async ([key, { score, weight }]) => {
+ const rawScore = await score(nft);
+ const weighted = rawScore.map((v) => v * weight);
+ return [key, weighted];
+ })
+ )
+ );
+ return result;
+ };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
- ...require("./analyzeNftCollection")
+ analyzeNftCollection
  });
  //# sourceMappingURL=index.js.map
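Note: the inlined analyzeNftCollection above awaits each weighted criterion, multiplies both elements of the returned [value, max] pair by that criterion's weight, and keys the results by criterion name. A minimal standalone sketch of the same aggregation pattern, using hypothetical criteria in place of the real scoringCriteria:

type Score = [number, number]
type WeightedCriterion<T> = { score: (subject: T) => Score | Promise<Score>, weight: number }

// Hypothetical criteria standing in for scoringCriteria from ./criteria
const criteria: Record<string, WeightedCriterion<number>> = {
  EvenBonus: { score: (n) => [n % 2 === 0 ? 10 : 0, 10], weight: 2 },
  Magnitude: { score: (n) => [Math.min(n, 10), 10], weight: 1 },
}

const analyze = async (subject: number): Promise<Record<string, Score>> =>
  Object.fromEntries(
    await Promise.all(
      Object.entries(criteria).map(async ([key, { score, weight }]) => {
        const raw = await score(subject)
        // Weight both the achieved value and the maximum, exactly as the bundle does
        return [key, raw.map((v) => v * weight) as Score] as const
      }),
    ),
  )

// analyze(4) resolves to { EvenBonus: [20, 20], Magnitude: [4, 10] }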
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/lib/rating/index.ts"],"sourcesContent":["export * from './analyzeNftCollection'\n"],"mappings":";;;;;;;;;;;;;;;AAAA;AAAA;AAAA,2BAAc,mCAAd;","names":[]}
+ {"version":3,"sources":["../../../../src/lib/rating/index.ts","../../../../src/lib/rating/criteria/scoring/metadata/scoreIndividualAttributes.ts","../../../../src/lib/rating/criteria/scoring/metadata/scoreTotalAttributes.ts","../../../../src/lib/rating/criteria/scoring/metadata/metadata.ts","../../../../src/lib/rating/criteria/scoring/total.ts","../../../../src/lib/rating/criteria/index.ts","../../../../src/lib/rating/analyzeNftCollection.ts"],"sourcesContent":["export * from './analyzeNftCollection'\n","import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\nexport const scoreIndividualAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n const entries = Object.entries(attributes)\n if (entries.length === 0) return [0, maxScore]\n const scores = Object.entries(attributes)\n .map(([_trait, { values }]) => {\n return Object.entries(values).map<Score>(([_traitValue, metrics]) => {\n const rarity = Math.min(Math.round((1 - metrics.binomial.p) * maxScore), maxScore)\n return [rarity, maxScore]\n })\n })\n .flat()\n const total = scores.reduce<Score>(([a, b], [c, d]) => [a + c, b + d], [0, 0])\n return normalize(total, maxScore)\n}\n","import { NftCollectionAttributeMetrics } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\nconst maxScore = 10\n\n/**\n * Mean: What value is the distribution centered around\n */\nconst defaultMu = 0.15\n\n/**\n * Standard Deviation: How spread out is the distribution\n */\nconst defaultSigma = 0.1\n\n/**\n * Calculates the Gaussian probability density\n * @param x\n * @param mu Mean\n * @param sigma Standard Deviation\n * @returns\n */\nconst gaussianProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n const sqrtTwoPi = Math.sqrt(2 * Math.PI)\n const denominator = sigma * sqrtTwoPi\n const power = -0.5 * Math.pow((x - mu) / sigma, 2)\n return (1 / denominator) * Math.exp(power)\n}\n\n/**\n * For a Gaussian distribution, the peak of the distribution is the mean\n */\nconst maxProbabilityDensity = gaussianProbabilityDensity(defaultMu)\n\n/**\n * We're working on some assumptions here:\n *\n * - If you have a 100% chance of getting a trait, everyone get's a trophy\n * - If you have a 50% chance of getting a trait, it's not rare\n * - If you have a 0% chance of getting a trait, it's not fun\n *\n * So we're looking for something Pareto-ish (somewhere between\n * 80/20 or 90/10) as that's a good & sustainable model for the\n * distribution of many traits in real life.\n * However, we also don't want to maximally reward collections\n * that have a lot of single attributes distributed uniformly\n * (basically a 0% trait probably) as that's perfectly entropic\n * but not very interesting (some overlap is desirable).\n * So we're using a Gaussian distribution to model the\n * probability density of the joint probability of all traits\n * centered around 15%.\n * @param info\n * @returns\n */\nexport const scoreTotalAttributes = (info: NftCollectionAttributeMetrics): Score => {\n const { attributes } = info.metrics.metadata\n // This has somewhat of a filtering function by causing anything with 100% probability to\n // add no value to the end score\n const jointProbability = Object.entries(attributes).reduce((acc, [_trait, { metrics }]) => {\n 
return acc * metrics.binomial.p\n }, 1)\n const probabilityDensity = gaussianProbabilityDensity(jointProbability)\n const score: Score = [probabilityDensity, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n","import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { WeightedScoringCriteria } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoreIndividualAttributes } from './scoreIndividualAttributes'\nimport { scoreTotalAttributes } from './scoreTotalAttributes'\n\nexport const attributeScoringCriteria: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n 'Metadata Attributes Individual': { score: scoreIndividualAttributes, weight: 2 },\n 'Metadata Attributes Total': { score: scoreTotalAttributes, weight: 2 },\n}\n\nexport const scoreMetadata: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n ...attributeScoringCriteria,\n}\n","import { NftCollectionCount } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { normalize, Score } from '@xyo-network/crypto-nft-score-model'\n\n/**\n * This \"magic\" value was obtained using Solver in Excel\n * to find the median, with mu/sigma fixed, which maximizes\n * the distribution (the mode for lognormal) at 10,000\n */\nconst median = 81030839.8217352\nconst defaultMu = Math.log(median)\nconst defaultSigma = 3\nconst mode = Math.exp(defaultMu - Math.pow(defaultSigma, 2))\n/**\n * Calculates the log-normal probability density\n * @param x the value at which you want to calculate the probability density\n * @param mu mean of the associated normal distribution\n * @param sigma standard deviation of the associated normal distribution\n * @returns\n */\nconst logNormalProbabilityDensity = (x: number, mu: number = defaultMu, sigma: number = defaultSigma): number => {\n if (x <= 0) return 0\n const logX = Math.log(x)\n return (1 / (x * sigma * Math.sqrt(2 * Math.PI))) * Math.exp(-0.5 * Math.pow((logX - mu) / sigma, 2))\n}\n\n/**\n * For a lognormal distribution, the peak of the distribution is the mode\n */\nconst maxProbabilityDensity = logNormalProbabilityDensity(mode)\n\nconst maxScore = 10\n\n/**\n * We're working on some assumptions here:\n * - If there's < 1000 NFTs in your collection it starts becoming too niche\n * - If there's > 20,000 NFTs in your collection it starts becoming too broad\n * So there's a sweet spot somewhere between 2000 and 10,000\n * where a collection has enough NFTs to be interesting, but\n * not so many that it's teetering on a diluted money grab.\n * To model that we're using a log-normal distribution optimized\n * to maximally reward collections in the aforementioned range\n * @param nft\n * @returns\n */\nexport const scoreTotal = (nft: NftCollectionCount): Score => {\n const density = logNormalProbabilityDensity(nft.total)\n const score: Score = [density, maxProbabilityDensity]\n return normalize(score, maxScore)\n}\n","import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { WeightedScoringCriteria } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoreMetadata, scoreTotal } from './scoring'\n\nexport const scoringCriteria: { [key: string]: WeightedScoringCriteria<NftCollectionInfo> } = {\n ...scoreMetadata,\n Total: { score: scoreTotal, weight: 2 },\n}\n","import { NftCollectionInfo } from '@xyo-network/crypto-nft-collection-payload-plugin'\nimport { Score } from '@xyo-network/crypto-nft-score-model'\n\nimport { scoringCriteria } from './criteria'\n\nexport type 
ScoringCriteriaKey = keyof typeof scoringCriteria & PropertyKey\n\nexport type NftCollectionAnalysis = {\n [key in ScoringCriteriaKey]: Score\n}\n\nexport const analyzeNftCollection = async (\n /**\n * The NFT to evaluate\n */\n nft: NftCollectionInfo,\n): Promise<NftCollectionAnalysis> => {\n const result = Object.fromEntries(\n await Promise.all(\n Object.entries(scoringCriteria).map(async ([key, { score, weight }]) => {\n const rawScore = await score(nft)\n const weighted = rawScore.map((v) => v * weight) as Score\n return [key, weighted] as const\n }),\n ),\n ) as NftCollectionAnalysis\n return result\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,oCAAiC;AAEjC,IAAM,WAAW;AAEV,IAAM,4BAA4B,CAAC,SAA+C;AACvF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AACpC,QAAM,UAAU,OAAO,QAAQ,UAAU;AACzC,MAAI,QAAQ,WAAW;AAAG,WAAO,CAAC,GAAG,QAAQ;AAC7C,QAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,IAAI,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM;AAC7B,WAAO,OAAO,QAAQ,MAAM,EAAE,IAAW,CAAC,CAAC,aAAa,OAAO,MAAM;AACnE,YAAM,SAAS,KAAK,IAAI,KAAK,OAAO,IAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,QAAQ;AACjF,aAAO,CAAC,QAAQ,QAAQ;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC,EACA,KAAK;AACR,QAAM,QAAQ,OAAO,OAAc,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7E,aAAO,yCAAU,OAAO,QAAQ;AAClC;;;AClBA,IAAAA,iCAAiC;AAEjC,IAAMC,YAAW;AAKjB,IAAM,YAAY;AAKlB,IAAM,eAAe;AASrB,IAAM,6BAA6B,CAAC,GAAW,KAAa,WAAW,QAAgB,iBAAyB;AAC9G,QAAM,YAAY,KAAK,KAAK,IAAI,KAAK,EAAE;AACvC,QAAM,cAAc,QAAQ;AAC5B,QAAM,QAAQ,OAAO,KAAK,KAAK,IAAI,MAAM,OAAO,CAAC;AACjD,SAAQ,IAAI,cAAe,KAAK,IAAI,KAAK;AAC3C;AAKA,IAAM,wBAAwB,2BAA2B,SAAS;AAsB3D,IAAM,uBAAuB,CAAC,SAA+C;AAClF,QAAM,EAAE,WAAW,IAAI,KAAK,QAAQ;AAGpC,QAAM,mBAAmB,OAAO,QAAQ,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,QAAQ,CAAC,MAAM;AACzF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,GAAG,CAAC;AACJ,QAAM,qBAAqB,2BAA2B,gBAAgB;AACtE,QAAM,QAAe,CAAC,oBAAoB,qBAAqB;AAC/D,aAAO,0CAAU,OAAOA,SAAQ;AAClC;;;AC1DO,IAAM,2BAA0F;AAAA,EACrG,kCAAkC,EAAE,OAAO,2BAA2B,QAAQ,EAAE;AAAA,EAChF,6BAA6B,EAAE,OAAO,sBAAsB,QAAQ,EAAE;AACxE;AAEO,IAAM,gBAA+E;AAAA,EAC1F,GAAG;AACL;;;ACZA,IAAAC,iCAAiC;AAOjC,IAAM,SAAS;AACf,IAAMC,aAAY,KAAK,IAAI,MAAM;AACjC,IAAMC,gBAAe;AACrB,IAAM,OAAO,KAAK,IAAID,aAAY,KAAK,IAAIC,eAAc,CAAC,CAAC;AAQ3D,IAAM,8BAA8B,CAAC,GAAW,KAAaD,YAAW,QAAgBC,kBAAyB;AAC/G,MAAI,KAAK;AAAG,WAAO;AACnB,QAAM,OAAO,KAAK,IAAI,CAAC;AACvB,SAAQ,KAAK,IAAI,QAAQ,KAAK,KAAK,IAAI,KAAK,EAAE,KAAM,KAAK,IAAI,OAAO,KAAK,KAAK,OAAO,MAAM,OAAO,CAAC,CAAC;AACtG;AAKA,IAAMC,yBAAwB,4BAA4B,IAAI;AAE9D,IAAMC,YAAW;AAcV,IAAM,aAAa,CAAC,QAAmC;AAC5D,QAAM,UAAU,4BAA4B,IAAI,KAAK;AACrD,QAAM,QAAe,CAAC,SAASD,sBAAqB;AACpD,aAAO,0CAAU,OAAOC,SAAQ;AAClC;;;AC3CO,IAAM,kBAAiF;AAAA,EAC5F,GAAG;AAAA,EACH,OAAO,EAAE,OAAO,YAAY,QAAQ,EAAE;AACxC;;;ACGO,IAAM,uBAAuB,OAIlC,QACmC;AACnC,QAAM,SAAS,OAAO;AAAA,IACpB,MAAM,QAAQ;AAAA,MACZ,OAAO,QAAQ,eAAe,EAAE,IAAI,OAAO,CAAC,KAAK,EAAE,OAAO,OAAO,CAAC,MAAM;AACtE,cAAM,WAAW,MAAM,MAAM,GAAG;AAChC,cAAM,WAAW,SAAS,IAAI,CAAC,MAAM,IAAI,MAAM;AAC/C,eAAO,CAAC,KAAK,QAAQ;AAAA,MACvB,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;","names":["import_crypto_nft_score_model","maxScore","import_crypto_nft_score_model","defaultMu","defaultSigma","maxProbabilityDensity","maxScore"]}
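Note: scoreTotalAttributes, whose source travels in the map above, multiplies the per-trait binomial probabilities into a joint probability and scores it against a Gaussian centered at mu = 0.15 (sigma = 0.1), so trait distributions near that Pareto-ish sweet spot score best. An illustrative calculation (again assuming the normalize step just rescales the density ratio onto 0..10):

const mu = 0.15
const sigma = 0.1

// Gaussian probability density, mirroring gaussianProbabilityDensity in scoreTotalAttributes.ts
const gaussianPdf = (x: number, m = mu, s = sigma): number =>
  (1 / (s * Math.sqrt(2 * Math.PI))) * Math.exp(-0.5 * ((x - m) / s) ** 2)

const maxDensity = gaussianPdf(mu)

// Hypothetical per-trait probabilities; their product is the joint probability being scored
const traitProbabilities = [0.5, 0.6, 0.5]
const jointProbability = traitProbabilities.reduce((acc, p) => acc * p, 1) // 0.15

const score = (gaussianPdf(jointProbability) / maxDensity) * 10
console.log(score.toFixed(2)) // 10.00 — a joint probability right at mu earns the maximum score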