@uniformdev/cli 19.214.1-alpha.10 → 19.214.1-alpha.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.mjs +493 -309
  2. package/package.json +11 -10
package/dist/index.mjs CHANGED
@@ -615,18 +615,22 @@ var AssetListModule = {
  }
  };

- // src/files/index.ts
- import { preferredType } from "@thi.ng/mime";
- import { FILE_READY_STATE, getFileNameFromUrl } from "@uniformdev/files";
- import { createHash } from "crypto";
- import { fileTypeFromBuffer } from "file-type";
+ // src/files/deleteDownloadedFileByUrl.ts
  import fsj from "fs-jetpack";
- import sizeOf from "image-size";
- import PQueue from "p-queue";
- import { dirname, join as join2 } from "path";
+ import { join as join3 } from "path";
+
+ // src/files/urlToFileName.ts
+ import { join as join2 } from "path";
+ import { dirname } from "path";
  var FILES_DIRECTORY_NAME = "files";
- var escapeRegExp = (string) => {
- return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ var getFilesDirectory = (directory) => {
+ const isPackage = isPathAPackageFile(directory);
+ return isPackage ? dirname(directory) : (
+ // If we are syncing to a directory, we want to write all files into a
+ // top-lvl folder. That way any entities that contain files will sync to the
+ // same directory, so there is no duplication
+ join2(directory, "..")
+ );
  };
  var urlToHash = (url) => {
  return Buffer.from(
@@ -635,24 +639,6 @@ var urlToHash = (url) => {
  new URL(url).pathname.substring(0, 64)
  ).toString("base64");
  };
- var hashToPartialPathname = (hash) => {
- try {
- return Buffer.from(hash, "base64").toString("utf8");
- } catch {
- return null;
- }
- };
- var findUrlMatchingPartialPathname = (source, pathname) => {
- const escapedPathname = escapeRegExp(pathname);
- const regex = new RegExp(
- `"(https://([^"]*?)?(img|files).uniform.(rocks|global)${escapedPathname}([^"]*?))"`
- );
- const match = source.match(regex);
- if (match && match[1]) {
- return match[1];
- }
- return null;
- };
  var urlToFileExtension = (url) => {
  try {
  const urlObject = new URL(url);
@@ -667,239 +653,383 @@ var urlToFileName = (url, hash) => {
  const fileExtension = urlToFileExtension(url);
  return `${fileName}${fileExtension ? `.${fileExtension}` : ""}`;
  };
- var getFilesDirectory = (directory) => {
- const isPackage = isPathAPackageFile(directory);
- return isPackage ? dirname(directory) : (
- // If we are syncing to a directory, we want to write all files into a
- // top-lvl folder. That way any entities that contain files will sync to the
- // same directory, so there is no duplication
- join2(directory, "..")
- );
- };
- var getUniformFileUrlMatches = (string) => {
- return string.matchAll(/"(https:\/\/([^"]*?)?(img|files)\.uniform\.(rocks|global)\/([^"]*?))"/g);
- };
+
+ // src/files/deleteDownloadedFileByUrl.ts
  var deleteDownloadedFileByUrl = async (url, options) => {
  const writeDirectory = getFilesDirectory(options.directory);
  const fileName = urlToFileName(url);
- const fileToDelete = join2(writeDirectory, FILES_DIRECTORY_NAME, fileName);
+ const fileToDelete = join3(writeDirectory, FILES_DIRECTORY_NAME, fileName);
  try {
  await fsj.removeAsync(fileToDelete);
  } catch {
  console.warn(`Failed to delete a local file ${fileToDelete}`);
  }
  };
- var extractAndDownloadUniformFilesForObject = async (object, options) => {
- const objectAsString = JSON.stringify(object);
- const uniformFileUrlMatches = getUniformFileUrlMatches(objectAsString);
- const writeDirectory = getFilesDirectory(options.directory);
- if (uniformFileUrlMatches) {
- const fileDownloadQueue = new PQueue({ concurrency: 10 });
- for (const match of uniformFileUrlMatches) {
- const url = new URL(match[1]);
- fileDownloadQueue.add(async () => {
+
+ // src/files/files.ts
+ import {
+ ASSETS_SOURCE_UNIFORM,
+ getPropertiesValue as getPropertiesValue2,
+ isAssetParamValue,
+ isAssetParamValueItem,
+ walkNodeTree as walkNodeTree2,
+ walkPropertyValues as walkPropertyValues2
+ } from "@uniformdev/canvas";
+ import { isRichTextNodeType, isRichTextValue, walkRichTextTree } from "@uniformdev/richtext";
+ import fsj4 from "fs-jetpack";
+ import PQueue3 from "p-queue";
+ import { join as join6 } from "path";
+
+ // src/files/downloadFile.ts
+ import fsj2 from "fs-jetpack";
+ import { join as join4 } from "path";
+ var downloadFile = async ({
+ fileClient,
+ fileUrl,
+ directory
+ }) => {
+ const writeDirectory = getFilesDirectory(directory);
+ const fileName = urlToFileName(fileUrl.toString());
+ const fileAlreadyExists = await fsj2.existsAsync(join4(writeDirectory, FILES_DIRECTORY_NAME, fileName));
+ if (fileAlreadyExists) {
+ return { url: fileUrl };
+ }
+ const file = await fileClient.get({ url: fileUrl }).catch(() => null);
+ if (!file) {
+ console.warn(`Skipping file ${fileUrl} as it does not exist in the project anymore`);
+ return null;
+ }
+ if (file.sourceId) {
+ try {
+ const hashAlreadyExists = await fsj2.findAsync(join4(writeDirectory, FILES_DIRECTORY_NAME), {
+ matching: [file.sourceId, `${file.sourceId}.*`]
+ });
+ if (hashAlreadyExists.length > 0) {
+ return { id: file.id, url: fileUrl };
+ }
+ } catch {
+ }
+ }
+ const fetchUrl = `${fileUrl}?format=original`;
+ const response = await fetch(fetchUrl);
+ if (!response.ok) {
+ return null;
+ }
+ const fileBuffer = await response.arrayBuffer();
+ await fsj2.writeAsync(join4(writeDirectory, FILES_DIRECTORY_NAME, fileName), Buffer.from(fileBuffer));
+ return { id: file.id, url: fileUrl };
+ };
+
+ // src/files/uploadFile.ts
+ import { preferredType } from "@thi.ng/mime";
+ import { FILE_READY_STATE, getFileNameFromUrl } from "@uniformdev/files";
+ import { fileTypeFromBuffer } from "file-type";
+ import fsj3 from "fs-jetpack";
+ import sizeOf from "image-size";
+ import PQueue from "p-queue";
+ import { join as join5 } from "path";
+ var fileUploadQueue = new PQueue({ concurrency: 10 });
+ var uploadFile = async ({
+ fileClient,
+ fileUrl,
+ directory,
+ fileId
+ }) => {
+ return await fileUploadQueue.add(async () => {
+ try {
+ const writeDirectory = getFilesDirectory(directory);
+ const hash = urlToHash(fileUrl);
+ const fileAlreadyExistsChecks = await Promise.all([
+ fileClient.get({ url: fileUrl }).catch(() => null),
+ fileClient.get({ sourceId: hash }).catch(() => null)
+ ]);
+ const file = fileAlreadyExistsChecks.find((check) => check !== null);
+ if (file?.url) {
+ return { id: file.id, url: file.url };
+ }
+ const localFileName = urlToFileName(fileUrl);
+ const expectedFilePath = join5(writeDirectory, FILES_DIRECTORY_NAME, localFileName);
+ const fileExistsLocally = await fsj3.existsAsync(expectedFilePath);
+ if (!fileExistsLocally) {
+ console.warn(
+ `Skipping file ${fileUrl} as we couldn't find a local copy (looked at ${expectedFilePath})`
+ );
+ return null;
+ }
+ const fileBuffer = await fsj3.readAsync(expectedFilePath, "buffer");
+ if (!fileBuffer) {
+ console.warn(`Skipping file ${fileUrl} (${expectedFilePath}) as we couldn't read it`);
+ return null;
+ }
+ const fileName = getFileNameFromUrl(fileUrl);
+ let mimeType = expectedFilePath.endsWith(".svg") ? "image/svg+xml" : (await fileTypeFromBuffer(fileBuffer))?.mime;
+ if (!mimeType) {
+ mimeType = preferredType(fileUrl.split(".").at(-1) ?? "");
+ }
+ if (mimeType === "audio/x-flac") {
+ mimeType = "audio/flac";
+ }
+ const { width, height } = (() => {
+ if (!mimeType.startsWith("image/")) {
+ return {
+ width: void 0,
+ height: void 0
+ };
+ }
  try {
- const fileName = urlToFileName(url.toString());
- const fileAlreadyExists = await fsj.existsAsync(
- join2(writeDirectory, FILES_DIRECTORY_NAME, fileName)
- );
- if (fileAlreadyExists) {
- return;
- }
- const file = await options.fileClient.get({ url: url.toString() }).catch(() => null);
- if (!file) {
- console.warn(`Skipping file ${url} as it does not exist in the project anymore`);
- return;
- }
- if (file.sourceId) {
- try {
- const hashAlreadyExists = await fsj.findAsync(join2(writeDirectory, FILES_DIRECTORY_NAME), {
- matching: [file.sourceId, `${file.sourceId}.*`]
- });
- if (hashAlreadyExists.length > 0) {
- return;
- }
- } catch {
- }
- }
- const fetchUrl = `${url.origin}${url.pathname}?format=original`;
- const response = await fetch(fetchUrl);
- if (!response.ok) {
- return;
- }
- const fileBuffer = await response.arrayBuffer();
- await fsj.writeAsync(join2(writeDirectory, FILES_DIRECTORY_NAME, fileName), Buffer.from(fileBuffer));
+ return sizeOf(fileBuffer);
  } catch {
- console.warn(`Failed to download file ${url}`);
+ return {
+ width: void 0,
+ height: void 0
+ };
+ }
+ })();
+ const { id, method, uploadUrl } = await fileClient.insert({
+ id: fileId,
+ name: fileName,
+ mediaType: mimeType,
+ size: fileBuffer.length,
+ width,
+ height,
+ sourceId: hash
+ });
+ const uploadResponse = await fetch(uploadUrl, {
+ method,
+ body: fileBuffer,
+ headers: {
+ "Content-Type": mimeType,
+ "Content-Length": fileBuffer.length.toString()
  }
  });
+ if (!uploadResponse.ok) {
+ console.warn(`Failed to upload file ${fileUrl} (${expectedFilePath})`);
+ return null;
+ }
+ let error;
+ const checkForFile = async () => {
+ if (error) {
+ throw error;
+ }
+ const file2 = await fileClient.get({ id });
+ if (!file2 || file2.state !== FILE_READY_STATE || !file2.url) {
+ await new Promise((resolve) => setTimeout(resolve, 1e3));
+ return checkForFile();
+ }
+ return file2.url;
+ };
+ const abortTimeout = setTimeout(() => {
+ error = new Error(`Failed to upload file ${fileUrl} (${expectedFilePath}) - upload timed out`);
+ }, 6e4);
+ const uploadedFileUrl = await checkForFile();
+ clearTimeout(abortTimeout);
+ return { id, url: uploadedFileUrl };
+ } catch (e) {
+ console.warn(`Failed to upload file ${fileUrl}`, e);
+ return null;
  }
- await fileDownloadQueue.onIdle();
- }
- return object;
+ }) ?? null;
  };
- var extractAndUploadUniformFilesForObject = async (object, options) => {
- let objectAsString = JSON.stringify(object);
- const uniformFileUrlMatches = getUniformFileUrlMatches(objectAsString);
- const writeDirectory = getFilesDirectory(options.directory);
- const isPackage = isPathAPackageFile(options.directory);
- const legacyWriteDirectory = isPackage ? dirname(options.directory) : options.directory;
- if (uniformFileUrlMatches) {
- const fileUploadQueue = new PQueue({ concurrency: 3 });
- for (const match of uniformFileUrlMatches) {
- const url = match[1];
- const hash = urlToHash(url);
- const legacyHash = legacyUrlToHash(url);
- fileUploadQueue.add(async () => {
- try {
- const fileAlreadyExistsChecks = await Promise.all([
- options.fileClient.get({ url }).catch(() => null),
- options.fileClient.get({ sourceId: hash }).catch(() => null),
- options.fileClient.get({ sourceId: legacyHash }).catch(() => null)
- ]);
- const file = fileAlreadyExistsChecks.find((check) => check !== null);
- if (file) {
- objectAsString = objectAsString.replaceAll(`"${url}"`, `"${file.url}"`);
- return;
- }
- const localFileName = urlToFileName(url);
- let expectedFilePath = join2(writeDirectory, FILES_DIRECTORY_NAME, localFileName);
- let fileExistsLocally = await fsj.existsAsync(expectedFilePath);
- if (!fileExistsLocally) {
- const localFileName2 = legacyUrlToFileName(url);
- expectedFilePath = join2(legacyWriteDirectory, FILES_DIRECTORY_NAME, localFileName2);
- fileExistsLocally = await fsj.existsAsync(expectedFilePath);
- }
- if (!fileExistsLocally) {
- console.warn(
- `Skipping file ${url} as we couldn't find a local copy (looked at ${expectedFilePath})`
- );
- return;
+
+ // src/files/walkFilesForCompositionOrEntry.ts
+ import {
+ getPropertiesValue,
+ walkNodeTree,
+ walkPropertyValues
+ } from "@uniformdev/canvas";
+ import PQueue2 from "p-queue";
+ var UNIFORM_FILE_MATCH = /"(https:\/\/([^"]*?)?(img|files)\.uniform\.(rocks|global)\/([^"]*?))"/g;
+ var walkFilesForCompositionOrEntry = async ({
+ entity,
+ directory,
+ fileClient,
+ callback
+ }) => {
+ const urlReplacementMap = /* @__PURE__ */ new Map();
+ const fileDownloadQueue = new PQueue2({ concurrency: 3 });
+ const thumbnail = "entry" in entity ? entity.entry._thumbnail : void 0;
+ if (typeof thumbnail === "string") {
+ const isUniformFile = `"${thumbnail}"`.match(UNIFORM_FILE_MATCH) !== null;
+ if (isUniformFile) {
+ fileDownloadQueue.add(async () => {
+ const result = await callback({ fileUrl: thumbnail, directory, fileClient });
+ if (result) {
+ urlReplacementMap.set(thumbnail, result.url);
+ }
+ });
+ }
+ }
+ walkNodeTree("entry" in entity ? entity.entry : entity.composition, ({ node }) => {
+ const properties = getPropertiesValue(node);
+ if (!properties) {
+ return;
+ }
+ Object.entries(properties).forEach(([_, property]) => {
+ if (property.type !== "image") {
+ return;
+ }
+ walkPropertyValues(property, ({ value }) => {
+ if (typeof value !== "string") {
+ return;
+ }
+ const isUniformFile = `"${value}"`.match(UNIFORM_FILE_MATCH) !== null;
+ if (!isUniformFile) {
+ return;
+ }
+ fileDownloadQueue.add(async () => {
+ const result = await callback({ fileUrl: value, directory, fileClient });
+ if (result) {
+ urlReplacementMap.set(value, result.url);
  }
- const fileBuffer = await fsj.readAsync(expectedFilePath, "buffer");
- if (!fileBuffer) {
- console.warn(`Skipping file ${url} (${expectedFilePath}) as we couldn't read it`);
+ });
+ });
+ });
+ });
+ await fileDownloadQueue.onIdle();
+ return urlReplacementMap;
+ };
+
+ // src/files/files.ts
+ var downloadFileForAsset = async ({
+ asset,
+ directory,
+ fileClient
+ }) => {
+ if (asset.asset.fields?.file?.value === void 0 || asset.asset.fields.url?.value === void 0) {
+ return null;
+ }
+ const fileId = asset.asset.fields?.file?.value;
+ const fileUrl = asset.asset.fields.url?.value;
+ if (fileId === "" || fileUrl === "") {
+ return null;
+ }
+ return downloadFile({ fileUrl, directory, fileClient });
+ };
+ var uploadFileForAsset = async ({
+ asset,
+ directory,
+ fileClient
+ }) => {
+ if (asset.asset.fields?.file?.value === void 0 || asset.asset.fields.url?.value === void 0) {
+ return null;
+ }
+ const fileUrl = asset.asset.fields.url.value;
+ const fileId = asset.asset.fields.file.value;
+ return uploadFile({ fileUrl, directory, fileClient, fileId });
+ };
+ var removeUrlsFromAssetParameters = (entity) => {
+ walkNodeTree2("entry" in entity ? entity.entry : entity.composition, ({ node }) => {
+ const properties = getPropertiesValue2(node);
+ if (!properties) {
+ return;
+ }
+ Object.entries(properties).forEach(([_, property]) => {
+ if (property.type === "asset") {
+ walkPropertyValues2(property, ({ value }) => {
+ if (!isAssetParamValue(value)) {
  return;
  }
- const fileName = getFileNameFromUrl(url);
- let mimeType = expectedFilePath.endsWith(".svg") ? "image/svg+xml" : (await fileTypeFromBuffer(fileBuffer))?.mime;
- if (!mimeType) {
- mimeType = preferredType(url.split(".").at(-1) ?? "");
- }
- if (mimeType === "audio/x-flac") {
- mimeType = "audio/flac";
- }
- const { width, height } = (() => {
- if (!mimeType.startsWith("image/")) {
- return {
- width: void 0,
- height: void 0
- };
+ value.forEach((asset) => {
+ if (!isAssetParamValueItem(asset)) {
+ return;
  }
- try {
- return sizeOf(fileBuffer);
- } catch {
- return {
- width: void 0,
- height: void 0
- };
+ if (asset._source !== ASSETS_SOURCE_UNIFORM || !asset.fields?.url.value) {
+ return;
  }
- })();
- const { id, method, uploadUrl } = await options.fileClient.insert({
- name: fileName,
- mediaType: mimeType,
- size: fileBuffer.length,
- width,
- height,
- sourceId: hash
+ asset.fields.url.value = "";
  });
- const uploadResponse = await fetch(uploadUrl, {
- method,
- body: fileBuffer,
- headers: {
- "Content-Type": mimeType,
- "Content-Length": fileBuffer.length.toString()
- }
- });
- if (!uploadResponse.ok) {
- console.warn(`Failed to upload file ${url} (${expectedFilePath})`);
+ });
+ } else if (property.type === "richText") {
+ walkPropertyValues2(property, ({ value }) => {
+ if (!isRichTextValue(value)) {
  return;
  }
- let error;
- const checkForFile = async () => {
- if (error) {
- throw error;
- }
- const file2 = await options.fileClient.get({ id });
- if (!file2 || file2.state !== FILE_READY_STATE || !file2.url) {
- await new Promise((resolve) => setTimeout(resolve, 1e3));
- return checkForFile();
+ walkRichTextTree(value.root, (node2) => {
+ if (isRichTextNodeType(node2, "asset")) {
+ if (node2.__asset?._source !== ASSETS_SOURCE_UNIFORM || !node2.__asset.fields.url.value) {
+ return;
+ }
+ node2.__asset.fields.url.value = "";
  }
- return file2.url;
- };
- const abortTimeout = setTimeout(() => {
- error = new Error(`Failed to upload file ${url} (${expectedFilePath}) - upload timed out`);
- }, 6e4);
- const uploadedFileUrl = await checkForFile();
- clearTimeout(abortTimeout);
- objectAsString = objectAsString.replaceAll(`"${url}"`, `"${uploadedFileUrl}"`);
- } catch (e) {
- console.warn(`Failed to upload file ${url}`, e);
- }
- });
- }
- await fileUploadQueue.onIdle();
+ });
+ });
+ }
+ });
+ });
+ return entity;
+ };
+ var compareCompositionsOrEntriesWithoutAssetUrls = (source, target) => {
+ return serializedDequal(
+ removeUrlsFromAssetParameters(structuredClone(source.object)),
+ removeUrlsFromAssetParameters(structuredClone(target.object))
+ );
+ };
+ var removeUrlFromAsset = (asset) => {
+ if (asset.asset.fields?.url?.value) {
+ asset.asset.fields.url.value = "";
  }
- return JSON.parse(objectAsString);
+ return asset;
  };
- var swapOutUniformFileUrlsForTargetProject = async (object, options) => {
- let objectAsString = JSON.stringify(object);
- const uniformFileUrlMatches = getUniformFileUrlMatches(objectAsString);
- if (uniformFileUrlMatches) {
- const fileUrlReplacementQueue = new PQueue({ concurrency: 3 });
- for (const match of uniformFileUrlMatches) {
- const url = match[1];
- const hash = urlToHash(url);
- const legacyHash = legacyUrlToHash(url);
- fileUrlReplacementQueue.add(async () => {
- try {
- const fileAlreadyExistsChecks = await Promise.all([
- options.fileClient.get({ url }).catch(() => null),
- options.fileClient.get({ sourceId: hash }).catch(() => null),
- options.fileClient.get({ sourceId: legacyHash }).catch(() => null)
- ]);
- const file = fileAlreadyExistsChecks.find((check) => check !== null);
- if (!file) {
- return;
- }
- objectAsString = objectAsString.replaceAll(`"${url}"`, `"${file.url}"`);
- } catch {
- }
- });
- }
- await fileUrlReplacementQueue.onIdle();
+ var compareAssetsWithoutUrls = (source, target) => {
+ return serializedDequal(
+ removeUrlFromAsset(structuredClone(source.object)),
+ removeUrlFromAsset(structuredClone(target.object))
+ );
+ };
+ var downloadFilesForCompositionOrEntry = async ({
+ entity,
+ directory,
+ fileClient
+ }) => {
+ await walkFilesForCompositionOrEntry({
+ entity,
+ directory,
+ fileClient,
+ callback: downloadFile
+ });
+ };
+ var uploadFilesForCompositionOrEntry = async ({
+ entity,
+ directory,
+ fileClient
+ }) => {
+ const replacements = await walkFilesForCompositionOrEntry({
+ entity: entity.object,
+ directory,
+ fileClient,
+ callback: uploadFile
+ });
+ let entityAsString = JSON.stringify(entity);
+ for (const [key, value] of replacements.entries()) {
+ entityAsString = entityAsString.replaceAll(`"${key}"`, `"${value}"`);
  }
- return JSON.parse(objectAsString);
+ return JSON.parse(entityAsString);
  };
- var replaceRemoteUrlsWithLocalReferences = async (sourceObject, targetObject, options) => {
- let sourceObjectAsString = JSON.stringify(sourceObject);
- const targetObjectAsString = JSON.stringify(targetObject);
- const uniformFileUrlMatches = getUniformFileUrlMatches(sourceObjectAsString);
- const writeDirectory = getFilesDirectory(options.directory);
- if (uniformFileUrlMatches) {
- const fileUrlReplacementQueue = new PQueue({ concurrency: 3 });
- for (const match of uniformFileUrlMatches) {
- const url = match[1];
+ var replaceRemoteUrlsWithLocalReferences = async ({
+ sourceEntity,
+ targetEntity,
+ fileClient,
+ directory
+ }) => {
+ let sourceEntityAsString = JSON.stringify(sourceEntity);
+ const targetEntityAsString = JSON.stringify(targetEntity);
+ const writeDirectory = getFilesDirectory(directory);
+ const fileUrlReplacementQueue = new PQueue3({ concurrency: 3 });
+ await walkFilesForCompositionOrEntry({
+ entity: sourceEntity.object,
+ directory,
+ fileClient,
+ callback: async ({ fileUrl }) => {
  fileUrlReplacementQueue.add(async () => {
  try {
- const localFileName = urlToFileName(url);
- const fileExistsLocally = await fsj.existsAsync(
- join2(writeDirectory, FILES_DIRECTORY_NAME, localFileName)
+ const localFileName = urlToFileName(fileUrl);
+ const fileExistsLocally = await fsj4.existsAsync(
+ join6(writeDirectory, FILES_DIRECTORY_NAME, localFileName)
  );
  if (fileExistsLocally) {
  return;
  }
- const file = await options.fileClient.get({ url }).catch(() => null);
+ const file = await fileClient.get({ url: fileUrl }).catch(() => null);
  if (!file || !file.sourceId) {
  return;
  }
@@ -907,46 +1037,74 @@ var replaceRemoteUrlsWithLocalReferences = async (sourceObject, targetObject, op
  if (!originalPartialPath) {
  return;
  }
- const originalUrl = findUrlMatchingPartialPathname(targetObjectAsString, originalPartialPath);
+ const originalUrl = findUrlMatchingPartialPathname(targetEntityAsString, originalPartialPath);
  if (!originalUrl) {
  return;
  }
- sourceObjectAsString = sourceObjectAsString.replaceAll(`"${url}"`, `"${originalUrl}"`);
+ sourceEntityAsString = sourceEntityAsString.replaceAll(`"${fileUrl}"`, `"${originalUrl}"`);
  } catch {
  }
  });
+ return null;
  }
- await fileUrlReplacementQueue.onIdle();
- }
- return JSON.parse(sourceObjectAsString);
+ });
+ await fileUrlReplacementQueue.onIdle();
+ return JSON.parse(sourceEntityAsString);
  };
- var updateAssetFileIdBasedOnUrl = async (asset, options) => {
- if (!asset.asset.fields) {
- return asset;
- }
- const fileUrl = asset.asset.fields.url?.value;
- if (!fileUrl) {
- return asset;
- }
- const file = await options.fileClient.get({ url: fileUrl }).catch(() => null);
- if (!file) {
- return asset;
- }
- asset.asset.fields.file = {
- type: "file",
- value: file.id
- };
- return asset;
+ var replaceLocalUrlsWithRemoteReferences = async ({
+ entity,
+ directory,
+ fileClient
+ }) => {
+ let entityAsString = JSON.stringify(entity);
+ const fileUrlReplacementQueue = new PQueue3({ concurrency: 3 });
+ await walkFilesForCompositionOrEntry({
+ entity: entity.object,
+ directory,
+ fileClient,
+ callback: async ({ fileUrl }) => {
+ fileUrlReplacementQueue.add(async () => {
+ try {
+ const hash = urlToHash(fileUrl);
+ fileUrlReplacementQueue.add(async () => {
+ try {
+ const file = await fileClient.get({ sourceId: hash }).catch(() => null);
+ if (!file) {
+ return;
+ }
+ entityAsString = entityAsString.replaceAll(`"${fileUrl}"`, `"${file.url}"`);
+ } catch {
+ }
+ });
+ } catch {
+ }
+ });
+ return null;
+ }
+ });
+ await fileUrlReplacementQueue.onIdle();
+ return JSON.parse(entityAsString);
  };
- var legacyUrlToHash = (url) => {
- const hash = createHash("sha256");
- hash.update(url);
- return hash.digest("hex");
+ var escapeRegExp = (string) => {
+ return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  };
- var legacyUrlToFileName = (url) => {
- const fileName = legacyUrlToHash(url);
- const fileExtension = urlToFileExtension(url);
- return `${fileName}${fileExtension ? `.${fileExtension}` : ""}`;
+ var hashToPartialPathname = (hash) => {
+ try {
+ return Buffer.from(hash, "base64").toString("utf8");
+ } catch {
+ return null;
+ }
+ };
+ var findUrlMatchingPartialPathname = (source, pathname) => {
+ const escapedPathname = escapeRegExp(pathname);
+ const regex = new RegExp(
+ `"(https://([^"]*?)?(img|files).uniform.(rocks|global)${escapedPathname}([^"]*?))"`
+ );
+ const match = source.match(regex);
+ if (match && match[1]) {
+ return match[1];
+ }
+ return null;
  };

  // src/commands/canvas/assetEngineDataSource.ts
@@ -1137,27 +1295,25 @@ var AssetPullModule = {
  whatIf,
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
- onBeforeCompareObjects: async (sourceObject, targetObject) => {
+ onBeforeCompareObjects: async (sourceObject) => {
  delete sourceObject.object.asset._author;
- const sourceObjectWithPotentiallySwappedUrl = await replaceRemoteUrlsWithLocalReferences(
- sourceObject,
- targetObject,
- {
- directory,
- fileClient
- }
- );
- if (sourceObjectWithPotentiallySwappedUrl.object.asset.fields?.url && targetObject.object.asset.fields?.url && sourceObjectWithPotentiallySwappedUrl.object.asset.fields.url.value === targetObject.object.asset.fields.url.value) {
- targetObject.object.asset.fields.file = sourceObjectWithPotentiallySwappedUrl.object.asset.fields.file;
- }
- return sourceObjectWithPotentiallySwappedUrl;
+ return sourceObject;
  },
+ compareContents: compareAssetsWithoutUrls,
  onBeforeWriteObject: async (sourceObject) => {
  delete sourceObject.object.asset._author;
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ if (!sourceObject.object.asset.fields?.file) {
+ return sourceObject;
+ }
+ const downloadedFile = await downloadFileForAsset({
+ asset: sourceObject.object,
  directory,
  fileClient
  });
+ if (downloadedFile?.id) {
+ sourceObject.object.asset.fields.file.value = downloadedFile.id;
+ }
+ return sourceObject;
  }
  });
  }
@@ -1237,29 +1393,29 @@ var AssetPushModule = {
  if (targetObject) {
  delete targetObject.object.asset._author;
  }
- const sourceObjectWithNewFileUrls = await swapOutUniformFileUrlsForTargetProject(sourceObject, {
- fileClient
- });
- sourceObjectWithNewFileUrls.object = await updateAssetFileIdBasedOnUrl(
- sourceObjectWithNewFileUrls.object,
- {
- fileClient
- }
- );
- return sourceObjectWithNewFileUrls;
+ return sourceObject;
  },
+ compareContents: compareAssetsWithoutUrls,
  onBeforeWriteObject: async (sourceObject) => {
- const sourceObjectWithNewFileUrls = await extractAndUploadUniformFilesForObject(sourceObject, {
+ const uploadedFile = await uploadFileForAsset({
+ asset: sourceObject.object,
  directory,
  fileClient
  });
- sourceObjectWithNewFileUrls.object = await updateAssetFileIdBasedOnUrl(
- sourceObjectWithNewFileUrls.object,
- {
- fileClient
+ if (uploadedFile !== null) {
+ if (sourceObject.object.asset.fields === void 0) {
+ sourceObject.object.asset.fields = {};
  }
- );
- return sourceObjectWithNewFileUrls;
+ sourceObject.object.asset.fields.file = {
+ type: "file",
+ value: uploadedFile.id
+ };
+ sourceObject.object.asset.fields.url = {
+ type: "text",
+ value: uploadedFile.url
+ };
+ }
+ return sourceObject;
  }
  });
  }
@@ -2527,16 +2683,21 @@ var CompositionPullModule = {
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject, targetObject) => {
- return replaceRemoteUrlsWithLocalReferences(sourceObject, targetObject, {
+ return replaceRemoteUrlsWithLocalReferences({
+ sourceEntity: sourceObject,
+ targetEntity: targetObject,
  directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ await downloadFilesForCompositionOrEntry({
+ entity: sourceObject.object,
  directory,
  fileClient
  });
+ return sourceObject;
  }
  });
  }
@@ -2680,12 +2841,16 @@ var CompositionPushModule = {
  allowEmptySource,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject) => {
- return swapOutUniformFileUrlsForTargetProject(sourceObject, {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
+ directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndUploadUniformFilesForObject(sourceObject, {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
  directory,
  fileClient
  });
@@ -4279,16 +4444,21 @@ var EntryPullModule = {
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject, targetObject) => {
- return replaceRemoteUrlsWithLocalReferences(sourceObject, targetObject, {
+ return replaceRemoteUrlsWithLocalReferences({
+ sourceEntity: sourceObject,
+ targetEntity: targetObject,
  directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ await downloadFilesForCompositionOrEntry({
+ entity: sourceObject.object,
  directory,
  fileClient
  });
+ return sourceObject;
  }
  });
  }
@@ -4368,12 +4538,16 @@ var EntryPushModule = {
  allowEmptySource,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject) => {
- return swapOutUniformFileUrlsForTargetProject(sourceObject, {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
+ directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndUploadUniformFilesForObject(sourceObject, {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
  directory,
  fileClient
  });
@@ -4749,16 +4923,21 @@ var EntryPatternPullModule = {
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject, targetObject) => {
- return replaceRemoteUrlsWithLocalReferences(sourceObject, targetObject, {
+ return replaceRemoteUrlsWithLocalReferences({
+ sourceEntity: sourceObject,
+ targetEntity: targetObject,
  directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ await downloadFilesForCompositionOrEntry({
+ entity: sourceObject.object,
  directory,
  fileClient
  });
+ return sourceObject;
  }
  });
  }
@@ -4843,12 +5022,16 @@ var EntryPatternPushModule = {
  allowEmptySource,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject) => {
- return swapOutUniformFileUrlsForTargetProject(sourceObject, {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
+ directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndUploadUniformFilesForObject(sourceObject, {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
  directory,
  fileClient
  });
@@ -8235,6 +8418,7 @@ var package_default = {
  "@uniformdev/canvas": "workspace:*",
  "@uniformdev/context": "workspace:*",
  "@uniformdev/files": "workspace:*",
+ "@uniformdev/richtext": "workspace:*",
  "@uniformdev/project-map": "workspace:*",
  "@uniformdev/redirect": "workspace:*",
  "call-bind": "^1.0.2",
@@ -8244,11 +8428,11 @@ var package_default = {
  diff: "^5.0.0",
  dotenv: "^16.0.3",
  execa: "5.1.1",
- "file-type": "^19.6.0",
+ "file-type": "^20.0.0",
  "fs-jetpack": "5.1.0",
  graphql: "16.9.0",
  "graphql-request": "6.1.0",
- "image-size": "^1.0.2",
+ "image-size": "^1.2.0",
  inquirer: "9.2.17",
  "isomorphic-git": "1.25.2",
  "js-yaml": "^4.1.0",
@@ -8594,7 +8778,7 @@ ${err.message}`);
  // src/projects/cloneStarter.ts
  import crypto2 from "crypto";
  import fs3 from "fs";
- import fsj2 from "fs-jetpack";
+ import fsj5 from "fs-jetpack";
  import * as git from "isomorphic-git";
  import * as http from "isomorphic-git/http/node/index.js";
  import os from "os";
@@ -8625,7 +8809,7 @@ async function cloneStarter({
  throw new Error(`"${targetDir}" is not empty`);
  }
  const starterDir = path.join(cloneDir, ...pathSegments);
- fsj2.copy(starterDir, targetDir, { overwrite: true });
+ fsj5.copy(starterDir, targetDir, { overwrite: true });
  if (dotEnvFile) {
  fs3.writeFileSync(path.resolve(targetDir, ".env"), dotEnvFile, "utf-8");
  }
@@ -10605,14 +10789,14 @@ import { existsSync as existsSync4, promises as fs5 } from "fs";
  import { get as getHttp } from "http";
  import { get as getHttps } from "https";
  import { tmpdir } from "os";
- import { join as join3 } from "path";
+ import { join as join7 } from "path";
  import registryUrl from "registry-url";
  import { URL as URL2 } from "url";
  var compareVersions = (a, b) => a.localeCompare(b, "en-US", { numeric: true });
  var encode = (value) => encodeURIComponent(value).replace(/^%40/, "@");
  var getFile = async (details, distTag) => {
  const rootDir = tmpdir();
- const subDir = join3(rootDir, "update-check");
+ const subDir = join7(rootDir, "update-check");
  if (!existsSync4(subDir)) {
  await fs5.mkdir(subDir);
  }
@@ -10620,7 +10804,7 @@ var getFile = async (details, distTag) => {
  if (details.scope) {
  name = `${details.scope}-${name}`;
  }
- return join3(subDir, name);
+ return join7(subDir, name);
  };
  var evaluateCache = async (file, time, interval) => {
  if (existsSync4(file)) {
@@ -10775,7 +10959,7 @@ var checkForUpdateMiddleware = async ({ verbose }) => {

  // src/middleware/checkLocalDepsVersionsMiddleware.ts
  import { magenta, red as red5 } from "colorette";
- import { join as join4 } from "path";
+ import { join as join8 } from "path";

  // src/fs.ts
  import { promises as fs6 } from "fs";
@@ -10814,7 +10998,7 @@ var checkLocalDepsVersions = async (args) => {
  try {
  let isOutside = false;
  let warning = `${magenta("Warning:")} Installed Uniform packages should be the same version`;
- const localPackages = await tryReadJSON(join4(process.cwd(), "package.json"));
+ const localPackages = await tryReadJSON(join8(process.cwd(), "package.json"));
  if (!localPackages) return;
  let firstVersion;
  const allDependencies = {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@uniformdev/cli",
- "version": "19.214.1-alpha.10+98dac3377a",
+ "version": "19.214.1-alpha.32+fb2084b713",
  "description": "Uniform command line interface tool",
  "license": "SEE LICENSE IN LICENSE.txt",
  "main": "./cli.js",
@@ -27,12 +27,13 @@
  },
  "dependencies": {
  "@thi.ng/mime": "^2.2.23",
- "@uniformdev/assets": "19.214.1-alpha.10+98dac3377a",
- "@uniformdev/canvas": "19.214.1-alpha.10+98dac3377a",
- "@uniformdev/context": "19.214.1-alpha.10+98dac3377a",
- "@uniformdev/files": "19.214.1-alpha.10+98dac3377a",
- "@uniformdev/project-map": "19.214.1-alpha.10+98dac3377a",
- "@uniformdev/redirect": "19.214.1-alpha.10+98dac3377a",
+ "@uniformdev/assets": "19.214.1-alpha.32+fb2084b713",
+ "@uniformdev/canvas": "19.214.1-alpha.32+fb2084b713",
+ "@uniformdev/context": "19.214.1-alpha.32+fb2084b713",
+ "@uniformdev/files": "19.214.1-alpha.32+fb2084b713",
+ "@uniformdev/project-map": "19.214.1-alpha.32+fb2084b713",
+ "@uniformdev/redirect": "19.214.1-alpha.32+fb2084b713",
+ "@uniformdev/richtext": "19.214.1-alpha.32+fb2084b713",
  "call-bind": "^1.0.2",
  "colorette": "2.0.20",
  "cosmiconfig": "9.0.0",
@@ -40,11 +41,11 @@
  "diff": "^5.0.0",
  "dotenv": "^16.0.3",
  "execa": "5.1.1",
- "file-type": "^19.6.0",
+ "file-type": "^20.0.0",
  "fs-jetpack": "5.1.0",
  "graphql": "16.9.0",
  "graphql-request": "6.1.0",
- "image-size": "^1.0.2",
+ "image-size": "^1.2.0",
  "inquirer": "9.2.17",
  "isomorphic-git": "1.25.2",
  "js-yaml": "^4.1.0",
@@ -78,5 +79,5 @@
  "publishConfig": {
  "access": "public"
  },
- "gitHead": "98dac3377a8313b1d70d0b02632a6a7192f2409c"
+ "gitHead": "fb2084b713e551eb79a28c6d8a372f3b3038d02e"
  }