@uniformdev/cli 20.1.0 → 20.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.mjs +599 -307
  2. package/package.json +11 -10
package/dist/index.mjs CHANGED
@@ -615,17 +615,22 @@ var AssetListModule = {
  }
  };
 
- // src/files/index.ts
- import { preferredType } from "@thi.ng/mime";
- import { FILE_READY_STATE, getFileNameFromUrl } from "@uniformdev/files";
- import { fileTypeFromBuffer } from "file-type";
+ // src/files/deleteDownloadedFileByUrl.ts
  import fsj from "fs-jetpack";
- import sizeOf from "image-size";
- import PQueue from "p-queue";
- import { dirname, join as join2 } from "path";
+ import { join as join3 } from "path";
+
+ // src/files/urlToFileName.ts
+ import { join as join2 } from "path";
+ import { dirname } from "path";
  var FILES_DIRECTORY_NAME = "files";
- var escapeRegExp = (string) => {
- return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ var getFilesDirectory = (directory) => {
+ const isPackage = isPathAPackageFile(directory);
+ return isPackage ? dirname(directory) : (
+ // If we are syncing to a directory, we want to write all files into a
+ // top-lvl folder. That way any entities that contain files will sync to the
+ // same directory, so there is no duplication
+ join2(directory, "..")
+ );
  };
  var urlToHash = (url) => {
  return Buffer.from(
@@ -634,24 +639,6 @@ var urlToHash = (url) => {
  new URL(url).pathname.substring(0, 64)
  ).toString("base64");
  };
- var hashToPartialPathname = (hash) => {
- try {
- return Buffer.from(hash, "base64").toString("utf8");
- } catch {
- return null;
- }
- };
- var findUrlMatchingPartialPathname = (source, pathname) => {
- const escapedPathname = escapeRegExp(pathname);
- const regex = new RegExp(
- `"(https://([^"]*?)?(img|files).uniform.(rocks|global)${escapedPathname}([^"]*?))"`
- );
- const match = source.match(regex);
- if (match && match[1]) {
- return match[1];
- }
- return null;
- };
  var urlToFileExtension = (url) => {
  try {
  const urlObject = new URL(url);
@@ -666,228 +653,397 @@ var urlToFileName = (url, hash) => {
  const fileExtension = urlToFileExtension(url);
  return `${fileName}${fileExtension ? `.${fileExtension}` : ""}`;
  };
- var getFilesDirectory = (directory) => {
- const isPackage = isPathAPackageFile(directory);
- return isPackage ? dirname(directory) : (
- // If we are syncing to a directory, we want to write all files into a
- // top-lvl folder. That way any entities that contain files will sync to the
- // same directory, so there is no duplication
- join2(directory, "..")
- );
- };
- var getUniformFileUrlMatches = (string) => {
- return string.matchAll(/"(https:\/\/([^"]*?)?(img|files)\.uniform\.(rocks|global)\/([^"]*?))"/g);
+ var hashToPartialPathname = (hash) => {
+ try {
+ return Buffer.from(hash, "base64").toString("utf8");
+ } catch {
+ return null;
+ }
  };
+
+ // src/files/deleteDownloadedFileByUrl.ts
  var deleteDownloadedFileByUrl = async (url, options) => {
  const writeDirectory = getFilesDirectory(options.directory);
  const fileName = urlToFileName(url);
- const fileToDelete = join2(writeDirectory, FILES_DIRECTORY_NAME, fileName);
+ const fileToDelete = join3(writeDirectory, FILES_DIRECTORY_NAME, fileName);
  try {
  await fsj.removeAsync(fileToDelete);
  } catch {
  console.warn(`Failed to delete a local file ${fileToDelete}`);
  }
  };
- var extractAndDownloadUniformFilesForObject = async (object, options) => {
- const objectAsString = JSON.stringify(object);
- const uniformFileUrlMatches = getUniformFileUrlMatches(objectAsString);
- const writeDirectory = getFilesDirectory(options.directory);
- if (uniformFileUrlMatches) {
- const fileDownloadQueue = new PQueue({ concurrency: 10 });
- for (const match of uniformFileUrlMatches) {
- const url = new URL(match[1]);
- fileDownloadQueue.add(async () => {
+
+ // src/files/files.ts
+ import {
+ ASSETS_SOURCE_UNIFORM,
+ getPropertiesValue as getPropertiesValue2,
+ isAssetParamValue,
+ isAssetParamValueItem,
+ walkNodeTree as walkNodeTree2,
+ walkPropertyValues as walkPropertyValues2
+ } from "@uniformdev/canvas";
+ import { isRichTextNodeType, isRichTextValue, walkRichTextTree } from "@uniformdev/richtext";
+ import fsj4 from "fs-jetpack";
+ import PQueue2 from "p-queue";
+ import { join as join6 } from "path";
+
+ // src/files/downloadFile.ts
+ import fsj2 from "fs-jetpack";
+ import { join as join4 } from "path";
+ var downloadFile = async ({
+ fileClient,
+ fileUrl,
+ directory
+ }) => {
+ const writeDirectory = getFilesDirectory(directory);
+ const fileName = urlToFileName(fileUrl.toString());
+ const fileAlreadyExists = await fsj2.existsAsync(join4(writeDirectory, FILES_DIRECTORY_NAME, fileName));
+ if (fileAlreadyExists) {
+ return { url: fileUrl };
+ }
+ const file = await fileClient.get({ url: fileUrl }).catch(() => null);
+ if (!file) {
+ console.warn(`Skipping file ${fileUrl} as it does not exist in the project anymore`);
+ return null;
+ }
+ if (file.sourceId) {
+ try {
+ const hashAlreadyExists = await fsj2.findAsync(join4(writeDirectory, FILES_DIRECTORY_NAME), {
+ matching: [file.sourceId, `${file.sourceId}.*`]
+ });
+ if (hashAlreadyExists.length > 0) {
+ return { id: file.id, url: fileUrl };
+ }
+ } catch {
+ }
+ }
+ const fetchUrl = `${fileUrl}?format=original`;
+ const response = await fetch(fetchUrl);
+ if (!response.ok) {
+ return null;
+ }
+ const fileBuffer = await response.arrayBuffer();
+ await fsj2.writeAsync(join4(writeDirectory, FILES_DIRECTORY_NAME, fileName), Buffer.from(fileBuffer));
+ return { id: file.id, url: fileUrl };
+ };
+
+ // src/files/uploadFile.ts
+ import { preferredType } from "@thi.ng/mime";
+ import { FILE_READY_STATE, getFileNameFromUrl } from "@uniformdev/files";
+ import { fileTypeFromBuffer } from "file-type";
+ import fsj3 from "fs-jetpack";
+ import sizeOf from "image-size";
+ import PQueue from "p-queue";
+ import { join as join5 } from "path";
+ var uploadQueueByKey = /* @__PURE__ */ new Map();
+ var fileUploadQueue = new PQueue({ concurrency: 10 });
+ var uploadFile = async ({
+ fileClient,
+ fileUrl,
+ directory,
+ fileId
+ }) => {
+ const key = `${fileId}-${fileUrl}`;
+ if (uploadQueueByKey.has(key)) {
+ console.log("Already have this queued!!!");
+ const result2 = await uploadQueueByKey.get(key);
+ return result2 ?? null;
+ }
+ const promise = fileUploadQueue.add(async () => {
+ try {
+ const writeDirectory = getFilesDirectory(directory);
+ const hash = urlToHash(fileUrl);
+ const fileAlreadyExistsChecks = await Promise.all([
+ fileClient.get({ url: fileUrl }).catch(() => null),
+ fileClient.get({ sourceId: hash }).catch(() => null)
+ ]);
+ const file = fileAlreadyExistsChecks.find((check) => check !== null);
+ if (file?.url) {
+ return { id: file.id, url: file.url };
+ }
+ const localFileName = urlToFileName(fileUrl);
+ const expectedFilePath = join5(writeDirectory, FILES_DIRECTORY_NAME, localFileName);
+ const fileExistsLocally = await fsj3.existsAsync(expectedFilePath);
+ if (!fileExistsLocally) {
+ console.warn(
+ `Skipping file ${fileUrl} as we couldn't find a local copy (looked at ${expectedFilePath})`
+ );
+ return null;
+ }
+ const fileBuffer = await fsj3.readAsync(expectedFilePath, "buffer");
+ if (!fileBuffer) {
+ console.warn(`Skipping file ${fileUrl} (${expectedFilePath}) as we couldn't read it`);
+ return null;
+ }
+ const fileName = getFileNameFromUrl(fileUrl);
+ let mimeType = expectedFilePath.endsWith(".svg") ? "image/svg+xml" : (await fileTypeFromBuffer(fileBuffer))?.mime;
+ if (!mimeType) {
+ mimeType = preferredType(fileUrl.split(".").at(-1) ?? "");
+ }
+ if (mimeType === "audio/x-flac") {
+ mimeType = "audio/flac";
+ }
+ const { width, height } = (() => {
+ if (!mimeType.startsWith("image/")) {
+ return {
+ width: void 0,
+ height: void 0
+ };
+ }
  try {
- const fileName = urlToFileName(url.toString());
- const fileAlreadyExists = await fsj.existsAsync(
- join2(writeDirectory, FILES_DIRECTORY_NAME, fileName)
- );
- if (fileAlreadyExists) {
- return;
- }
- const file = await options.fileClient.get({ url: url.toString() }).catch(() => null);
- if (!file) {
- console.warn(`Skipping file ${url} as it does not exist in the project anymore`);
- return;
- }
- if (file.sourceId) {
- try {
- const hashAlreadyExists = await fsj.findAsync(join2(writeDirectory, FILES_DIRECTORY_NAME), {
- matching: [file.sourceId, `${file.sourceId}.*`]
- });
- if (hashAlreadyExists.length > 0) {
- return;
- }
- } catch {
- }
- }
- const fetchUrl = `${url.origin}${url.pathname}?format=original`;
- const response = await fetch(fetchUrl);
- if (!response.ok) {
- return;
- }
- const fileBuffer = await response.arrayBuffer();
- await fsj.writeAsync(join2(writeDirectory, FILES_DIRECTORY_NAME, fileName), Buffer.from(fileBuffer));
+ return sizeOf(fileBuffer);
  } catch {
- console.warn(`Failed to download file ${url}`);
+ return {
+ width: void 0,
+ height: void 0
+ };
  }
+ })();
+ const { id, method, uploadUrl } = await fileClient.insert({
+ id: fileId,
+ name: fileName,
+ mediaType: mimeType,
+ size: fileBuffer.length,
+ width,
+ height,
+ sourceId: hash
  });
+ const uploadResponse = await fetch(uploadUrl, {
+ method,
+ body: fileBuffer,
+ headers: {
+ "Content-Type": mimeType,
+ "Content-Length": fileBuffer.length.toString()
+ }
+ });
+ if (!uploadResponse.ok) {
+ console.warn(`Failed to upload file ${fileUrl} (${expectedFilePath})`);
+ return null;
+ }
+ let error;
+ const checkForFile = async () => {
+ if (error) {
+ throw error;
+ }
+ const file2 = await fileClient.get({ id });
+ if (!file2 || file2.state !== FILE_READY_STATE || !file2.url) {
+ await new Promise((resolve) => setTimeout(resolve, 1e3));
+ return checkForFile();
+ }
+ return file2.url;
+ };
+ const abortTimeout = setTimeout(() => {
+ error = new Error(`Failed to upload file ${fileUrl} (${expectedFilePath}) - upload timed out`);
+ }, 6e4);
+ const uploadedFileUrl = await checkForFile();
+ clearTimeout(abortTimeout);
+ return { id, url: uploadedFileUrl };
+ } catch (e) {
+ console.warn(`Failed to upload file ${fileUrl}`, e);
+ return null;
+ }
+ });
+ uploadQueueByKey.set(key, promise);
+ const result = await promise;
+ return result ?? null;
+ };
+
+ // src/files/walkFileUrlsForCompositionOrEntry.ts
+ import {
+ getPropertiesValue,
+ walkNodeTree,
+ walkPropertyValues
+ } from "@uniformdev/canvas";
+ var UNIFORM_FILE_MATCH = /"(https:\/\/([^"]*?)?(img|files)\.uniform\.(rocks|global)\/([^"]*?))"/g;
+ var walkFileUrlsForCompositionOrEntry = ({
+ entity,
+ callback
+ }) => {
+ const thumbnail = "entry" in entity ? entity.entry._thumbnail : void 0;
+ if (typeof thumbnail === "string") {
+ const isUniformFile = `"${thumbnail}"`.match(UNIFORM_FILE_MATCH) !== null;
+ if (isUniformFile) {
+ callback({ fileUrl: thumbnail });
  }
- await fileDownloadQueue.onIdle();
  }
- return object;
+ walkNodeTree("entry" in entity ? entity.entry : entity.composition, ({ node }) => {
+ const properties = getPropertiesValue(node);
+ if (!properties) {
+ return;
+ }
+ Object.entries(properties).forEach(([_, property]) => {
+ if (property.type !== "image") {
+ return;
+ }
+ walkPropertyValues(property, ({ value }) => {
+ if (typeof value !== "string") {
+ return;
+ }
+ const isUniformFile = `"${value}"`.match(UNIFORM_FILE_MATCH) !== null;
+ if (!isUniformFile) {
+ return;
+ }
+ callback({ fileUrl: value });
+ });
+ });
+ });
  };
- var extractAndUploadUniformFilesForObject = async (object, options) => {
- let objectAsString = JSON.stringify(object);
- const uniformFileUrlMatches = getUniformFileUrlMatches(objectAsString);
- const writeDirectory = getFilesDirectory(options.directory);
- if (uniformFileUrlMatches) {
- const fileUploadQueue = new PQueue({ concurrency: 3 });
- for (const match of uniformFileUrlMatches) {
- const url = match[1];
- const hash = urlToHash(url);
- fileUploadQueue.add(async () => {
- try {
- const fileAlreadyExistsChecks = await Promise.all([
- options.fileClient.get({ url }).catch(() => null),
- options.fileClient.get({ sourceId: hash }).catch(() => null)
- ]);
- const file = fileAlreadyExistsChecks.find((check) => check !== null);
- if (file) {
- objectAsString = objectAsString.replaceAll(`"${url}"`, `"${file.url}"`);
- return;
- }
- const localFileName = urlToFileName(url);
- const expectedFilePath = join2(writeDirectory, FILES_DIRECTORY_NAME, localFileName);
- const fileExistsLocally = await fsj.existsAsync(expectedFilePath);
- if (!fileExistsLocally) {
- console.warn(
- `Skipping file ${url} as we couldn't find a local copy (looked at ${expectedFilePath})`
- );
- return;
- }
- const fileBuffer = await fsj.readAsync(expectedFilePath, "buffer");
- if (!fileBuffer) {
- console.warn(`Skipping file ${url} (${expectedFilePath}) as we couldn't read it`);
+
+ // src/files/files.ts
+ var downloadFileForAsset = async ({
+ asset,
+ directory,
+ fileClient
+ }) => {
+ if (asset.asset.fields?.file?.value === void 0 || asset.asset.fields.url?.value === void 0) {
+ return null;
+ }
+ const fileId = asset.asset.fields?.file?.value;
+ const fileUrl = asset.asset.fields.url?.value;
+ if (fileId === "" || fileUrl === "") {
+ return null;
+ }
+ return downloadFile({ fileUrl, directory, fileClient });
+ };
+ var uploadFileForAsset = async ({
+ asset,
+ directory,
+ fileClient
+ }) => {
+ if (asset.asset.fields?.file?.value === void 0 || asset.asset.fields.url?.value === void 0) {
+ return null;
+ }
+ const fileUrl = asset.asset.fields.url.value;
+ const fileId = asset.asset.fields.file.value;
+ return uploadFile({ fileUrl, directory, fileClient, fileId });
+ };
+ var removeUrlsFromAssetParameters = (entity) => {
+ walkNodeTree2("entry" in entity ? entity.entry : entity.composition, ({ node }) => {
+ const properties = getPropertiesValue2(node);
+ if (!properties) {
+ return;
+ }
+ Object.entries(properties).forEach(([_, property]) => {
+ if (property.type === "asset") {
+ walkPropertyValues2(property, ({ value }) => {
+ if (!isAssetParamValue(value)) {
  return;
  }
- const fileName = getFileNameFromUrl(url);
- let mimeType = expectedFilePath.endsWith(".svg") ? "image/svg+xml" : (await fileTypeFromBuffer(fileBuffer))?.mime;
- if (!mimeType) {
- mimeType = preferredType(url.split(".").at(-1) ?? "");
- }
- if (mimeType === "audio/x-flac") {
- mimeType = "audio/flac";
- }
- const { width, height } = (() => {
- if (!mimeType.startsWith("image/")) {
- return {
- width: void 0,
- height: void 0
- };
+ value.forEach((asset) => {
+ if (!isAssetParamValueItem(asset)) {
+ return;
  }
- try {
- return sizeOf(fileBuffer);
- } catch {
- return {
- width: void 0,
- height: void 0
- };
+ if (asset._source !== ASSETS_SOURCE_UNIFORM || !asset.fields?.url.value) {
+ return;
  }
- })();
- const { id, method, uploadUrl } = await options.fileClient.insert({
- name: fileName,
- mediaType: mimeType,
- size: fileBuffer.length,
- width,
- height,
- sourceId: hash
+ asset.fields.url.value = "";
  });
- const uploadResponse = await fetch(uploadUrl, {
- method,
- body: fileBuffer,
- headers: {
- "Content-Type": mimeType,
- "Content-Length": fileBuffer.length.toString()
- }
- });
- if (!uploadResponse.ok) {
- console.warn(`Failed to upload file ${url} (${expectedFilePath})`);
+ });
+ } else if (property.type === "richText") {
+ walkPropertyValues2(property, ({ value }) => {
+ if (!isRichTextValue(value)) {
  return;
  }
- let error;
- const checkForFile = async () => {
- if (error) {
- throw error;
- }
- const file2 = await options.fileClient.get({ id });
- if (!file2 || file2.state !== FILE_READY_STATE || !file2.url) {
- await new Promise((resolve) => setTimeout(resolve, 1e3));
- return checkForFile();
+ walkRichTextTree(value.root, (node2) => {
+ if (isRichTextNodeType(node2, "asset")) {
+ if (node2.__asset?._source !== ASSETS_SOURCE_UNIFORM || !node2.__asset.fields.url.value) {
+ return;
+ }
+ node2.__asset.fields.url.value = "";
  }
- return file2.url;
- };
- const abortTimeout = setTimeout(() => {
- error = new Error(`Failed to upload file ${url} (${expectedFilePath}) - upload timed out`);
- }, 6e4);
- const uploadedFileUrl = await checkForFile();
- clearTimeout(abortTimeout);
- objectAsString = objectAsString.replaceAll(`"${url}"`, `"${uploadedFileUrl}"`);
- } catch (e) {
- console.warn(`Failed to upload file ${url}`, e);
- }
+ });
+ });
+ }
+ });
+ });
+ return entity;
+ };
+ var compareCompositionsOrEntriesWithoutAssetUrls = (source, target) => {
+ return serializedDequal(
+ removeUrlsFromAssetParameters(structuredClone(source.object)),
+ removeUrlsFromAssetParameters(structuredClone(target.object))
+ );
+ };
+ var removeUrlFromAsset = (asset) => {
+ if (asset.asset.fields?.url?.value) {
+ asset.asset.fields.url.value = "";
+ }
+ return asset;
+ };
+ var compareAssetsWithoutUrls = (source, target) => {
+ return serializedDequal(
+ removeUrlFromAsset(structuredClone(source.object)),
+ removeUrlFromAsset(structuredClone(target.object))
+ );
+ };
+ var downloadFilesForCompositionOrEntry = async ({
+ entity,
+ directory,
+ fileClient
+ }) => {
+ const fileDownloadQueue = new PQueue2({ concurrency: 3 });
+ await walkFileUrlsForCompositionOrEntry({
+ entity,
+ callback: ({ fileUrl }) => {
+ fileDownloadQueue.add(async () => {
+ await downloadFile({ fileUrl, directory, fileClient });
  });
  }
- await fileUploadQueue.onIdle();
- }
- return JSON.parse(objectAsString);
+ });
+ await fileDownloadQueue.onIdle();
  };
- var swapOutUniformFileUrlsForTargetProject = async (object, options) => {
- let objectAsString = JSON.stringify(object);
- const uniformFileUrlMatches = getUniformFileUrlMatches(objectAsString);
- if (uniformFileUrlMatches) {
- const fileUrlReplacementQueue = new PQueue({ concurrency: 3 });
- for (const match of uniformFileUrlMatches) {
- const url = match[1];
- const hash = urlToHash(url);
- fileUrlReplacementQueue.add(async () => {
- try {
- const fileAlreadyExistsChecks = await Promise.all([
- options.fileClient.get({ url }).catch(() => null),
- options.fileClient.get({ sourceId: hash }).catch(() => null)
- ]);
- const file = fileAlreadyExistsChecks.find((check) => check !== null);
- if (!file) {
- return;
- }
- objectAsString = objectAsString.replaceAll(`"${url}"`, `"${file.url}"`);
- } catch {
+ var uploadFilesForCompositionOrEntry = async ({
+ entity,
+ directory,
+ fileClient
+ }) => {
+ const fileUploadQueue2 = new PQueue2({ concurrency: 3 });
+ const urlReplacementMap = /* @__PURE__ */ new Map();
+ walkFileUrlsForCompositionOrEntry({
+ entity: entity.object,
+ callback: async ({ fileUrl }) => {
+ fileUploadQueue2.add(async () => {
+ const upload = await uploadFile({
+ directory,
+ fileUrl,
+ fileClient
+ });
+ if (upload !== null) {
+ urlReplacementMap.set(fileUrl, upload.url);
  }
  });
  }
- await fileUrlReplacementQueue.onIdle();
- }
- return JSON.parse(objectAsString);
- };
- var replaceRemoteUrlsWithLocalReferences = async (sourceObject, targetObject, options) => {
- let sourceObjectAsString = JSON.stringify(sourceObject);
- const targetObjectAsString = JSON.stringify(targetObject);
- const uniformFileUrlMatches = getUniformFileUrlMatches(sourceObjectAsString);
- const writeDirectory = getFilesDirectory(options.directory);
- if (uniformFileUrlMatches) {
- const fileUrlReplacementQueue = new PQueue({ concurrency: 3 });
- for (const match of uniformFileUrlMatches) {
- const url = match[1];
+ });
+ await fileUploadQueue2.onIdle();
+ let entityAsString = JSON.stringify(entity);
+ for (const [key, value] of urlReplacementMap.entries()) {
+ entityAsString = entityAsString.replaceAll(`"${key}"`, `"${value}"`);
+ }
+ return JSON.parse(entityAsString);
+ };
+ var replaceRemoteUrlsWithLocalReferences = async ({
+ sourceEntity,
+ targetEntity,
+ fileClient,
+ directory
+ }) => {
+ let sourceEntityAsString = JSON.stringify(sourceEntity);
+ const targetEntityAsString = JSON.stringify(targetEntity);
+ const writeDirectory = getFilesDirectory(directory);
+ const fileUrlReplacementQueue = new PQueue2({ concurrency: 3 });
+ walkFileUrlsForCompositionOrEntry({
+ entity: sourceEntity.object,
+ callback: ({ fileUrl }) => {
  fileUrlReplacementQueue.add(async () => {
  try {
- const localFileName = urlToFileName(url);
- const fileExistsLocally = await fsj.existsAsync(
- join2(writeDirectory, FILES_DIRECTORY_NAME, localFileName)
+ const localFileName = urlToFileName(fileUrl);
+ const fileExistsLocally = await fsj4.existsAsync(
+ join6(writeDirectory, FILES_DIRECTORY_NAME, localFileName)
  );
  if (fileExistsLocally) {
  return;
  }
- const file = await options.fileClient.get({ url }).catch(() => null);
+ const file = await fileClient.get({ url: fileUrl }).catch(() => null);
  if (!file || !file.sourceId) {
  return;
  }
@@ -895,36 +1051,64 @@ var replaceRemoteUrlsWithLocalReferences = async (sourceObject, targetObject, op
  if (!originalPartialPath) {
  return;
  }
- const originalUrl = findUrlMatchingPartialPathname(targetObjectAsString, originalPartialPath);
+ const originalUrl = findUrlMatchingPartialPathname(targetEntityAsString, originalPartialPath);
  if (!originalUrl) {
  return;
  }
- sourceObjectAsString = sourceObjectAsString.replaceAll(`"${url}"`, `"${originalUrl}"`);
+ sourceEntityAsString = sourceEntityAsString.replaceAll(`"${fileUrl}"`, `"${originalUrl}"`);
  } catch {
  }
  });
+ return null;
  }
- await fileUrlReplacementQueue.onIdle();
- }
- return JSON.parse(sourceObjectAsString);
+ });
+ await fileUrlReplacementQueue.onIdle();
+ return JSON.parse(sourceEntityAsString);
  };
- var updateAssetFileIdBasedOnUrl = async (asset, options) => {
- if (!asset.asset.fields) {
- return asset;
- }
- const fileUrl = asset.asset.fields.url?.value;
- if (!fileUrl) {
- return asset;
- }
- const file = await options.fileClient.get({ url: fileUrl }).catch(() => null);
- if (!file) {
- return asset;
+ var replaceLocalUrlsWithRemoteReferences = async ({
+ entity,
+ fileClient
+ }) => {
+ let entityAsString = JSON.stringify(entity);
+ const fileUrlReplacementQueue = new PQueue2({ concurrency: 3 });
+ walkFileUrlsForCompositionOrEntry({
+ entity: entity.object,
+ callback: ({ fileUrl }) => {
+ fileUrlReplacementQueue.add(async () => {
+ try {
+ const hash = urlToHash(fileUrl);
+ fileUrlReplacementQueue.add(async () => {
+ try {
+ const file = await fileClient.get({ sourceId: hash }).catch(() => null);
+ if (!file) {
+ return;
+ }
+ entityAsString = entityAsString.replaceAll(`"${fileUrl}"`, `"${file.url}"`);
+ } catch {
+ }
+ });
+ } catch {
+ }
+ });
+ return null;
+ }
+ });
+ await fileUrlReplacementQueue.onIdle();
+ return JSON.parse(entityAsString);
+ };
+ var escapeRegExp = (string) => {
+ return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ };
+ var findUrlMatchingPartialPathname = (source, pathname) => {
+ const escapedPathname = escapeRegExp(pathname);
+ const regex = new RegExp(
+ `"(https://([^"]*?)?(img|files).uniform.(rocks|global)${escapedPathname}([^"]*?))"`
+ );
+ const match = source.match(regex);
+ if (match && match[1]) {
+ return match[1];
  }
- asset.asset.fields.file = {
- type: "file",
- value: file.id
- };
- return asset;
+ return null;
  };
 
  // src/commands/canvas/assetEngineDataSource.ts
@@ -1115,27 +1299,25 @@ var AssetPullModule = {
  whatIf,
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
- onBeforeCompareObjects: async (sourceObject, targetObject) => {
+ onBeforeCompareObjects: async (sourceObject) => {
  delete sourceObject.object.asset._author;
- const sourceObjectWithPotentiallySwappedUrl = await replaceRemoteUrlsWithLocalReferences(
- sourceObject,
- targetObject,
- {
- directory,
- fileClient
- }
- );
- if (sourceObjectWithPotentiallySwappedUrl.object.asset.fields?.url && targetObject.object.asset.fields?.url && sourceObjectWithPotentiallySwappedUrl.object.asset.fields.url.value === targetObject.object.asset.fields.url.value) {
- targetObject.object.asset.fields.file = sourceObjectWithPotentiallySwappedUrl.object.asset.fields.file;
- }
- return sourceObjectWithPotentiallySwappedUrl;
+ return sourceObject;
  },
+ compareContents: compareAssetsWithoutUrls,
  onBeforeWriteObject: async (sourceObject) => {
  delete sourceObject.object.asset._author;
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ if (!sourceObject.object.asset.fields?.file) {
+ return sourceObject;
+ }
+ const downloadedFile = await downloadFileForAsset({
+ asset: sourceObject.object,
  directory,
  fileClient
  });
+ if (downloadedFile?.id) {
+ sourceObject.object.asset.fields.file.value = downloadedFile.id;
+ }
+ return sourceObject;
  }
  });
  }
@@ -1215,29 +1397,29 @@ var AssetPushModule = {
  if (targetObject) {
  delete targetObject.object.asset._author;
  }
- const sourceObjectWithNewFileUrls = await swapOutUniformFileUrlsForTargetProject(sourceObject, {
- fileClient
- });
- sourceObjectWithNewFileUrls.object = await updateAssetFileIdBasedOnUrl(
- sourceObjectWithNewFileUrls.object,
- {
- fileClient
- }
- );
- return sourceObjectWithNewFileUrls;
+ return sourceObject;
  },
+ compareContents: compareAssetsWithoutUrls,
  onBeforeWriteObject: async (sourceObject) => {
- const sourceObjectWithNewFileUrls = await extractAndUploadUniformFilesForObject(sourceObject, {
+ const uploadedFile = await uploadFileForAsset({
+ asset: sourceObject.object,
  directory,
  fileClient
  });
- sourceObjectWithNewFileUrls.object = await updateAssetFileIdBasedOnUrl(
- sourceObjectWithNewFileUrls.object,
- {
- fileClient
+ if (uploadedFile !== null) {
+ if (sourceObject.object.asset.fields === void 0) {
+ sourceObject.object.asset.fields = {};
  }
- );
- return sourceObjectWithNewFileUrls;
+ sourceObject.object.asset.fields.file = {
+ type: "file",
+ value: uploadedFile.id
+ };
+ sourceObject.object.asset.fields.url = {
+ type: "text",
+ value: uploadedFile.url
+ };
+ }
+ return sourceObject;
  }
  });
  }
@@ -2323,7 +2505,8 @@ var CompositionPublishModule = {
  onlyCompositions,
  onlyPatterns,
  patternType,
- verbose
+ verbose,
+ directory
  }) => {
  if (!all && !ids || all && ids) {
  console.error(`Specify --all or composition ID(s) to publish.`);
@@ -2350,6 +2533,7 @@ var CompositionPublishModule = {
  patternType,
  verbose
  });
+ const fileClient = getFileClient({ apiKey, apiHost, fetch: fetch2, projectId });
  await syncEngine({
  source,
  target,
@@ -2357,7 +2541,21 @@ var CompositionPublishModule = {
  mode: "createOrUpdate",
  whatIf,
  verbose,
- log: createPublishStatusSyncEngineConsoleLogger({ status: "publish" })
+ log: createPublishStatusSyncEngineConsoleLogger({ status: "publish" }),
+ onBeforeCompareObjects: async (sourceObject) => {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
+ fileClient
+ });
+ },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
+ onBeforeWriteObject: async (sourceObject) => {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
+ directory,
+ fileClient
+ });
+ }
  });
  }
  };
@@ -2505,16 +2703,21 @@ var CompositionPullModule = {
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject, targetObject) => {
- return replaceRemoteUrlsWithLocalReferences(sourceObject, targetObject, {
+ return replaceRemoteUrlsWithLocalReferences({
+ sourceEntity: sourceObject,
+ targetEntity: targetObject,
  directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ await downloadFilesForCompositionOrEntry({
+ entity: sourceObject.object,
  directory,
  fileClient
  });
+ return sourceObject;
  }
  });
  }
@@ -2658,12 +2861,15 @@ var CompositionPushModule = {
  allowEmptySource,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject) => {
- return swapOutUniformFileUrlsForTargetProject(sourceObject, {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndUploadUniformFilesForObject(sourceObject, {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
  directory,
  fileClient
  });
@@ -4140,7 +4346,18 @@ var EntryPublishModule = {
  )
  )
  ),
- handler: async ({ apiHost, edgeApiHost, apiKey, proxy, ids, all, project: projectId, whatIf, verbose }) => {
+ handler: async ({
+ apiHost,
+ edgeApiHost,
+ apiKey,
+ proxy,
+ ids,
+ all,
+ project: projectId,
+ whatIf,
+ verbose,
+ directory
+ }) => {
  if (!all && !ids || all && ids) {
  console.error(`Specify --all or entry ID(s) to publish.`);
  process.exit(1);
@@ -4160,13 +4377,28 @@ var EntryPublishModule = {
  entryIDs: entryIDsArray,
  onlyEntries: true
  });
+ const fileClient = getFileClient({ apiKey, apiHost, fetch: fetch2, projectId });
  await syncEngine({
  source,
  target,
  // Publishing is one-direction operation, so no need to support automatic un-publishing
  mode: "createOrUpdate",
  whatIf,
- log: createPublishStatusSyncEngineConsoleLogger({ status: "publish" })
+ log: createPublishStatusSyncEngineConsoleLogger({ status: "publish" }),
+ onBeforeCompareObjects: async (sourceObject) => {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
+ fileClient
+ });
+ },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
+ onBeforeWriteObject: async (sourceObject) => {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
+ directory,
+ fileClient
+ });
+ }
  });
  }
  };
@@ -4257,16 +4489,21 @@ var EntryPullModule = {
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject, targetObject) => {
- return replaceRemoteUrlsWithLocalReferences(sourceObject, targetObject, {
+ return replaceRemoteUrlsWithLocalReferences({
+ sourceEntity: sourceObject,
+ targetEntity: targetObject,
  directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ await downloadFilesForCompositionOrEntry({
+ entity: sourceObject.object,
  directory,
  fileClient
  });
+ return sourceObject;
  }
  });
  }
@@ -4346,12 +4583,15 @@ var EntryPushModule = {
  allowEmptySource,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject) => {
- return swapOutUniformFileUrlsForTargetProject(sourceObject, {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndUploadUniformFilesForObject(sourceObject, {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
  directory,
  fileClient
  });
@@ -4610,7 +4850,18 @@ var EntryPatternPublishModule = {
  )
  )
  ),
- handler: async ({ apiHost, edgeApiHost, apiKey, proxy, ids, all, whatIf, project: projectId, verbose }) => {
+ handler: async ({
+ apiHost,
+ edgeApiHost,
+ apiKey,
+ proxy,
+ ids,
+ all,
+ whatIf,
+ project: projectId,
+ verbose,
+ directory
+ }) => {
  if (!all && !ids || all && ids) {
  console.error(`Specify --all or entry pattern ID(s) to publish.`);
  process.exit(1);
@@ -4630,13 +4881,28 @@ var EntryPatternPublishModule = {
  entryIDs: entryIDsArray,
  onlyPatterns: true
  });
+ const fileClient = getFileClient({ apiKey, apiHost, fetch: fetch2, projectId });
  await syncEngine({
  source,
  target,
  // Publishing is one-direction operation, so no need to support automatic un-publishing
  mode: "createOrUpdate",
  whatIf,
- log: createPublishStatusSyncEngineConsoleLogger({ status: "publish" })
+ log: createPublishStatusSyncEngineConsoleLogger({ status: "publish" }),
+ onBeforeCompareObjects: async (sourceObject) => {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
+ fileClient
+ });
+ },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
+ onBeforeWriteObject: async (sourceObject) => {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
+ directory,
+ fileClient
+ });
+ }
  });
  }
  };
@@ -4727,16 +4993,21 @@ var EntryPatternPullModule = {
  allowEmptySource: allowEmptySource ?? true,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject, targetObject) => {
- return replaceRemoteUrlsWithLocalReferences(sourceObject, targetObject, {
+ return replaceRemoteUrlsWithLocalReferences({
+ sourceEntity: sourceObject,
+ targetEntity: targetObject,
  directory,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndDownloadUniformFilesForObject(sourceObject, {
+ await downloadFilesForCompositionOrEntry({
+ entity: sourceObject.object,
  directory,
  fileClient
  });
+ return sourceObject;
  }
  });
  }
@@ -4821,12 +5092,15 @@ var EntryPatternPushModule = {
  allowEmptySource,
  log: createSyncEngineConsoleLogger({ diffMode }),
  onBeforeCompareObjects: async (sourceObject) => {
- return swapOutUniformFileUrlsForTargetProject(sourceObject, {
+ return replaceLocalUrlsWithRemoteReferences({
+ entity: sourceObject,
  fileClient
  });
  },
+ compareContents: compareCompositionsOrEntriesWithoutAssetUrls,
  onBeforeWriteObject: async (sourceObject) => {
- return extractAndUploadUniformFilesForObject(sourceObject, {
+ return uploadFilesForCompositionOrEntry({
+ entity: sourceObject,
  directory,
  fileClient
  });
@@ -8182,7 +8456,7 @@ import { PostHog } from "posthog-node";
  // package.json
  var package_default = {
  name: "@uniformdev/cli",
- version: "20.1.0",
+ version: "20.2.0",
  description: "Uniform command line interface tool",
  license: "SEE LICENSE IN LICENSE.txt",
  main: "./cli.js",
@@ -8215,6 +8489,7 @@ var package_default = {
  "@uniformdev/files": "workspace:*",
  "@uniformdev/project-map": "workspace:*",
  "@uniformdev/redirect": "workspace:*",
+ "@uniformdev/richtext": "workspace:*",
  "call-bind": "^1.0.2",
  colorette: "2.0.20",
  cosmiconfig: "9.0.0",
@@ -8222,11 +8497,11 @@ var package_default = {
  diff: "^5.0.0",
  dotenv: "^16.0.3",
  execa: "5.1.1",
- "file-type": "^19.6.0",
+ "file-type": "^20.0.0",
  "fs-jetpack": "5.1.0",
  graphql: "16.9.0",
  "graphql-request": "6.1.0",
- "image-size": "^1.0.2",
+ "image-size": "^1.2.0",
  inquirer: "9.2.17",
  "isomorphic-git": "1.25.2",
  "js-yaml": "^4.1.0",
@@ -8572,7 +8847,7 @@ ${err.message}`);
  // src/projects/cloneStarter.ts
  import crypto2 from "crypto";
  import fs3 from "fs";
- import fsj2 from "fs-jetpack";
+ import fsj5 from "fs-jetpack";
  import * as git from "isomorphic-git";
  import * as http from "isomorphic-git/http/node/index.js";
  import os from "os";
@@ -8603,7 +8878,7 @@ async function cloneStarter({
  throw new Error(`"${targetDir}" is not empty`);
  }
  const starterDir = path.join(cloneDir, ...pathSegments);
- fsj2.copy(starterDir, targetDir, { overwrite: true });
+ fsj5.copy(starterDir, targetDir, { overwrite: true });
  if (dotEnvFile) {
  fs3.writeFileSync(path.resolve(targetDir, ".env"), dotEnvFile, "utf-8");
  }
@@ -10448,7 +10723,8 @@ var SyncPushModule = {
  ...otherParams,
  patternType: "component",
  onlyPatterns: true,
- all: true
+ all: true,
+ directory: getPushFilename("componentPattern", config2)
  }),
  {
  text: "publishing component patterns...",
@@ -10471,7 +10747,8 @@ var SyncPushModule = {
  ...otherParams,
  all: true,
  onlyPatterns: true,
- patternType: "composition"
+ patternType: "composition",
+ directory: getPushFilename("compositionPattern", config2)
  }),
  {
  text: "publishing composition patterns...",
@@ -10493,7 +10770,8 @@ var SyncPushModule = {
  CompositionPublishModule.handler({
  ...otherParams,
  all: true,
- onlyCompositions: true
+ onlyCompositions: true,
+ directory: getPushFilename("composition", config2)
  }),
  {
  text: "publishing compositions...",
@@ -10511,30 +10789,44 @@ var SyncPushModule = {
  }
  if (config2.entitiesConfig?.entry && config2.entitiesConfig?.entry?.push?.disabled !== true && config2.entitiesConfig?.entry?.publish) {
  try {
- await spinPromise(EntryPublishModule.handler({ ...otherParams, all: true }), {
- text: "publishing entries...",
- successText: "published entries",
- failText(error) {
- return `publishing entries
+ await spinPromise(
+ EntryPublishModule.handler({
+ ...otherParams,
+ all: true,
+ directory: getPushFilename("entry", config2)
+ }),
+ {
+ text: "publishing entries...",
+ successText: "published entries",
+ failText(error) {
+ return `publishing entries
 
  ${error.stack ?? error.message}`;
+ }
  }
- });
+ );
  } catch {
  process.exit(1);
  }
  }
  if (config2.entitiesConfig?.entryPattern && config2.entitiesConfig?.entryPattern?.push?.disabled !== true && config2.entitiesConfig?.entryPattern?.publish) {
  try {
- await spinPromise(EntryPatternPublishModule.handler({ ...otherParams, all: true }), {
- text: "publishing entry patterns...",
- successText: "published entry patterns",
- failText(error) {
- return `publishing entry patterns
+ await spinPromise(
+ EntryPatternPublishModule.handler({
+ ...otherParams,
+ all: true,
+ directory: getPushFilename("entryPattern", config2)
+ }),
+ {
+ text: "publishing entry patterns...",
+ successText: "published entry patterns",
+ failText(error) {
+ return `publishing entry patterns
 
  ${error.stack ?? error.message}`;
+ }
  }
- });
+ );
  } catch {
  process.exit(1);
  }
@@ -10583,14 +10875,14 @@ import { existsSync as existsSync4, promises as fs5 } from "fs";
  import { get as getHttp } from "http";
  import { get as getHttps } from "https";
  import { tmpdir } from "os";
- import { join as join3 } from "path";
+ import { join as join7 } from "path";
  import registryUrl from "registry-url";
  import { URL as URL2 } from "url";
  var compareVersions = (a, b) => a.localeCompare(b, "en-US", { numeric: true });
  var encode = (value) => encodeURIComponent(value).replace(/^%40/, "@");
  var getFile = async (details, distTag) => {
  const rootDir = tmpdir();
- const subDir = join3(rootDir, "update-check");
+ const subDir = join7(rootDir, "update-check");
  if (!existsSync4(subDir)) {
  await fs5.mkdir(subDir);
  }
@@ -10598,7 +10890,7 @@ var getFile = async (details, distTag) => {
  if (details.scope) {
  name = `${details.scope}-${name}`;
  }
- return join3(subDir, name);
+ return join7(subDir, name);
  };
  var evaluateCache = async (file, time, interval) => {
  if (existsSync4(file)) {
@@ -10753,7 +11045,7 @@ var checkForUpdateMiddleware = async ({ verbose }) => {
 
  // src/middleware/checkLocalDepsVersionsMiddleware.ts
  import { magenta, red as red5 } from "colorette";
- import { join as join4 } from "path";
+ import { join as join8 } from "path";
 
  // src/fs.ts
  import { promises as fs6 } from "fs";
@@ -10792,7 +11084,7 @@ var checkLocalDepsVersions = async (args) => {
  try {
  let isOutside = false;
  let warning = `${magenta("Warning:")} Installed Uniform packages should be the same version`;
- const localPackages = await tryReadJSON(join4(process.cwd(), "package.json"));
+ const localPackages = await tryReadJSON(join8(process.cwd(), "package.json"));
  if (!localPackages) return;
  let firstVersion;
  const allDependencies = {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@uniformdev/cli",
- "version": "20.1.0",
+ "version": "20.2.0",
  "description": "Uniform command line interface tool",
  "license": "SEE LICENSE IN LICENSE.txt",
  "main": "./cli.js",
@@ -27,12 +27,13 @@
  },
  "dependencies": {
  "@thi.ng/mime": "^2.2.23",
- "@uniformdev/assets": "20.1.0",
- "@uniformdev/canvas": "20.1.0",
- "@uniformdev/context": "20.1.0",
- "@uniformdev/files": "20.1.0",
- "@uniformdev/project-map": "20.1.0",
- "@uniformdev/redirect": "20.1.0",
+ "@uniformdev/assets": "20.2.0",
+ "@uniformdev/canvas": "20.2.0",
+ "@uniformdev/context": "20.2.0",
+ "@uniformdev/files": "20.2.0",
+ "@uniformdev/project-map": "20.2.0",
+ "@uniformdev/redirect": "20.2.0",
+ "@uniformdev/richtext": "20.2.0",
  "call-bind": "^1.0.2",
  "colorette": "2.0.20",
  "cosmiconfig": "9.0.0",
@@ -40,11 +41,11 @@
  "diff": "^5.0.0",
  "dotenv": "^16.0.3",
  "execa": "5.1.1",
- "file-type": "^19.6.0",
+ "file-type": "^20.0.0",
  "fs-jetpack": "5.1.0",
  "graphql": "16.9.0",
  "graphql-request": "6.1.0",
- "image-size": "^1.0.2",
+ "image-size": "^1.2.0",
  "inquirer": "9.2.17",
  "isomorphic-git": "1.25.2",
  "js-yaml": "^4.1.0",
@@ -78,5 +79,5 @@
  "publishConfig": {
  "access": "public"
  },
- "gitHead": "343f488ce579a33e39d95c88bc7184763404ab07"
+ "gitHead": "7ca16ddb9f03fffe82a0252d53790fcaa12ebd3c"
  }