MeUtils 2025.2.5.11.6.16__py3-none-any.whl → 2025.2.6.13.5.49__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/METADATA +26 -26
- {MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/RECORD +23 -21
- examples/_openaisdk/openai_baichuan.py +2 -2
- examples/_openaisdk/openai_chatfire.py +21 -13
- examples/_openaisdk/openai_dashscope.py +1 -1
- examples/_openaisdk/openai_luchentech.py +60 -0
- meutils/apis/chatglm/glm_video_api.py +4 -2
- meutils/apis/hailuoai/videos.py +18 -37
- meutils/apis/images/recraft.py +5 -5
- meutils/apis/jimeng/images.py +21 -8
- meutils/apis/search/metaso.py +34 -3
- meutils/data/VERSION +1 -1
- meutils/data/oneapi/NOTICE.md +3 -17
- meutils/llm/clients.py +3 -0
- meutils/llm/completions/agents/search.py +28 -6
- meutils/llm/completions/qwenllm.py +4 -2
- meutils/llm/completions/reasoner.py +87 -0
- meutils/schemas/metaso_types.py +7 -1
- meutils/schemas/oneapi/common.py +10 -4
- {MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/LICENSE +0 -0
- {MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/WHEEL +0 -0
- {MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/top_level.txt +0 -0
{MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: MeUtils
-Version: 2025.2.
+Version: 2025.2.6.13.5.49
 Summary: description
 Home-page: https://github.com/yuanjie-ai/MeUtils
 Author: yuanjie
@@ -64,41 +64,41 @@ Requires-Dist: sse-starlette ; extra == 'ai'
 Requires-Dist: openai ; extra == 'ai'
 Requires-Dist: langchain ; extra == 'ai'
 Provides-Extra: all
-Requires-Dist:
+Requires-Dist: missingno ; extra == 'all'
+Requires-Dist: langchain ; extra == 'all'
 Requires-Dist: sqlalchemy ; extra == 'all'
-Requires-Dist:
+Requires-Dist: geopy ; extra == 'all'
+Requires-Dist: seaborn ; extra == 'all'
 Requires-Dist: pyarrow ; extra == 'all'
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: streamlit ; extra == 'all'
+Requires-Dist: simplejson ; extra == 'all'
+Requires-Dist: iteration-utilities ; extra == 'all'
+Requires-Dist: fastapi ; extra == 'all'
+Requires-Dist: thefuck ; extra == 'all'
+Requires-Dist: pandas-summary ; extra == 'all'
+Requires-Dist: dataframe-image ; extra == 'all'
+Requires-Dist: gunicorn ; extra == 'all'
+Requires-Dist: asyncmy ; extra == 'all'
+Requires-Dist: pretty-errors ; extra == 'all'
 Requires-Dist: polars ; extra == 'all'
-Requires-Dist:
+Requires-Dist: pandas-profiling[notebook] ; extra == 'all'
 Requires-Dist: sse-starlette ; extra == 'all'
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: fastapi ; extra == 'all'
-Requires-Dist: uvicorn ; extra == 'all'
-Requires-Dist: streamlit ; extra == 'all'
-Requires-Dist: missingno ; extra == 'all'
-Requires-Dist: jinja2 ; extra == 'all'
+Requires-Dist: fastapi[all] ; extra == 'all'
+Requires-Dist: openai ; extra == 'all'
 Requires-Dist: pymilvus ; extra == 'all'
-Requires-Dist:
-Requires-Dist: geopy ; extra == 'all'
+Requires-Dist: redis-py-cluster ; extra == 'all'
 Requires-Dist: schedule ; extra == 'all'
-Requires-Dist: openai ; extra == 'all'
-Requires-Dist: pandas-summary ; extra == 'all'
 Requires-Dist: jieba ; extra == 'all'
-Requires-Dist:
-Requires-Dist: reportlab ; extra == 'all'
-Requires-Dist: simplejson ; extra == 'all'
-Requires-Dist: langchain ; extra == 'all'
-Requires-Dist: redis-py-cluster ; extra == 'all'
-Requires-Dist: pandas-profiling[notebook] ; extra == 'all'
-Requires-Dist: pretty-errors ; extra == 'all'
+Requires-Dist: uvicorn ; extra == 'all'
 Requires-Dist: pymupd ; extra == 'all'
-Requires-Dist:
-Requires-Dist: cachetools ; extra == 'all'
+Requires-Dist: faiss-cpu ; extra == 'all'
 Requires-Dist: filetype ; extra == 'all'
+Requires-Dist: reportlab ; extra == 'all'
+Requires-Dist: cachetools ; extra == 'all'
+Requires-Dist: jinja2 ; extra == 'all'
 Requires-Dist: pymongo ; extra == 'all'
+Requires-Dist: thriftpy2 ; extra == 'all'
+Requires-Dist: pymysql ; extra == 'all'
 Provides-Extra: ann
 Requires-Dist: pymilvus ; extra == 'ann'
 Requires-Dist: faiss-cpu ; extra == 'ann'
{MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/RECORD
CHANGED

@@ -91,12 +91,12 @@ examples/_openaisdk/minimax.py,sha256=zOshBkL1bHgRlj4z6k2FQRScNTr7ysZkfh2-OaAynk
 examples/_openaisdk/open_router.py,sha256=Z4i_FJIc773JnIdYA3Hhuy1Dq5-DIOIrmkWwbp82hJE,1460
 examples/_openaisdk/openai_aiplus.py,sha256=7sT2t4jjZBABFiwKQwUjgdilO8s006NuPAPx7neHER8,1182
 examples/_openaisdk/openai_audio.py,sha256=ZyYslH9RfdJBpNUpjKn_5x0d94_H291Qt31u_6weISc,484
-examples/_openaisdk/openai_baichuan.py,sha256
+examples/_openaisdk/openai_baichuan.py,sha256=-xhHovkFmWOTX25CJjIon0my-ocmwfAZJu1C0eS80Xs,1614
 examples/_openaisdk/openai_baidu.py,sha256=qnrxP66lY7Qf2jEbFEHrt0FrR5_Aji1AgwSI_46IabY,1969
 examples/_openaisdk/openai_cache.py,sha256=89GbJ1QlqQbVxlUeuFgEbwDYHOkum7BK0zB-CqmDAZU,726
-examples/_openaisdk/openai_chatfire.py,sha256=
+examples/_openaisdk/openai_chatfire.py,sha256=kklrM7T8P5P5_mfCBPrXRq9FlBMKPMovGrI1mHZcKSQ,18446
 examples/_openaisdk/openai_chatfire_all.py,sha256=DaiI1NfDeK655QvXyczkgXK9Awxp7NBe71LHA7trxGE,4831
-examples/_openaisdk/openai_dashscope.py,sha256=
+examples/_openaisdk/openai_dashscope.py,sha256=m9SjQpaNgYwr4Qd3XusWbJBGp5ZTtpuB40QopejxPKQ,1029
 examples/_openaisdk/openai_deepinfra.py,sha256=innI4BT15dZEqheqDHG4lHDczY1QC7t2Whcavi1pcz0,1195
 examples/_openaisdk/openai_deepseek.py,sha256=0IbHh2F6eVDs-i8gP8x8wj6s9R6CzOGN7QrCp1EuB5k,2086
 examples/_openaisdk/openai_doubao.py,sha256=IKM929LKgYm2fXbCIvuX4ZPpUkm2igcdhJujV49XCaQ,1341
@@ -109,6 +109,7 @@ examples/_openaisdk/openai_groq.py,sha256=_5Ue1yEx0Gp6theFioB_D8kepR0UlqMU96PVGH
 examples/_openaisdk/openai_images.py,sha256=mJQGbue_ka9kjsNx1TRfIRab9HgC0h5i0O_6U7kxb_c,5676
 examples/_openaisdk/openai_json.py,sha256=M4tq_IkzYbmtgbFilGQHmRdJhH0OltiwGk7cK7uc-sQ,1850
 examples/_openaisdk/openai_lingyi.py,sha256=l2GcNxhwfLv7MfMF-cdRWGGvX-dRaZyb8IRlXLPACMw,1834
+examples/_openaisdk/openai_luchentech.py,sha256=Jy0JxpL1u3pXcmslViPvrbwSysVBV-Dz8wSKqv6QtxY,1960
 examples/_openaisdk/openai_modelscope.py,sha256=eb-oj7O6VQiCBbcATB3iUS0iRRMJcUDqRFt51-3VJU0,1444
 examples/_openaisdk/openai_moderations.py,sha256=WoIyXDEn4dfVPwqFc54HSz0-7wLPu7pd4ZisZcyLOWM,1369
 examples/_openaisdk/openai_moon.py,sha256=Y7RrB6SUi9taQOlA50nfGTpQa9ON6XmPmrxtL-hSL8M,1753
@@ -336,7 +337,7 @@ meutils/apis/baidu/bdaitpzs.py,sha256=50pe09ECSKRJnXhJVMZlwHhGGc4r8yQrVWjq6d6OK0
 meutils/apis/baidu/test.py,sha256=nb9ivelHIw84-d1V-MNGRpGoTpLFsNXPk-TTKpjwOfQ,2058
 meutils/apis/chatglm/__init__.py,sha256=uFuc9qY-ZjGqbsfcdGZkcEkhjATojWpzk2WXTTujc70,271
 meutils/apis/chatglm/glm_video.py,sha256=9LA9pyeIcDDHXGhJbZSJfjxrLeNC3SIuCKJwDIXRKR8,14015
-meutils/apis/chatglm/glm_video_api.py,sha256=
+meutils/apis/chatglm/glm_video_api.py,sha256=q2BMG17BPc3R4iBxbZFja7Nq8XuGvmeO8G343KsUA6M,3898
 meutils/apis/chatglm/images.py,sha256=k2EtOpksgAfJb2MD6kLOKKem713kamTIinRmgr8_3n0,1974
 meutils/apis/chatglm/temp.py,sha256=zYe6JGUHVfb77T_TdNsdmpTs2D9OyqDTUEkblDAIoYY,17811
 meutils/apis/chatglm/x.py,sha256=lI86Cd6SN4noCN5HM9gRugcVmjHDtxDTV4m-7Bj61N0,24312
@@ -354,7 +355,7 @@ meutils/apis/hailuoai/demo.py,sha256=WTnDFL2HgVpAjcn5BljLrbf2y2aATkvvNy75zoEIQkc
 meutils/apis/hailuoai/hasha_new.py,sha256=1sD3fxmxYrEufmPaJZGAkYJoHKO85hfLXemnY-xYIO8,9101
 meutils/apis/hailuoai/music.py,sha256=S_oi-ZDbocTfbRgREdXtGx50xzUc1PCZGuxmr917E7M,265
 meutils/apis/hailuoai/upload.py,sha256=22S6W6hGQhUquVKqh7QCkXb6KHDl3iMKKi6Z0AJdFEI,6501
-meutils/apis/hailuoai/videos.py,sha256=
+meutils/apis/hailuoai/videos.py,sha256=SAdEfSl-1a4IXor1FnZ8gf-t11kE5UGRK8FK0kdXPcg,20874
 meutils/apis/hailuoai/yy.py,sha256=gJvr7VDa0YPEUkZuETdmCFWWheylDPoMUg8gbS_SVw8,8937
 meutils/apis/hf/__init__.py,sha256=ZZoRl8yNYekoMhE__67XXBKNSYJaLgjtaa0netjJKUU,271
 meutils/apis/hf/got_ocr.py,sha256=pZURFpryDh_Ak6PNiARztOzYgkjVrfScJ5-0XzMBrzI,1776
@@ -372,7 +373,7 @@ meutils/apis/images/deepinfra.py,sha256=lMqfU0LIXEzHiAS31dCcoTnPIE6tpoDJhq61H0TV
 meutils/apis/images/demo.py,sha256=Cjk-EKMKbp44czykW_44qR0TzElX8nNCALqXBBw5sJA,4405
 meutils/apis/images/edits.py,sha256=47s_oggNT3Vp1TYr4hwuGE08hOkNn47Nmv9wyDsMujQ,9727
 meutils/apis/images/eidt.py,sha256=42dCdGPSLvr0zh8OZtwQxi-5B6Ln_o_FsL1gt2Ura_k,750
-meutils/apis/images/recraft.py,sha256=
+meutils/apis/images/recraft.py,sha256=ZdlDPFTmQBma9ToeBN44mNPISOP0-hXsmSgM6DyP1Ko,59625
 meutils/apis/images/flux/__init__.py,sha256=W_PZabQm6EmtJYmzEiEpL_WCXTAW0AiJ-t_n5UTrE8c,271
 meutils/apis/images/flux/fluxpro.py,sha256=NFl9t9C3R3jsNJpez4o9tOz3r-fjRXj-vKMxCSTRDTU,4022
 meutils/apis/images/flux/mystic.py,sha256=cUkqaMAbd3dSBCvFEoNBBoEoWCGzvBJWPEkEq1Q8xzw,3616
@@ -390,7 +391,7 @@ meutils/apis/jimeng/common.py,sha256=o6qMyic9jbgD_tmOcGDPn9tcOctMyLP9Y3XW5rIwsZs
 meutils/apis/jimeng/doubao.py,sha256=5OdnBpmZQOCLIb6lnN8YrL7J7E0XF47q0UF-z6asSuI,4295
 meutils/apis/jimeng/doubao_utils.py,sha256=XCHnvwfCOtJmPQuzDOCfKDMP_Pcqh-Y4w2h5f7Y2gzM,5999
 meutils/apis/jimeng/files.py,sha256=afwWvN6gSRjBGVMjV3CMpMwnuJ1cAVYBiFc6wamc570,9547
-meutils/apis/jimeng/images.py,sha256=
+meutils/apis/jimeng/images.py,sha256=5y0wdiLxAAeC-1E2-REiVQrmqHRq_6de4XhzvleNIwY,11448
 meutils/apis/jimeng/videos.py,sha256=x-PDOYNBN-yTmNvMbyDbwm_E2LXZ6A4hcVzEahV_Hvc,10813
 meutils/apis/kling/__init__.py,sha256=pKz40SEjui1dhHBD-icyNTJOYhzLvxLxJCwszuH4vio,271
 meutils/apis/kling/api.py,sha256=AuVQ9URmrbTlz7dEv_kIyhA9fMTZPL9rF3Td6KxTIMA,2183
@@ -432,7 +433,7 @@ meutils/apis/runwayml/__init__.py,sha256=90oL_3o8YkFPYm4R4PCxd3m9hgnHpOCLr0u-X4B
 meutils/apis/runwayml/gen.py,sha256=JrfegC4pI2r7DprDLGMi5px0K1C1pSgopnz0lcJU5RY,5194
 meutils/apis/search/__init__.py,sha256=TWEHzb_O2hKDpjfThEWOz1EI1bJeiN20IAvC8wYONQ4,271
 meutils/apis/search/baichuan.py,sha256=OUD0TH7KzqSxtuRJgjJPWO8XRQIoUwW5RX7lWMo7BXw,268
-meutils/apis/search/metaso.py,sha256=
+meutils/apis/search/metaso.py,sha256=V7gOUM9_8aNwxVaPhM0GSTFHRMc853nZRdVN_26W40A,9871
 meutils/apis/search/metaso_.py,sha256=PqMX3FLYbbAcc9qpqrPZ58LuSF29h0asZO1XMYmXbes,3144
 meutils/apis/search/n.py,sha256=o7jzNoR5v_nAjgdUvWgebL3nvvo3ECpq2KCj9YhUNr4,4337
 meutils/apis/search/searxng.py,sha256=RBPeq-AYj5D42gROrZNg0SxIWwWEW0oqrbQ3wEDH9k8,951
@@ -517,14 +518,14 @@ meutils/config_utils/lark_utils/demo.py,sha256=3g0Fs7oLaeW75T60gYWMLgyNg1OnfOjfH
 meutils/config_utils/lark_utils/x.py,sha256=MlMQGhehP9xMEgetxVCX68XFaosfKoW1JA5cZ3JqN2w,1857
 meutils/crawlers/__init__.py,sha256=TBU4xA-IOsHV-0yIkW7YXxn_QT7TT8NncqxO7IykEfs,271
 meutils/data/SimHei.ttf,sha256=-XEnekS5yHP_URkT4XBI2w22ylV-KxudhkeIYFbrILA,10062565
-meutils/data/VERSION,sha256=
+meutils/data/VERSION,sha256=YM436darEUmGypGxfGO1gsOHVQxlkssqwI5LWx3MLzY,19
 meutils/data/_FLAG,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 meutils/data/_SUCCESS,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 meutils/data/__init__.py,sha256=Hfwkkxs4zHqKhxht0YrhS566a9P5axtmgNvM5wF8ceQ,243
 meutils/data/coordinate.py,sha256=VbFtcdho0XG4j-rZD0DgqcIf2_LR2LlbLtfhLXbxiDY,600
 meutils/data/cowboy-hat-face.webp,sha256=AHi2t-WclDVKWgGEe-oZzI4xKACNlgGWUgd622y0K-E,24814
 meutils/data/oneapi/FOOTER.md,sha256=UHv39ey2ICocxPoCj-UI_PWyyWxZYLS8zG1inqtWNQ4,448
-meutils/data/oneapi/NOTICE.md,sha256=
+meutils/data/oneapi/NOTICE.md,sha256=W0yobskQQ42MPu4dQdsfbwHQ5MHivYCymWZsGPryLJc,6345
 meutils/data/oneapi/__init__.py,sha256=cm2Kq2RDXPfdVMWcoBB77bu00zJpTDd21Rgc6bTlhws,398
 meutils/db/__init__.py,sha256=HR99rqxoFTWXUHDhLVl33tzIn8M86gigHLmEzOKH1Ec,7637
 meutils/db/mongo.py,sha256=B-B_t939rxtnNGt0PxhivjIUwinmHPzLUpXsjUIaTkQ,2337
@@ -570,7 +571,7 @@ meutils/jwt_utils/common.py,sha256=MlWDLtxiF-WeRl3oyK94KIa6BKFIXVeJtaQUnpa9ayo,8
 meutils/llm/__init__.py,sha256=mYv0QpACLd-wz4htIduPuTU3mX06_Y8ZprxDohtZGBM,244
 meutils/llm/check_api.py,sha256=sgxfMWM0fe3nNpv_lLR7kFgsWaowcm1cQlsbp7qZSKM,3567
 meutils/llm/check_utils.py,sha256=o5SAsjIuTK_p8ARZgjeyJiapVz3Ox_A6UdNbVACmpGw,3355
-meutils/llm/clients.py,sha256=
+meutils/llm/clients.py,sha256=lvKq4PbELbSBzkJSkomxvjpvIn0x9DEjA3ewewpB_uk,1182
 meutils/llm/demo.py,sha256=gyGjTxuNFMOa1X4CXaChkq0lcjbKiRsnSRhkeMvsoQk,3822
 meutils/llm/mappers.py,sha256=ZQLl3OuD06kVMP8bwIpVgAB1UVzMJWUJiZ3vgk-ZjeI,303
 meutils/llm/utils.py,sha256=C1FSpJ4lIuKdaogcDDNZrznwtKgHhDEK4glcO4wT3mA,4278
@@ -582,8 +583,9 @@ meutils/llm/completions/dify.py,sha256=dqL_8uxAp2d8M00pkyh2I30tflwi4pUyX-V6pwe3_
 meutils/llm/completions/kimi.py,sha256=TSkLObebIAb_oivOMq3aKx9qoXcMy611_g2yIKex9eo,1541
 meutils/llm/completions/modelscope.py,sha256=umPlJuHSL2RR0oNdTIAqts8iqEmGp_MR6Vh49g-UZuc,271
 meutils/llm/completions/oi.py,sha256=DmvzxJfdLA_I3MWcULhFtiUdo90j7V2dLvYvmZ-Tm9U,293
-meutils/llm/completions/qwenllm.py,sha256=
+meutils/llm/completions/qwenllm.py,sha256=d3E51-iKSCuL2HrVO4aUmGN_fa5Jwz5K1ENbK7MtXEE,2043
 meutils/llm/completions/rag.py,sha256=cpQ1aZX2QI1QDfFpODTxWHPYglK6_3qrlUM_KHLBNhc,1112
+meutils/llm/completions/reasoner.py,sha256=KthHuz_8OJu1GP2BX7Vw6x4lfmAFI4mNQeQ1feSo6U8,2523
 meutils/llm/completions/tryblend.py,sha256=NfVRktn1QiE2k60PzEI-LmKuxZCc7mtL_KlsrVcZg4k,8957
 meutils/llm/completions/tune.py,sha256=FypfUgsB34t-E9xI6WydJYEk_-phscrLOX1iUCeKb_E,10138
 meutils/llm/completions/x.py,sha256=XE8aCyuhkpYq0nho1mq9lAdRU1yOPc8BpvCpNYYMYqM,630
@@ -591,7 +593,7 @@ meutils/llm/completions/xx.py,sha256=pPfVRczvXZFUvAUOAFr0MMHMb5kVPNfKVhOLKugq52M
 meutils/llm/completions/yuanbao.py,sha256=aN3MwR6yORFv5pGA1ODiLqlNKCfAKQslIfpruAD6m08,5468
 meutils/llm/completions/agents/__init__.py,sha256=Wklnf7FTHm43hyVwfT8FXX44nqdOTykPb_HORIqDuuw,270
 meutils/llm/completions/agents/file.py,sha256=4SGCkHSBEdAB6NbxPaViXYUJq5giGtEF9FEgq2WxyBY,4934
-meutils/llm/completions/agents/search.py,sha256=
+meutils/llm/completions/agents/search.py,sha256=MMvbI7qZjxWl5YxVP7r5asxZMtfnCXrEoygjZKBv2Zk,4529
 meutils/llm/completions/rag/__init__.py,sha256=VH4g9H0pqMM1Rkjc1xefQb4Uh8f9vlTt6tjdD6G6Wc0,272
 meutils/llm/completions/rag/fire.py,sha256=0a_POkY9mEy1YwZHXOGnqf9EVyjiJwx3fWhmv_bQ54U,5743
 meutils/llm/completions/rag/qwen.py,sha256=1dBNLLbQDRsM-7EGaStcWFU8HRa-rp3RxJ9cpxu6TBg,265
@@ -716,7 +718,7 @@ meutils/schemas/kuaishou_types.py,sha256=-_-0Fkab5jyJFb_2Y4ugi92JSIDlvWL_e8mH-wM
 meutils/schemas/luma_types.py,sha256=s9RZnZUpdUg8-13_WOWm3bE_e246olplFFjP8ygiDuw,1900
 meutils/schemas/message_types.py,sha256=AJc9ZT_weaTjfjmwF5CDroV0WJEJKnA3u4bnbHhBamo,6176
 meutils/schemas/message_types_.py,sha256=oL7uOsewaziSryZywfap09M5Wh9w28RJDuadFuAfCSI,6297
-meutils/schemas/metaso_types.py,sha256=
+meutils/schemas/metaso_types.py,sha256=HRZarMqNvp_reOOn5EyIyCHY4xDVDSbwze9ekspfVFo,1881
 meutils/schemas/napkin_types.py,sha256=Kp0MbpRxzMBz4HWW4DrfcgOvHZVEF3N7P5yNeC1XGI4,2664
 meutils/schemas/ocr_types.py,sha256=fZWYQAKii7HaTlSU9DS0wHyiEoOHuLnhvgoiH4Cn498,1381
 meutils/schemas/openai_api_protocol.py,sha256=0GhmhUTEr54N_XCVuZI1MSxJldvwMNJjUAAAdWznw58,12716
@@ -742,7 +744,7 @@ meutils/schemas/db/__init__.py,sha256=m1maURVoM6dIW0yt6ELZrZTzULtkHybVOSXtHNJRVI
 meutils/schemas/db/oneapi_types.py,sha256=YanT0q9pU7dva2ZBPWjCpwGNUuifnJh0zUJJCQ9070c,3848
 meutils/schemas/oneapi/__init__.py,sha256=uevbi3QAvFzN9WPbx9bYKTDyKt7P2ueZO6W0nSiD0sk,289
 meutils/schemas/oneapi/_types.py,sha256=ClvAaNy3SahEN8lL8KEErHTD6HANelXUeKc_3iLfosQ,1488
-meutils/schemas/oneapi/common.py,sha256=
+meutils/schemas/oneapi/common.py,sha256=S2TxKfewPOThEA-mNVo_94rDtSNiLqH0jrQEU60os-U,27636
 meutils/schemas/oneapi/icons.py,sha256=T7W5gInBJoHe62wzMimbG_UI-wn3_-rmQ1O4O2z-CQY,1089
 meutils/schemas/oneapi/model_group_info.py,sha256=rGtflYJuFIjk5MsVEvK9JUR4IciX8jfErqeLqf8DIlQ,1586
 meutils/schemas/oneapi/model_info.py,sha256=_uwKEPIIqm7ZYfhmpxtXB2QNsS83SpJY-OaBzvogC9w,772
@@ -870,9 +872,9 @@ meutils/tools/seize.py,sha256=nOKAS63w-Lbi48I0m2MPhdsokUTwxco0laPxYVmW4Mw,1064
 meutils/tools/service_monitor.py,sha256=ibsLtBN2g2DL7ZnLJ8vhiZOiOcqTAyx711djDdBK-3M,1255
 meutils/tools/sys_monitor.py,sha256=6MoyzrItqDUOSjfHcMJmMofQkEPTW36CT_aKui0rg84,429
 meutils/tools/token_monitor.py,sha256=Np-YK-R4P4IPAXyZvMxwvXI4sFmNJQAQK1lSegNaYpA,997
-MeUtils-2025.2.
-MeUtils-2025.2.
-MeUtils-2025.2.
-MeUtils-2025.2.
-MeUtils-2025.2.
-MeUtils-2025.2.
+MeUtils-2025.2.6.13.5.49.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+MeUtils-2025.2.6.13.5.49.dist-info/METADATA,sha256=vvI3RHYaD6GpOX5fb1Tzz8_U1ZiRChhSPNeBTfP17Z0,6141
+MeUtils-2025.2.6.13.5.49.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+MeUtils-2025.2.6.13.5.49.dist-info/entry_points.txt,sha256=lufZlBHRqqZKdY-ZQJ4CSZb0qhV5hQC37egZna9M7ug,357
+MeUtils-2025.2.6.13.5.49.dist-info/top_level.txt,sha256=cInfxMmkgNOskurdjwP5unau4rA7Uw48nu07tYhS7KY,22
+MeUtils-2025.2.6.13.5.49.dist-info/RECORD,,
examples/_openaisdk/openai_baichuan.py
CHANGED

@@ -28,9 +28,9 @@ for model in BAICHUAN:
         model=model,
         messages=[
             # {"role": "system", "content": "你是个画图工具"},
-            {"role": "user", "content": "你是谁"},
+            # {"role": "user", "content": "你是谁"},
 
-
+            {"role": "user", "content": "今天南京天气如何"}
 
         ],
         # top_p=0.7,
examples/_openaisdk/openai_chatfire.py
CHANGED

@@ -22,20 +22,20 @@ from meutils.llm.openai_utils import to_openai_completion_params
 def check_models(models, api_key=os.getenv("OPENAI_API_KEY_GUOCHAN")):
     messages = [
 
-        {
-
-
-
-
-
-
-
-        },
-        {"role": "user", "content": "1+1"},
-        {"role": "assistant", "content": "好的"},
+        # {
+        #     "role": "system",
+        #     "content": [
+        #         {
+        #             "type": "text",
+        #             "text": "你是AI"
+        #         }
+        #     ]
+        # },
+        # {"role": "user", "content": "1+1"},
+        # {"role": "assistant", "content": "好的"},
 
         # {"role": "system", "content": "你是个机器人"},
-        {"role": "user", "content": "
+        {"role": "user", "content": "9.8 9.11哪个大"},
     ]
 
     for model in models:
@@ -225,7 +225,13 @@ if __name__ == '__main__':
         # "deepseek-r1:1.5b"
         # "deepseek-reasoner",
         # "deepseek-r1:1.5b"
-        "deepseek-v3-128k"
+        # "deepseek-v3-128k",
+        # "qwen2.5-32b-instruct"
+        "deepseek-r1:1.5b"
+        # "deepseek-r1:1.5b"
+
+        # "deepseek-r1",
+        # "deepseek-v3"
 
     ]
     check_models(models, api_key=os.getenv("OPENAI_API_KEY"))
@@ -242,3 +248,5 @@ if __name__ == '__main__':
     #
     # for i in range(5):
     #     check_models(models, api_key=os.getenv("OPENAI_API_KEY_GUOCHAN") + "-302")
+
+    # check_models(models, api_key=os.getenv("OPENAI_API_KEY_GUOCHAN") + "-725")
examples/_openaisdk/openai_luchentech.py
ADDED

@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project      : AI.  @by PyCharm
+# @File         : openai_siliconflow
+# @Time         : 2024/6/26 10:42
+# @Author       : betterme
+# @WeChat       : meutils
+# @Software     : PyCharm
+# @Description  : https://cloud.luchentech.com/maas/modelMarket/123e4567-e89b-12d3-a456-426614174000
+import os
+
+from meutils.pipe import *
+from openai import OpenAI
+from openai import OpenAI, APIStatusError
+from fake_useragent import UserAgent
+
+ua = UserAgent()
+client = OpenAI(
+    # base_url="https://free.chatfire.cn/v1",
+    api_key="7b1c32c9-5bd9-4958-a8e4-4be68056038c",
+    base_url="https://cloud.luchentech.com/api/maas",
+    default_headers={'User-Agent': ua.random}
+)
+
+message = """
+A Chinese beauty plays Catwoman. She is seductive. She wears a fitted black leather tights, decorated with neon blue lines flowing along the seams, pulsating like an electric current. There are many hollow designs for tights, showing big breasts, nipples and female genitals. She wears a pair of black cat ear headdresses. Her hands are covered with black leather gloves extending to her elbows, with retractable silver claws on her fingertips. She stood around the roof surrounded by towering skyscrapers, and countless neon signs flashed with various colors.
+"""
+
+try:
+    completion = client.chat.completions.create(
+        model="deepseek_r1",
+        # model="xxxxxxxxxxxxx",
+        messages=[
+
+            {"role": "user", "content": "详细介绍下你是谁"}
+        ],
+        # top_p=0.7,
+        stream=False,
+        max_tokens=1000
+    )
+except APIStatusError as e:
+    print(e.status_code)
+
+    print(e.response)
+    print(e.message)
+    print(e.code)
+
+for chunk in completion:
+    # print(bjson(chunk))
+    print(chunk.choices[0].delta.content, flush=True)
+
+# r = client.images.generate(
+#     model="cogview-3-plus",
+#     prompt="a white siamese cat",
+#     size="1024x1024",
+#     quality="hd",
+#     n=1,
+# )
+
+
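As committed, the new example requests a non-streaming completion (`stream=False`) but then iterates the result as if it were a stream. A minimal sketch of the streaming variant, assuming the same endpoint and model name as the script above (the API key here is a placeholder):

```python
from openai import OpenAI

client = OpenAI(
    api_key="sk-...",                                   # placeholder, not a real key
    base_url="https://cloud.luchentech.com/api/maas",   # endpoint taken from the diff
)

completion = client.chat.completions.create(
    model="deepseek_r1",
    messages=[{"role": "user", "content": "详细介绍下你是谁"}],
    stream=True,          # stream so the chunk loop below is valid
    max_tokens=1000,
)
for chunk in completion:
    # each chunk carries an incremental delta; content may be None on the first/last chunk
    print(chunk.choices[0].delta.content or "", end="", flush=True)
```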
meutils/apis/chatglm/glm_video_api.py
CHANGED

@@ -103,10 +103,12 @@ async def generate(request: ImageRequest, n: int = 30):  # 兼容dalle3
 # VideoResult
 if __name__ == '__main__':
     api_key = "e21bd630f681c4d90b390cd609720483.WSFVgA3Kk1wNCX0mN"
-    api_key = "7d10426c06afa81e8d7401d97781249c.DbqlSsicRtaUdKXI"  # 新号
+    # api_key = "7d10426c06afa81e8d7401d97781249c.DbqlSsicRtaUdKXI"  # 新号
 
     request = VideoRequest(
-        model='cogvideox-flash'
+        # model='cogvideox-flash',
+        model='cogvideox'
+
     )
     # r = arun(create_task(request, api_key=api_key))
     pass
meutils/apis/hailuoai/videos.py
CHANGED

@@ -19,7 +19,8 @@ from meutils.pipe import *
 from meutils.hash_utils import md5
 from meutils.io.files_utils import to_bytes
 from meutils.schemas.hailuo_types import BASE_URL, FEISHU_URL, FEISHU_URL_OSS
-from meutils.schemas.hailuo_types import BASE_URL_ABROAD, FEISHU_URL_ABROAD
+from meutils.schemas.hailuo_types import BASE_URL_ABROAD as BASE_URL, FEISHU_URL_ABROAD as FEISHU_URL
+
 from meutils.schemas.hailuo_types import VideoRequest, VideoResponse
 from meutils.llm.check_utils import check_tokens
 from meutils.config_utils.lark_utils import get_next_token_for_polling
@@ -48,12 +49,8 @@ MODEL_MAPPING = {
 # minimax_video-01,minimax_video-01-live2d,,minimax_t2v-01,minimax_i2v-01,minimax_i2v-01-live,minimax_s2v-01
 
 
-async def upload(file: bytes, token:
-
-    if vip:
-        BASE_URL, FEISHU_URL = BASE_URL_ABROAD, FEISHU_URL_ABROAD
-
-    data = await get_request_policy(token, vip)
+async def upload(file: bytes, token: str):
+    data = await get_request_policy(token)
 
     access_key_id = data["accessKeyId"]
     access_key_secret = data["accessKeySecret"]
@@ -116,13 +113,8 @@ async def upload(file: bytes, token: Optional[str] = None, vip: Optional[bool] =
 
 @alru_cache(ttl=1 * 24 * 60 * 60)
 @retrying()
-async def get_access_token(token:
-
-    if vip:
-        BASE_URL, FEISHU_URL = BASE_URL_ABROAD, FEISHU_URL_ABROAD
-
-    token = token or await get_next_token_for_polling(FEISHU_URL, from_redis=True)
-    # if vip: return token  # 关闭自动刷新
+async def get_access_token(token: str):
+    logger.debug(f"get_access_token:{token}")
 
     params = {
         'device_platform': 'web',
@@ -154,7 +146,7 @@ async def get_access_token(token: Optional[str] = None, vip: Optional[bool] = Tr
 
 
 @alru_cache(ttl=1 * 60)
-async def get_request_policy(token
+async def get_request_policy(token):
     """
     {
         "data": {
@@ -169,11 +161,7 @@ async def get_request_policy(token, vip: Optional[bool] = True):
         }
     }
     """
-    token = await get_access_token(token
-
-    global BASE_URL, FEISHU_URL
-    if vip:
-        BASE_URL, FEISHU_URL = BASE_URL_ABROAD, FEISHU_URL_ABROAD
+    token = await get_access_token(token)
 
     headers = {
         'token': token,
@@ -196,14 +184,11 @@ async def get_request_policy(token, vip: Optional[bool] = True):
 
 
 @retrying(predicate=lambda r: r.base_resp.status_code in {1000061, 1500009})  # 限流
-async def create_task(request: VideoRequest, token: Optional[str] = None
-
-    if vip:
-        BASE_URL, FEISHU_URL = BASE_URL_ABROAD, FEISHU_URL_ABROAD
-
-    refresh_token = token or await get_next_token_for_polling(FEISHU_URL, from_redis=True)
+async def create_task(request: VideoRequest, token: Optional[str] = None):
+    refresh_token = token or await get_next_token_for_polling(FEISHU_URL, from_redis=True, check_token=check_token)
     # refresh_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3NDA0NzA4NzgsInVzZXIiOnsiaWQiOiIzMjg2MDg4ODkzNTA4MTU3NDQiLCJuYW1lIjoiUm9idXN0IEdlcnRydWRlIiwiYXZhdGFyIjoiaHR0cHM6Ly9jZG4uaGFpbHVvYWkudmlkZW8vbW9zcy9wcm9kLzIwMjQtMTItMjgtMTYvdXNlci91c2VyX2F2YXRhci8xNzM1Mzc1OTI0OTkyMTcxMDY3LWF2YXRhcl8zMjg2MDg4ODkzNTA4MTU3NDQiLCJkZXZpY2VJRCI6IjMwMjgzMzc1OTUxMjc2NDQxNyIsImlzQW5vbnltb3VzIjpmYWxzZX19.dLNBSHjqnKutGl3ralS2g8A-RodHjOdos11vdpbkPwc"
-
+
+    token = await get_access_token(refresh_token)
 
     payload = {
         "desc": request.prompt,
@@ -228,7 +213,7 @@ async def create_task(request: VideoRequest, token: Optional[str] = None, vip: O
         #              "name": "duikoux.jpg", "type": "jpg"}],
         #     "modelID": "23021", "quantity": "1"}
     else:
-        data = await upload(file, token=refresh_token
+        data = await upload(file, token=refresh_token)
         file_data = {
             "id": data['data']['fileID'],
             "name": "_.png",
@@ -291,7 +276,7 @@ async def create_task(request: VideoRequest, token: Optional[str] = None, vip: O
 
     task_id = (data.get('data') or {}).get('id', '')
     response = VideoResponse(
-        task_id=f"
+        task_id=f"hailuoai-{task_id}",
        base_resp=data.get('statusInfo', {}),
        system_fingerprint=refresh_token
     )
@@ -331,9 +316,6 @@ async def create_task(request: VideoRequest, token: Optional[str] = None, vip: O
 
 # 307134660730421250
 async def get_task(task_id: str, token: str):
-    global BASE_URL, FEISHU_URL
-    if "hailuoai" in task_id:
-        BASE_URL, FEISHU_URL = BASE_URL_ABROAD, FEISHU_URL_ABROAD
 
     task_id = task_id.rsplit('-', 1)[-1]
 
@@ -380,9 +362,8 @@ async def get_task(task_id: str, token: str):
 
 
 # https://hailuoai.video/v1/api/user/equity?device_platform=web&app_id=3001&version_code=22201&uuid=3de88ad0-8a38-48a9-8ed3-0d63f9c71296&lang=en&device_id=302833759512764417&os_name=Mac&browser_name=chrome&device_memory=8&cpu_core_num=10&browser_language=zh-CN&browser_platform=MacIntel&screen_width=1920&screen_height=1080&unix=1731571578000
+@alru_cache(ttl=3600)
 async def check_token(token, threshold: int = 30, **kwargs):
-    BASE_URL, FEISHU_URL = BASE_URL_ABROAD, FEISHU_URL_ABROAD
-
     if not isinstance(token, str):
         return await check_tokens(token, check_token)
 
@@ -426,9 +407,9 @@ if __name__ == '__main__':  # 304752356930580482
     # p = "/Users/betterme/PycharmProjects/AI/MeUtils/meutils/data/cowboy-hat-face.webp"
     # arun(upload(Path(p).read_bytes(), token=token, vip=vip))
     # arun(upload(Path(p).read_bytes(), vip=False))
-    access_token = arun(get_access_token())
+    # access_token = arun(get_access_token())
+    # arun(upload(Path("img.png").read_bytes(), token=access_token))
     # arun(upload(Path("img.png").read_bytes(), token=access_token))
-    arun(upload(Path("img.png").read_bytes(), token=access_token))
 
     # arun(get_task(task_id="307137575113703427", token=token))  # 307173162217783304
     # arun(get_task(task_id="307148849188945924", token=token))
@@ -448,7 +429,7 @@ if __name__ == '__main__':  # 304752356930580482
         first_frame_image="https://oss.ffire.cc/files/kling_watermark.png"  # 307173162217783304
     )
 
-    r = arun(create_task(request, token=token
+    r = arun(create_task(request, token=token))
     arun(get_task(task_id=r.task_id, token=r.system_fingerprint))
     #
     #
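The `vip` switch is gone in this file: the overseas constants are now bound once at import time via `as` aliases, and token health checking moves into `get_next_token_for_polling(..., check_token=check_token)`. A hedged usage sketch against the new signatures (function and field names come from this diff; `arun` is the event-loop helper the package's own scripts pull in from `meutils.pipe`; the prompt and image values are illustrative):

```python
from meutils.pipe import arun
from meutils.schemas.hailuo_types import VideoRequest
from meutils.apis.hailuoai.videos import create_task, get_task

request = VideoRequest(
    prompt="a cowboy-hat cat surfing at sunset",
    first_frame_image="https://oss.ffire.cc/files/kling_watermark.png",
)
# token is polled from the Feishu sheet when omitted; task ids come back prefixed with "hailuoai-"
response = arun(create_task(request))
task = arun(get_task(response.task_id, response.system_fingerprint))
```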
meutils/apis/images/recraft.py
CHANGED

@@ -147,11 +147,11 @@ if __name__ == '__main__':
         **data
     )
 
-
-
+    with timer():
+        arun(generate(request, token=token))
 
     # tokens = [token]
 
-    tokens = list(arun(aget_spreadsheet_values(feishu_url=FEISHU_URL, to_dataframe=True))[0]) | xfilter_
-
-    r = arun(check_token(tokens))
+    # tokens = list(arun(aget_spreadsheet_values(feishu_url=FEISHU_URL, to_dataframe=True))[0]) | xfilter_
+    #
+    # r = arun(check_token(tokens))
meutils/apis/jimeng/images.py
CHANGED

@@ -40,7 +40,7 @@ async def create_draft_content(request: ImageRequest, token: str):
 
     if urls := parse_url(request.prompt):
         url = urls[-1]
-        image_uri = upload_for_image(url, token)
+        image_uri = await upload_for_image(url, token)
 
         request.prompt = request.prompt.replace(url, '')
         request.model = "high_aes_general_v20_L:general_v2.0_L"  # 2.1不支持图片编辑
@@ -164,8 +164,8 @@ async def create_draft_content(request: ImageRequest, token: str):
         "main_component_id": main_component_id,
         "component_list": [component]
     }
-
-    logger.debug(bjson(draft_content))
+    logger.debug(draft_content)
+    # logger.debug(bjson(draft_content))
 
     return draft_content
 
@@ -181,6 +181,9 @@ async def create_task(request: ImageRequest, token: Optional[str] = None):
         request.model = "high_aes_general_v20_L:general_v2.0_L"
 
     draft_content = await create_draft_content(request, token)
+
+    logger.debug(json.dumps(draft_content))
+
     payload = {
         "extend": {
             "root_model": request.model,
@@ -194,7 +197,6 @@ async def create_task(request: ImageRequest, token: Optional[str] = None):
         }
     }
 
-    logger.debug(bjson(payload))
 
     async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, timeout=60) as client:
         response = await client.post(url, json=payload)
@@ -276,7 +278,10 @@ async def generate(request: ImageRequest):
         response = await get_task(task_response.task_id, task_response.system_fingerprint)
         logger.debug(response)
         if response.status.lower().startswith("fail"):
-            raise
+            raise HTTPException(
+                status_code=status.HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS,
+                detail=response.message
+            )
 
         if data := response.data:
             return {"data": data}
@@ -312,10 +317,18 @@ if __name__ == '__main__':
     # t3 = ("10186352959490", "eb4d120829cfd3ee957943f63d6152ed")
     #
     # arun(get_task(*t3))
+    data = {
+        "model": "seededit",
+        "prompt": "https://oss.ffire.cc/files/kling_watermark.png 让这个女人带上墨镜,衣服换个颜色",
+        "size": "1024x1024"
+    }
+    # arun(generate(ImageRequest(**data)))
 
-    arun(generate(ImageRequest(prompt="做一个圣诞节的海报")))
-
-
+    # arun(generate(ImageRequest(prompt="做一个圣诞节的海报")))
+    prompt = "A plump Chinese beauty wearing a wedding dress revealing her skirt and underwear is swinging on the swing,Happy smile,cleavage,Exposed thighs,Spread your legs open,Extend your leg,panties,upskirt,Barefoot,sole"
+    request = ImageRequest(prompt=prompt)
     # task = arun(create_task(request))
 
     # arun(get_task(task.task_id, task.system_fingerprint))
+
+    arun(generate(request))
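Failed tasks in `generate` now surface as an HTTP error with the upstream message instead of a bare `raise`. A small standalone sketch of that failure path, assuming FastAPI's `HTTPException` and `status` (the import lines are not shown in this hunk):

```python
from fastapi import HTTPException, status

def raise_if_failed(task_status: str, message: str) -> None:
    # Mirrors the new behaviour: a failed jimeng task becomes an HTTP 451
    # with the upstream failure message as the response detail.
    if task_status.lower().startswith("fail"):
        raise HTTPException(
            status_code=status.HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS,
            detail=message,
        )
```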
meutils/apis/search/metaso.py
CHANGED

@@ -16,6 +16,8 @@ from meutils.apis.proxy.ips import FEISHU_URL_METASO, get_one_proxy, get_proxies
 from meutils.schemas.openai_types import ChatCompletionRequest
 from meutils.notice.feishu import send_message
 
+from meutils.llm.clients import zhipuai_client
+
 token = "wr8+pHu3KYryzz0O2MaBSNUZbVLjLUYC1FR4sKqSW0p19vmcZAoEmHC72zPh/fHtOhYhCcR5GKXrxQs9QjN6dulxfOKfQkLdVkLMahMclPPjNVCPE8bLQut3zBABECLaSqpI0fVWBrdbJptnhASrSw=="
 
 MODELS = {
@@ -27,6 +29,8 @@ MODELS = {
     "ai-search-pro:scholar": "research"
 }
 
+""
+
 
 async def get_session_id(request: MetasoRequest, headers: Optional[dict] = None, proxy: Optional[str] = None):
     if proxy:
@@ -86,7 +90,13 @@ async def create(request: ChatCompletionRequest, response_format: Optional[str]
     if ":" in request.model:
         _, engine_type = request.model.split(':')
 
+    model = None
+    if request.model.startswith("deepseek"):
+        model = "ds-r1"
+        system_fingerprint = "deepseek-r1"
+
     request = MetasoRequest(
+        model=model,
         mode=MODELS.get(request.model, "concise"),
         engineType=engine_type,
         question=request.last_content,
@@ -139,14 +149,32 @@ async def create(request: ChatCompletionRequest, response_format: Optional[str]
 
     reference_mapping = {}
     async for chunk in response.aiter_lines():
+
         if (chunk := chunk.strip()) and chunk != "data:[DONE]":
             # logger.debug(chunk)
 
             try:
                 response = MetasoResponse(chunk=chunk)
+                logger.debug(response)
+
+                if not response.content and response.data.get("infoType") == 'weather':
+                    # yield response.data
+
+                    ################################################################ todo: 外部搜索
+                    chatcompletionchunks = await zhipuai_client.chat.completions.create(
+                        model="glm-4-flash",
+                        messages=[
+                            {"role": "user", "content": response.data}
+                        ],
+                        stream=True
+                    )
+                    async for chatcompletionchunk in chatcompletionchunks:
+                        yield chatcompletionchunk.choices[0].delta.content
+                    ################################################################
+                    break
 
                 if len(response.content) == 1 and response.content.startswith('秘'):  # 替换 模型水印
-                    response.content = f"{system_fingerprint}AI搜索,它是一款能够深入理解您的问题的AI搜索引擎。"
+                    response.content = f"{system_fingerprint} AI搜索,它是一款能够深入理解您的问题的AI搜索引擎。"
                 yield response.content
                 break
 
@@ -199,13 +227,16 @@ if __name__ == '__main__':
     # arun(get_access_token(request))
 
     request = ChatCompletionRequest(
-        # model="",
+        # model="deepseek-search",
+        model="deepseek-r1-search",
         # model="ai-search",
-        model="ai-search:scholar",
+        # model="ai-search:scholar",
         # model="ai-search-pro:scholar",
 
         # model="search-pro",
+        messages=[{'role': 'user', 'content': '今天南京天气怎么样'}]
+
     )
 
     arun(create(request))
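New behaviour above: when a stream chunk carries no text but its payload has `infoType == 'weather'`, the structured data is handed to `glm-4-flash` and that model's stream is re-yielded instead. A standalone sketch of the same fallback, assuming an AsyncOpenAI-compatible `zhipuai_client` as in `meutils.llm.clients` (the diff passes `response.data` directly; here it is stringified so the message content is plain text):

```python
async def answer_from_structured_data(zhipuai_client, data: dict):
    """Turn a structured search payload (e.g. weather info) into streamed prose."""
    chunks = await zhipuai_client.chat.completions.create(
        model="glm-4-flash",
        messages=[{"role": "user", "content": str(data)}],
        stream=True,
    )
    async for chunk in chunks:
        # skip keep-alive chunks with empty deltas
        if chunk.choices and chunk.choices[0].delta.content:
            yield chunk.choices[0].delta.content
```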
meutils/data/VERSION
CHANGED

@@ -1 +1 @@
-2025.02.
+2025.02.06.13.05.49
meutils/data/oneapi/NOTICE.md
CHANGED

@@ -35,23 +35,9 @@
 <summary><b>大额对公,请联系客服</b></summary>
 </details>
 
-## 2025-
-
-
-
-- 充值200增送 5%
-
-- 充值500增送 10%
-- 充值1000增送 20%
-- 充值2000增送 25%
-
-### 新上线模型Deepseek系列
-
-- deepseek-v3
-- deepseek-r1
-- deepseek-r1:7b
-
-
+## 2025-02-06(预祝大家新年快乐)
+- deepseek-ai/deepseek-v3
+- 即日起至北京时间 2025-02-09 00:00 同步官网倍率
 
 
 ---
meutils/llm/clients.py
CHANGED

@@ -14,6 +14,9 @@ from meutils.pipe import *
 OpenAI = lru_cache(Client)
 AsyncOpenAI = lru_cache(AsyncClient)
 
+
+chatfire_client = AsyncOpenAI()
+
 moonshot_client = AsyncOpenAI(
     api_key=os.getenv("MOONSHOT_API_KEY"),
     base_url=os.getenv("MOONSHOT_BASE_URL")
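`chatfire_client` is created with no arguments, so it resolves to an `openai.AsyncClient` built from environment defaults and memoized by `lru_cache`. A rough sketch of what that amounts to (not the package source):

```python
from functools import lru_cache
from openai import AsyncClient

# Calling the cached constructor with the same (empty) arguments
# returns the same client instance across the process.
AsyncOpenAI = lru_cache(AsyncClient)

# With no arguments the client falls back to environment defaults,
# i.e. OPENAI_API_KEY and OPENAI_BASE_URL.
chatfire_client = AsyncOpenAI()
```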
meutils/llm/completions/agents/search.py
CHANGED

@@ -9,7 +9,7 @@
 # @Description  : todo: 格式匹配
 
 from meutils.pipe import *
-from meutils.llm.clients import AsyncOpenAI, zhipuai_client, moonshot_client
+from meutils.llm.clients import AsyncOpenAI, chatfire_client, zhipuai_client, moonshot_client
 from meutils.llm.openai_utils import to_openai_params
 from meutils.schemas.openai_types import chat_completion, chat_completion_chunk, ChatCompletionRequest, CompletionUsage
 
@@ -90,18 +90,33 @@ class Completions(object):
             completion = await client.chat.completions.create(**data)
             return completion
 
-        else:
-
+        else:
+            # 搜索
             data = to_openai_params(request)
-
-
+            data['model'] = "web-search-pro"
+            data['stream'] = False
+            search_completion = await zhipuai_client.chat.completions.create(**data)
+            logger.debug(search_completion)
+
+            # 大模型
+            request.messages.append({
+                "role": "user",
+                "content": search_completion.model_dump_json(indent=4),
+            })
+
+            data = to_openai_params(request)
+
+            completion = await chatfire_client.chat.completions.create(**data)
             return completion
 
 
 if __name__ == '__main__':
     request = ChatCompletionRequest(
         # model="baichuan4-turbo",
-        model="xx",
+        # model="xx",
+        # model="deepseek-r1",
+        model="deepseek-r1:1.5b",
+
         # model="moonshot-v1-8k",
         # model="doubao",
 
@@ -113,3 +128,10 @@ if __name__ == '__main__':
     )
 
     arun(Completions().create(request))
+
+    # async def test():
+    #     for i in await Completions().create(request):
+    #         print(i)
+    #
+    #
+    # arun(test())
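The non-file branch now does search-then-answer: the request is first routed to ZhipuAI's `web-search-pro`, and the raw search result is appended as an extra user message before calling the chat model. A hedged standalone sketch of the same two-step flow (both client objects are assumed to be AsyncOpenAI-compatible, as in `meutils.llm.clients`):

```python
async def search_then_answer(zhipuai_client, chat_client, model: str, query: str) -> str:
    # Step 1: run the query through the web search "model" (non-streaming).
    search = await zhipuai_client.chat.completions.create(
        model="web-search-pro",
        messages=[{"role": "user", "content": query}],
        stream=False,
    )
    # Step 2: hand the raw search payload to the chat model as extra context.
    completion = await chat_client.chat.completions.create(
        model=model,
        messages=[
            {"role": "user", "content": query},
            {"role": "user", "content": search.model_dump_json(indent=4)},
        ],
    )
    return completion.choices[0].message.content
```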
meutils/llm/completions/qwenllm.py
CHANGED

@@ -9,7 +9,6 @@
 # @Description  :
 
 from openai import AsyncOpenAI
-from fake_useragent import UserAgent
 
 from meutils.pipe import *
 from meutils.llm.openai_utils import to_openai_params
@@ -20,6 +19,9 @@ from meutils.schemas.openai_types import chat_completion, chat_completion_chunk,
 FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=PP1PGr"
 
 base_url = "https://chat.qwenlm.ai/api"
+
+from fake_useragent import UserAgent
+
 ua = UserAgent()
 
 
@@ -41,7 +43,7 @@ async def create(request: ChatCompletionRequest):
 
     else:
         response = await client.chat.completions.create(**data)
-        logger.info(response)
+        # logger.info(response)
         yield response.choices[0].message.content
 
 
meutils/llm/completions/reasoner.py
ADDED

@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project      : AI.  @by PyCharm
+# @File         : reasoner
+# @Time         : 2025/2/6 08:35
+# @Author       : betterme
+# @WeChat       : meutils
+# @Software     : PyCharm
+# @Description  :
+
+
+from openai import AsyncOpenAI
+
+from meutils.pipe import *
+from meutils.llm.clients import chatfire_client, AsyncOpenAI
+from meutils.llm.openai_utils import to_openai_params
+
+from meutils.schemas.openai_types import chat_completion, chat_completion_chunk, ChatCompletionRequest, CompletionUsage
+
+
+class Completions(object):
+
+    def __init__(self, api_key: Optional[str] = None, base_url: Optional[str] = None):
+        self.api_key = api_key
+        self.base_url = base_url
+
+        self.client = AsyncOpenAI(
+            base_url=self.base_url, api_key=self.api_key,
+        )
+
+    async def create(self, request: ChatCompletionRequest):
+        data = to_openai_params(request)
+
+        if request.stream:
+            _chunk = ""
+            async for chunk in await self.client.chat.completions.create(**data):
+                delta = chunk.choices[0].delta
+
+                reasoning_content = "> Reasoning\n"  # "> Reasoning\n" # 前缀 "> 思考中\n"
+                if hasattr(delta, 'reasoning_content'):
+                    if reasoning_content:
+                        reasoning_content += delta.reasoning_content
+                        yield reasoning_content
+                        reasoning_content = ''
+                else:
+                    yield '\n'
+
+                yield delta.content
+
+        else:
+            completions = await self.client.chat.completions.create(**data)
+            yield completions
+
+
+if __name__ == '__main__':
+    # [
+    #     "qwen-plus-latest",
+    #     "qvq-72b-preview",
+    #     "qwq-32b-preview",
+    #     "qwen2.5-coder-32b-instruct",
+    #     "qwen-vl-max-latest",
+    #     "qwen-turbo-latest",
+    #     "qwen2.5-72b-instruct",
+    #     "qwen2.5-32b-instruct"
+    # ]
+    request = ChatCompletionRequest(
+        # model="qwen-turbo-2024-11-01",
+        # model="qwen-max-latest",
+        # model="qwen-plus-latest",
+
+        model="deepseek-r1:1.5b",
+        # model="deepseek-r1",
+
+        messages=[
+            {
+                'role': 'system',
+                'content': '每次回答前务必思考之后m,回答问题'
+            },
+            {
+                'role': 'user',
+                'content': '你好'
+            },
+
+        ],
+        stream=True,
+    )
+    arun(Completions().create(request))
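The new reasoner wrapper interleaves a `> Reasoning` prefix with `reasoning_content` deltas before the answer tokens. A sketch of consuming a DeepSeek-R1-style stream along the same lines (the `reasoning_content` field name follows the convention used above; printing replaces yielding for clarity):

```python
async def print_reasoned_stream(client, model: str, messages: list) -> None:
    in_reasoning = False
    async for chunk in await client.chat.completions.create(model=model, messages=messages, stream=True):
        delta = chunk.choices[0].delta
        reasoning = getattr(delta, "reasoning_content", None)
        if reasoning:
            if not in_reasoning:
                print("> Reasoning")          # prefix emitted once, before the first reasoning token
                in_reasoning = True
            print(reasoning, end="", flush=True)
        elif delta.content:
            if in_reasoning:
                print()                        # separate the reasoning block from the answer
                in_reasoning = False
            print(delta.content, end="", flush=True)
```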
meutils/schemas/metaso_types.py
CHANGED

@@ -15,11 +15,13 @@ FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?shee
 
 
 class MetasoRequest(BaseModel):
-
+    model: Optional[Literal["ds-r1",]] = None
 
     """search-mini search search-pro"""
     mode: Literal["concise", "detail", "research"] = "detail"  # concise detail research
 
+    question: str = "Chatfire"
+
     """全网 文库 学术 图片 播客"""
     engineType: str = ""  # scholar
 
@@ -28,6 +30,10 @@ class MetasoRequest(BaseModel):
     searchTopicId: Optional[str] = None
     searchTopicName: Optional[str] = None
 
+    enableMix: bool = True
+    newEngine: bool = True
+    enableImage: bool = True
+
     # 自定义字段
     response_format: Optional[str] = None  # 原生内容
 
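A hedged example of building the extended request (field names come from this diff; the values are illustrative):

```python
from meutils.schemas.metaso_types import MetasoRequest

request = MetasoRequest(
    model="ds-r1",                      # new: route the search through DeepSeek-R1
    mode="detail",
    question="今天南京天气怎么样",
    engineType="",                      # "" = 全网, "scholar" = 学术
)
# enableMix / newEngine / enableImage now default to True and are sent with the request.
```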
meutils/schemas/oneapi/common.py
CHANGED

@@ -389,10 +389,10 @@ MODEL_RATIO = {
 
     "qwen2.5-coder-7b-instruct": 0.05,
     "qwen2.5-7b-instruct": 0.05,
-    "qwen2.5-14b-instruct": 0.
-    "qwen2.5-32b-instruct":
-    "qwen2.5-72b-instruct":
-    "qwen2.5-math-72b-instruct":
+    "qwen2.5-14b-instruct": 0.25,
+    "qwen2.5-32b-instruct": 0.5,
+    "qwen2.5-72b-instruct": 2,
+    "qwen2.5-math-72b-instruct": 2,
     "qwen2.5-coder-32b-instruct": 0.5,
     "qwen/qwq-32b-preview": 1,
     "Qwen/QwQ-32B-Preview": 1,
@@ -774,6 +774,12 @@ COMPLETION_RATIO = {
     "qwen2-vl-72b-instruct": 5,
     "qwen-max-latest": 5,
 
+    "qwen2.5-7b-instruct": 4,
+    "qwen2.5-14b-instruct": 4,
+    "qwen2.5-32b-instruct": 4,
+    "qwen2.5-72b-instruct": 4,
+    "qwen2.5-math-72b-instruct": 4,
+
     "deepseek-vl2": 4,
     "deepseek-ai/deepseek-vl2": 4,
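A back-of-the-envelope sketch of what the updated ratios imply, assuming the usual one-api convention of ratio × $0.002 per 1K prompt tokens, with completion tokens further scaled by `COMPLETION_RATIO`; the convention itself is an assumption and is not shown in this diff:

```python
# Values copied from the new entries above; only one model kept for brevity.
MODEL_RATIO = {"qwen2.5-72b-instruct": 2}
COMPLETION_RATIO = {"qwen2.5-72b-instruct": 4}

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    unit = MODEL_RATIO[model] * 0.002 / 1000                    # assumed dollars per prompt token
    return prompt_tokens * unit + completion_tokens * unit * COMPLETION_RATIO[model]

print(estimate_cost("qwen2.5-72b-instruct", 1000, 500))         # 0.004 + 0.008 = 0.012
```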
{MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/LICENSE
File without changes
{MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/WHEEL
File without changes
{MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/entry_points.txt
File without changes
{MeUtils-2025.2.5.11.6.16.dist-info → MeUtils-2025.2.6.13.5.49.dist-info}/top_level.txt
File without changes