mediapipe-nightly 0.10.10.post20240224__cp310-cp310-macosx_11_0_universal2.whl → 0.10.11.post20240229__cp310-cp310-macosx_11_0_universal2.whl

mediapipe/__init__.py CHANGED
@@ -23,4 +23,4 @@ del modules
  del python
  del mediapipe
  del util
- __version__ = '0.10.10-20240224'
+ __version__ = '0.10.11-20240229'
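The only change to mediapipe/__init__.py is the dated version string. A quick sanity check, assuming the 0.10.11 nightly wheel above is the one installed:

```python
import mediapipe as mp

# Matches the __version__ assignment shown in the hunk above.
assert mp.__version__ == '0.10.11-20240229', mp.__version__
```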
@@ -14,6 +14,7 @@
 
  """MediaPipe Python API."""
 
+ from mediapipe.python._framework_bindings import model_ckpt_util
  from mediapipe.python._framework_bindings import resource_util
  from mediapipe.python._framework_bindings.calculator_graph import CalculatorGraph
  from mediapipe.python._framework_bindings.calculator_graph import GraphInputStreamAddMode
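This hunk (judging by its docstring, the package's Python API __init__ module) re-exports a new compiled binding, model_ckpt_util, alongside the existing _framework_bindings modules. The diff does not show the binding's functions, so the sketch below only verifies that it imports and lists what it exposes; nothing beyond the import path is assumed:

```python
# Minimal smoke test for the new binding shipped with the 0.10.11 nightly wheel.
from mediapipe.python._framework_bindings import model_ckpt_util

# Inspect the public names the extension module exposes.
print([name for name in dir(model_ckpt_util) if not name.startswith('_')])
```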
@@ -15,16 +15,16 @@ _sym_db = _symbol_database.Default()
  from mediapipe.tasks.cc.genai.inference.proto import transformer_params_pb2 as mediapipe_dot_tasks_dot_cc_dot_genai_dot_inference_dot_proto_dot_transformer__params__pb2
 
 
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n9mediapipe/tasks/cc/genai/inference/proto/llm_params.proto\x12\x1bmediapipe.tasks.genai.proto\x1a\x41mediapipe/tasks/cc/genai/inference/proto/transformer_params.proto\"\xaa\x01\n\rLlmParameters\x12R\n\x16transformer_parameters\x18\x01 \x01(\x0b\x32\x32.mediapipe.tasks.genai.proto.TransformerParameters\x12\x12\n\nvocab_size\x18\x02 \x01(\x05\x12\x16\n\x0estart_token_id\x18\x04 \x01(\x05\x12\x13\n\x0bstop_tokens\x18\x05 \x03(\tJ\x04\x08\x03\x10\x04*\xd1\x01\n\x0cLlmModelType\x12\x1a\n\x16LLM_MODEL_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bLLM_MODEL_TYPE_FALCON_RW_1B\x10\x05\x12\x1b\n\x17LLM_MODEL_TYPE_GEMMA_2B\x10\x06\x12#\n\x1fLLM_MODEL_TYPE_STABLELM_4E1T_3B\x10\x08\x12\x18\n\x14LLM_MODEL_TYPE_PHI_2\x10\x0b\"\x04\x08\x01\x10\x01\"\x04\x08\x02\x10\x02\"\x04\x08\x03\x10\x03\"\x04\x08\x04\x10\x04\"\x04\x08\x07\x10\x07\"\x04\x08\t\x10\t\"\x04\x08\n\x10\nB<\n&com.google.mediapipe.tasks.genai.protoB\x12LLMParametersProtob\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n9mediapipe/tasks/cc/genai/inference/proto/llm_params.proto\x12\x10odml.infra.proto\x1a\x41mediapipe/tasks/cc/genai/inference/proto/transformer_params.proto\"\x9f\x01\n\rLlmParameters\x12G\n\x16transformer_parameters\x18\x01 \x01(\x0b\x32\'.odml.infra.proto.TransformerParameters\x12\x12\n\nvocab_size\x18\x02 \x01(\x05\x12\x16\n\x0estart_token_id\x18\x04 \x01(\x05\x12\x13\n\x0bstop_tokens\x18\x05 \x03(\tJ\x04\x08\x03\x10\x04*\xd1\x01\n\x0cLlmModelType\x12\x1a\n\x16LLM_MODEL_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bLLM_MODEL_TYPE_FALCON_RW_1B\x10\x05\x12\x1b\n\x17LLM_MODEL_TYPE_GEMMA_2B\x10\x06\x12#\n\x1fLLM_MODEL_TYPE_STABLELM_4E1T_3B\x10\x08\x12\x18\n\x14LLM_MODEL_TYPE_PHI_2\x10\x0b\"\x04\x08\x01\x10\x01\"\x04\x08\x02\x10\x02\"\x04\x08\x03\x10\x03\"\x04\x08\x04\x10\x04\"\x04\x08\x07\x10\x07\"\x04\x08\t\x10\t\"\x04\x08\n\x10\nB1\n\x1b\x63om.google.odml.infra.protoB\x12LLMParametersProtob\x06proto3')
 
  _globals = globals()
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
  _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'mediapipe.tasks.cc.genai.inference.proto.llm_params_pb2', _globals)
  if _descriptor._USE_C_DESCRIPTORS == False:
  _globals['DESCRIPTOR']._options = None
- _globals['DESCRIPTOR']._serialized_options = b'\n&com.google.mediapipe.tasks.genai.protoB\022LLMParametersProto'
- _globals['_LLMMODELTYPE']._serialized_start=331
- _globals['_LLMMODELTYPE']._serialized_end=540
- _globals['_LLMPARAMETERS']._serialized_start=158
- _globals['_LLMPARAMETERS']._serialized_end=328
+ _globals['DESCRIPTOR']._serialized_options = b'\n\033com.google.odml.infra.protoB\022LLMParametersProto'
+ _globals['_LLMMODELTYPE']._serialized_start=309
+ _globals['_LLMMODELTYPE']._serialized_end=518
+ _globals['_LLMPARAMETERS']._serialized_start=147
+ _globals['_LLMPARAMETERS']._serialized_end=306
  # @@protoc_insertion_point(module_scope)
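The regenerated llm_params_pb2 descriptor keeps the same Python import path but moves the protobuf package of its messages from mediapipe.tasks.genai.proto to odml.infra.proto (visible in the new serialized options and the shifted offsets). A minimal sketch of building an LlmParameters message, restricted to fields and enum values present in the descriptor above; the numeric values are placeholders:

```python
from mediapipe.tasks.cc.genai.inference.proto import llm_params_pb2

# Fields from the descriptor: vocab_size (2), start_token_id (4),
# stop_tokens (5, repeated string). Values here are illustrative only.
params = llm_params_pb2.LlmParameters(vocab_size=32000, start_token_id=2)
params.stop_tokens.append('<eos>')

# The fully qualified name now uses the new package.
print(params.DESCRIPTOR.full_name)             # odml.infra.proto.LlmParameters
print(llm_params_pb2.LLM_MODEL_TYPE_GEMMA_2B)  # file-level enum value: 6
```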
@@ -14,28 +14,28 @@ _sym_db = _symbol_database.Default()
 
 
 
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nAmediapipe/tasks/cc/genai/inference/proto/transformer_params.proto\x12\x1bmediapipe.tasks.genai.proto\"\x96\x0e\n\x15TransformerParameters\x12\x12\n\nbatch_size\x18\x01 \x01(\x05\x12\x16\n\x0emax_seq_length\x18\x02 \x01(\x05\x12\x15\n\rembedding_dim\x18\x03 \x01(\x05\x12\x18\n\x10hidden_dimension\x18\x04 \x01(\x05\x12\x16\n\x0ehead_dimension\x18\x05 \x01(\x05\x12\x11\n\tnum_heads\x18\x06 \x01(\x05\x12\x12\n\nnum_stacks\x18\x07 \x01(\x05\x12\x14\n\x0cnum_kv_heads\x18\t \x01(\x05\x12i\n\x17\x66\x65\x65\x64_forward_parameters\x18\x0b \x01(\x0b\x32H.mediapipe.tasks.genai.proto.TransformerParameters.FeedForwardParameters\x12k\n\x18\x66inal_project_parameters\x18\x0c \x01(\x0b\x32I.mediapipe.tasks.genai.proto.TransformerParameters.FinalProjectParameters\x12I\n\x08pre_norm\x18\r \x01(\x0e\x32\x37.mediapipe.tasks.genai.proto.TransformerParameters.Norm\x12J\n\tpost_norm\x18\x0e \x01(\x0e\x32\x37.mediapipe.tasks.genai.proto.TransformerParameters.Norm\x12K\n\nfinal_norm\x18\x0f \x01(\x0e\x32\x37.mediapipe.tasks.genai.proto.TransformerParameters.Norm\x12m\n\x19self_attention_parameters\x18\x10 \x01(\x0b\x32J.mediapipe.tasks.genai.proto.TransformerParameters.SelfAttentionParameters\x12+\n#skip_absolute_positional_embeddings\x18\x12 \x01(\x08\x1a\x92\x02\n\x15\x46\x65\x65\x64\x46orwardParameters\x12\x0f\n\x07no_bias\x18\x01 \x01(\x08\x12Q\n\nactivation\x18\x02 \x01(\x0e\x32=.mediapipe.tasks.genai.proto.TransformerParameters.Activation\x12I\n\x08pre_norm\x18\x03 \x01(\x0e\x32\x37.mediapipe.tasks.genai.proto.TransformerParameters.Norm\x12J\n\tpost_norm\x18\x04 \x01(\x0e\x32\x37.mediapipe.tasks.genai.proto.TransformerParameters.Norm\x1a\x41\n\x16\x46inalProjectParameters\x12\x0f\n\x07no_bias\x18\x01 \x01(\x08\x12\x16\n\x0esoft_cap_value\x18\x02 \x01(\x02\x1a\xc7\x02\n\x17SelfAttentionParameters\x12\x13\n\x0bqkv_no_bias\x18\x01 \x01(\x08\x12\x19\n\x11post_proj_no_bias\x18\x02 \x01(\x08\x12\x61\n\x13\x61ttention_mask_type\x18\x03 \x01(\x0e\x32\x44.mediapipe.tasks.genai.proto.TransformerParameters.AttentionMaskType\x12\x16\n\x0esoft_cap_value\x18\x04 \x01(\x02\x12h\n\x14\x61ttention_scale_type\x18\x05 \x01(\x0e\x32\x45.mediapipe.tasks.genai.proto.TransformerParameters.AttentionScaleTypeH\x00\x88\x01\x01\x42\x17\n\x15_attention_scale_type\"<\n\x11\x41ttentionMaskType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\n\n\x06\x43\x41USAL\x10\x01\x12\n\n\x06PREFIX\x10\x02\"F\n\nActivation\x12\x1a\n\x16\x41\x43TIVATION_UNSPECIFIED\x10\x00\x12\x08\n\x04GELU\x10\x01\x12\x08\n\x04SILU\x10\x02\x12\x08\n\x04RELU\x10\x03\"G\n\x04Norm\x12\x14\n\x10NORM_UNSPECIFIED\x10\x00\x12\x0b\n\x07NO_NORM\x10\x01\x12\x0c\n\x08RMS_NORM\x10\x02\x12\x0e\n\nLAYER_NORM\x10\x03\"p\n\x12\x41ttentionScaleType\x12\x1a\n\x16SCALE_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18SCALE_TYPE_PER_DIM_SCALE\x10\x01\x12 \n\x1cSCALE_TYPE_INV_SQRT_HEAD_DIM\x10\x02J\x04\x08\x08\x10\tJ\x04\x08\n\x10\x0bJ\x04\x08\x11\x10\x12\x42\x44\n&com.google.mediapipe.tasks.genai.protoB\x1aTransformerParametersProtob\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nAmediapipe/tasks/cc/genai/inference/proto/transformer_params.proto\x12\x10odml.infra.proto\"\x9d\r\n\x15TransformerParameters\x12\x12\n\nbatch_size\x18\x01 \x01(\x05\x12\x16\n\x0emax_seq_length\x18\x02 \x01(\x05\x12\x15\n\rembedding_dim\x18\x03 \x01(\x05\x12\x18\n\x10hidden_dimension\x18\x04 \x01(\x05\x12\x16\n\x0ehead_dimension\x18\x05 \x01(\x05\x12\x11\n\tnum_heads\x18\x06 \x01(\x05\x12\x12\n\nnum_stacks\x18\x07 \x01(\x05\x12\x14\n\x0cnum_kv_heads\x18\t \x01(\x05\x12^\n\x17\x66\x65\x65\x64_forward_parameters\x18\x0b \x01(\x0b\x32=.odml.infra.proto.TransformerParameters.FeedForwardParameters\x12`\n\x18\x66inal_project_parameters\x18\x0c \x01(\x0b\x32>.odml.infra.proto.TransformerParameters.FinalProjectParameters\x12>\n\x08pre_norm\x18\r \x01(\x0e\x32,.odml.infra.proto.TransformerParameters.Norm\x12?\n\tpost_norm\x18\x0e \x01(\x0e\x32,.odml.infra.proto.TransformerParameters.Norm\x12@\n\nfinal_norm\x18\x0f \x01(\x0e\x32,.odml.infra.proto.TransformerParameters.Norm\x12\x62\n\x19self_attention_parameters\x18\x10 \x01(\x0b\x32?.odml.infra.proto.TransformerParameters.SelfAttentionParameters\x12+\n#skip_absolute_positional_embeddings\x18\x12 \x01(\x08\x1a\xf1\x01\n\x15\x46\x65\x65\x64\x46orwardParameters\x12\x0f\n\x07no_bias\x18\x01 \x01(\x08\x12\x46\n\nactivation\x18\x02 \x01(\x0e\x32\x32.odml.infra.proto.TransformerParameters.Activation\x12>\n\x08pre_norm\x18\x03 \x01(\x0e\x32,.odml.infra.proto.TransformerParameters.Norm\x12?\n\tpost_norm\x18\x04 \x01(\x0e\x32,.odml.infra.proto.TransformerParameters.Norm\x1a\x41\n\x16\x46inalProjectParameters\x12\x0f\n\x07no_bias\x18\x01 \x01(\x08\x12\x16\n\x0esoft_cap_value\x18\x02 \x01(\x02\x1a\xb1\x02\n\x17SelfAttentionParameters\x12\x13\n\x0bqkv_no_bias\x18\x01 \x01(\x08\x12\x19\n\x11post_proj_no_bias\x18\x02 \x01(\x08\x12V\n\x13\x61ttention_mask_type\x18\x03 \x01(\x0e\x32\x39.odml.infra.proto.TransformerParameters.AttentionMaskType\x12\x16\n\x0esoft_cap_value\x18\x04 \x01(\x02\x12]\n\x14\x61ttention_scale_type\x18\x05 \x01(\x0e\x32:.odml.infra.proto.TransformerParameters.AttentionScaleTypeH\x00\x88\x01\x01\x42\x17\n\x15_attention_scale_type\"<\n\x11\x41ttentionMaskType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\n\n\x06\x43\x41USAL\x10\x01\x12\n\n\x06PREFIX\x10\x02\"F\n\nActivation\x12\x1a\n\x16\x41\x43TIVATION_UNSPECIFIED\x10\x00\x12\x08\n\x04GELU\x10\x01\x12\x08\n\x04SILU\x10\x02\x12\x08\n\x04RELU\x10\x03\"G\n\x04Norm\x12\x14\n\x10NORM_UNSPECIFIED\x10\x00\x12\x0b\n\x07NO_NORM\x10\x01\x12\x0c\n\x08RMS_NORM\x10\x02\x12\x0e\n\nLAYER_NORM\x10\x03\"p\n\x12\x41ttentionScaleType\x12\x1a\n\x16SCALE_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18SCALE_TYPE_PER_DIM_SCALE\x10\x01\x12 \n\x1cSCALE_TYPE_INV_SQRT_HEAD_DIM\x10\x02J\x04\x08\x08\x10\tJ\x04\x08\n\x10\x0bJ\x04\x08\x11\x10\x12\x42\x39\n\x1b\x63om.google.odml.infra.protoB\x1aTransformerParametersProtob\x06proto3')
 
  _globals = globals()
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
  _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'mediapipe.tasks.cc.genai.inference.proto.transformer_params_pb2', _globals)
  if _descriptor._USE_C_DESCRIPTORS == False:
  _globals['DESCRIPTOR']._options = None
- _globals['DESCRIPTOR']._serialized_options = b'\n&com.google.mediapipe.tasks.genai.protoB\032TransformerParametersProto'
- _globals['_TRANSFORMERPARAMETERS']._serialized_start=99
- _globals['_TRANSFORMERPARAMETERS']._serialized_end=1913
- _globals['_TRANSFORMERPARAMETERS_FEEDFORWARDPARAMETERS']._serialized_start=903
- _globals['_TRANSFORMERPARAMETERS_FEEDFORWARDPARAMETERS']._serialized_end=1177
- _globals['_TRANSFORMERPARAMETERS_FINALPROJECTPARAMETERS']._serialized_start=1179
- _globals['_TRANSFORMERPARAMETERS_FINALPROJECTPARAMETERS']._serialized_end=1244
- _globals['_TRANSFORMERPARAMETERS_SELFATTENTIONPARAMETERS']._serialized_start=1247
- _globals['_TRANSFORMERPARAMETERS_SELFATTENTIONPARAMETERS']._serialized_end=1574
- _globals['_TRANSFORMERPARAMETERS_ATTENTIONMASKTYPE']._serialized_start=1576
- _globals['_TRANSFORMERPARAMETERS_ATTENTIONMASKTYPE']._serialized_end=1636
- _globals['_TRANSFORMERPARAMETERS_ACTIVATION']._serialized_start=1638
- _globals['_TRANSFORMERPARAMETERS_ACTIVATION']._serialized_end=1708
- _globals['_TRANSFORMERPARAMETERS_NORM']._serialized_start=1710
- _globals['_TRANSFORMERPARAMETERS_NORM']._serialized_end=1781
- _globals['_TRANSFORMERPARAMETERS_ATTENTIONSCALETYPE']._serialized_start=1783
- _globals['_TRANSFORMERPARAMETERS_ATTENTIONSCALETYPE']._serialized_end=1895
+ _globals['DESCRIPTOR']._serialized_options = b'\n\033com.google.odml.infra.protoB\032TransformerParametersProto'
+ _globals['_TRANSFORMERPARAMETERS']._serialized_start=88
+ _globals['_TRANSFORMERPARAMETERS']._serialized_end=1781
+ _globals['_TRANSFORMERPARAMETERS_FEEDFORWARDPARAMETERS']._serialized_start=826
+ _globals['_TRANSFORMERPARAMETERS_FEEDFORWARDPARAMETERS']._serialized_end=1067
+ _globals['_TRANSFORMERPARAMETERS_FINALPROJECTPARAMETERS']._serialized_start=1069
+ _globals['_TRANSFORMERPARAMETERS_FINALPROJECTPARAMETERS']._serialized_end=1134
+ _globals['_TRANSFORMERPARAMETERS_SELFATTENTIONPARAMETERS']._serialized_start=1137
+ _globals['_TRANSFORMERPARAMETERS_SELFATTENTIONPARAMETERS']._serialized_end=1442
+ _globals['_TRANSFORMERPARAMETERS_ATTENTIONMASKTYPE']._serialized_start=1444
+ _globals['_TRANSFORMERPARAMETERS_ATTENTIONMASKTYPE']._serialized_end=1504
+ _globals['_TRANSFORMERPARAMETERS_ACTIVATION']._serialized_start=1506
+ _globals['_TRANSFORMERPARAMETERS_ACTIVATION']._serialized_end=1576
+ _globals['_TRANSFORMERPARAMETERS_NORM']._serialized_start=1578
+ _globals['_TRANSFORMERPARAMETERS_NORM']._serialized_end=1649
+ _globals['_TRANSFORMERPARAMETERS_ATTENTIONSCALETYPE']._serialized_start=1651
+ _globals['_TRANSFORMERPARAMETERS_ATTENTIONSCALETYPE']._serialized_end=1763
  # @@protoc_insertion_point(module_scope)
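As with llm_params_pb2, only the protobuf package of transformer_params_pb2 changes (mediapipe.tasks.genai.proto becomes odml.infra.proto); the message layout is untouched. A minimal sketch of populating TransformerParameters, using only fields and nested enums visible in the descriptor above, with placeholder values:

```python
from mediapipe.tasks.cc.genai.inference.proto import transformer_params_pb2

TP = transformer_params_pb2.TransformerParameters

# Scalar fields from the descriptor; the numbers are illustrative only.
params = TP(
    batch_size=1,
    max_seq_length=1024,
    embedding_dim=2048,
    hidden_dimension=8192,
    head_dimension=256,
    num_heads=8,
    num_kv_heads=1,
    num_stacks=18,
)

# Nested messages and enums, also taken from the descriptor.
params.self_attention_parameters.attention_mask_type = TP.AttentionMaskType.CAUSAL
params.feed_forward_parameters.activation = TP.Activation.GELU
params.final_norm = TP.Norm.RMS_NORM

print(params.DESCRIPTOR.full_name)  # odml.infra.proto.TransformerParameters
```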