waterfall 0.1.5__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: waterfall
-Version: 0.1.5
+Version: 0.1.6
 Summary: Scalable Framework for Robust Text Watermarking and Provenance for LLMs
 Project-URL: Homepage, https://github.com/aoi3142/Waterfall
 Project-URL: Issues, https://github.com/aoi3142/Waterfall/issues
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "waterfall"
-version = "0.1.5"
+version = "0.1.6"
 authors = [
   { name = "Xinyuan Niu", email="aperture@outlook.sg" }
 ]
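Aside from the metadata, only the version field in pyproject.toml changes. To confirm which release ends up installed in an environment, the standard library can read the installed metadata directly (a minimal sketch; it assumes the new sdist has already been installed, e.g. via pip install waterfall==0.1.6):

    import importlib.metadata

    # Read the version string recorded in the installed package's metadata
    print(importlib.metadata.version("waterfall"))  # expected: 0.1.6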
@@ -3,6 +3,7 @@ import logging
 import os
 import gc
 import torch
+import numpy as np
 from typing import List, Literal, Optional, Tuple
 
 from transformers import AutoTokenizer, AutoModelForCausalLM
@@ -83,7 +84,7 @@ def verify_texts(texts: List[str], id: int,
                  k_p: Optional[int] = None,
                  model_path: Optional[str] = "meta-llama/Llama-3.1-8B-Instruct",
                  return_extracted_k_p: bool = False
-                 ) -> Tuple[float,float]:
+                 ) -> np.ndarray | Tuple[np.ndarray,np.ndarray]:
     """Returns the q_score and extracted k_p"""
 
     if watermarker is None:
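The new annotation suggests verify_texts now returns numpy arrays rather than plain floats, and a single q_score array unless return_extracted_k_p is set, so callers that previously unpacked a (float, float) tuple may need to adapt. A minimal caller sketch under that reading; the import path and argument values below are illustrative assumptions, not confirmed by this diff:

    # Hypothetical import path; check the Waterfall repository for the actual one
    from waterfall import verify_texts

    texts = ["A possibly watermarked passage of text."]
    watermark_id = 42  # watermark id the texts are checked against

    # Default call: per the 0.1.6 annotation, a numpy array of q_scores
    q_scores = verify_texts(texts, id=watermark_id)

    # With return_extracted_k_p=True: a (q_scores, extracted_k_p) pair of arrays
    q_scores, extracted_k_p = verify_texts(texts, id=watermark_id,
                                           return_extracted_k_p=True)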
All other files in the package are unchanged between the two versions.