fluff_cutter-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fluff_cutter/__init__.py ADDED
@@ -0,0 +1,3 @@
+ """Paper Fluff Cutter - Extract the core value from academic papers."""
+
+ __version__ = "0.1.0"
fluff_cutter/analyzer.py ADDED
@@ -0,0 +1,62 @@
+ """Core paper analysis logic."""
+
+ from .providers.base import BaseLLMProvider
+
+ ANALYSIS_PROMPT = """You are analyzing an academic paper. Your job is to cut through all the fluff and extract only what matters.
+
+ Answer these three questions concisely and critically:
+
+ 1. WHY SHOULD I CARE?
+ - What problem does this address?
+ - Why does it matter to the world (not just academia)?
+
+ 2. WHAT'S THE ACTUAL INNOVATION?
+ - What is the core idea or proposal?
+ - What makes it different from existing work?
+ - Describe it in plain terms, no jargon.
+
+ 3. IS THE EVIDENCE CONVINCING?
+ - What experiments or evidence do they provide?
+ - Are there obvious gaps or weaknesses?
+ - Does the evidence actually support their claims?
+
+ Be brutally honest. If the paper is weak, say so.
+ If it's mostly fluff with a tiny kernel of insight, identify that kernel.
+
+ Also extract the paper's title at the beginning of your response in this format:
+ TITLE: [Paper Title]
+
+ Then provide your analysis."""
+
+
+ def analyze_paper(provider: BaseLLMProvider, pdf_base64: str, filename: str) -> dict:
+     """
+     Analyze a paper using the provided LLM.
+
+     Args:
+         provider: The LLM provider to use for analysis.
+         pdf_base64: Base64-encoded PDF data.
+         filename: Original filename of the PDF.
+
+     Returns:
+         Dictionary with 'title', 'analysis', and 'model_info' keys.
+     """
+     raw_response = provider.analyze_paper(pdf_base64, filename, ANALYSIS_PROMPT)
+
+     # Try to extract the title from the response
+     title = "Unknown Title"
+     analysis = raw_response
+
+     lines = raw_response.strip().split("\n")
+     for i, line in enumerate(lines):
+         if line.strip().upper().startswith("TITLE:"):
+             title = line.split(":", 1)[1].strip()
+             # Remove the title line from the analysis
+             analysis = "\n".join(lines[i + 1 :]).strip()
+             break
+
+     return {
+         "title": title,
+         "analysis": analysis,
+         "model_info": provider.get_model_info(),
+     }
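Since `analyze_paper` only calls two methods on the provider (`analyze_paper` and `get_model_info`), its title-extraction logic can be exercised without any API key. A minimal sketch, assuming the wheel is installed; the `FakeProvider` class and its canned response are hypothetical and exist only to illustrate the parsing:

```python
from fluff_cutter.analyzer import analyze_paper


class FakeProvider:
    """Hypothetical stand-in that returns a canned response instead of calling an LLM."""

    def analyze_paper(self, pdf_base64: str, filename: str, prompt: str) -> str:
        # The real providers send the PDF and prompt to a model; here we fake the reply.
        return "TITLE: An Example Paper\n\n1. WHY SHOULD I CARE?\n- Toy answer."

    def get_model_info(self) -> str:
        return "FakeProvider (none)"


result = analyze_paper(FakeProvider(), pdf_base64="", filename="example.pdf")
print(result["title"])       # An Example Paper
print(result["model_info"])  # FakeProvider (none)
print(result["analysis"])    # the response with the TITLE: line stripped
```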
fluff_cutter/cli.py ADDED
@@ -0,0 +1,185 @@
+ """Command-line interface for Paper Fluff Cutter."""
+
+ import sys
+
+ import click
+
+ from . import __version__
+ from .analyzer import analyze_paper
+ from .config import (
+     get_api_key,
+     get_config_path,
+     get_default_provider,
+     is_configured,
+     load_config,
+     save_config,
+ )
+ from .output import print_analysis, save_analysis
+ from .pdf import get_pdf_filename, read_pdf_as_base64
+ from .providers import AnthropicProvider, OpenAIProvider
+
+
+ PROVIDERS = {
+     "openai": OpenAIProvider,
+     "anthropic": AnthropicProvider,
+ }
+
+
+ @click.group()
+ @click.version_option(version=__version__)
+ def main():
+     """Paper Fluff Cutter - Extract the core value from academic papers."""
+     pass
+
+
+ @main.command()
+ def init():
+     """Initialize configuration with API keys."""
+     click.echo("Paper Fluff Cutter Configuration")
+     click.echo("=" * 40)
+     click.echo()
+
+     config = {}
+
+     # OpenAI API Key
+     click.echo("Enter your API keys (press Enter to skip):")
+     click.echo()
+
+     openai_key = click.prompt(
+         "OpenAI API Key",
+         default="",
+         hide_input=True,
+         show_default=False,
+     )
+     if openai_key:
+         config["openai_api_key"] = openai_key
+         click.echo(" ✓ OpenAI API key saved")
+
+     # Anthropic API Key
+     anthropic_key = click.prompt(
+         "Anthropic API Key",
+         default="",
+         hide_input=True,
+         show_default=False,
+     )
+     if anthropic_key:
+         config["anthropic_api_key"] = anthropic_key
+         click.echo(" ✓ Anthropic API key saved")
+
+     if not config:
+         click.echo()
+         click.echo("No API keys provided. Configuration not saved.")
+         click.echo("You can set keys via environment variables instead:")
+         click.echo(" export OPENAI_API_KEY=sk-...")
+         click.echo(" export ANTHROPIC_API_KEY=sk-ant-...")
+         return
+
+     # Default provider
+     click.echo()
+     available_providers = []
+     if "openai_api_key" in config:
+         available_providers.append("openai")
+     if "anthropic_api_key" in config:
+         available_providers.append("anthropic")
+
+     if len(available_providers) > 1:
+         default_provider = click.prompt(
+             "Default provider",
+             type=click.Choice(available_providers),
+             default="anthropic" if "anthropic" in available_providers else "openai",
+         )
+     else:
+         default_provider = available_providers[0]
+
+     config["default_provider"] = default_provider
+
+     # Save configuration
+     save_config(config)
+
+     click.echo()
+     click.echo(f"Configuration saved to: {get_config_path()}")
+     click.echo(f"Default provider: {default_provider}")
+     click.echo()
+     click.echo("You're ready to analyze papers!")
+     click.echo(" fluff-cutter analyze <paper.pdf>")
+
+
+ @main.command()
+ @click.argument("paper_path", type=click.Path(exists=True))
+ @click.option(
+     "-p",
+     "--provider",
+     type=click.Choice(["openai", "anthropic"]),
+     help="LLM provider to use",
+ )
+ @click.option(
+     "-m",
+     "--model",
+     help="Specific model to use (overrides provider default)",
+ )
+ @click.option(
+     "-o",
+     "--output",
+     type=click.Path(),
+     help="Save output to file instead of printing",
+ )
+ def analyze(paper_path: str, provider: str | None, model: str | None, output: str | None):
+     """Analyze an academic paper and extract its core value."""
+     # Check configuration
+     if not is_configured():
+         click.echo("Error: No API keys configured.", err=True)
+         click.echo("Run 'fluff-cutter init' to set up your API keys.", err=True)
+         click.echo("Or set environment variables:", err=True)
+         click.echo(" export OPENAI_API_KEY=sk-...", err=True)
+         click.echo(" export ANTHROPIC_API_KEY=sk-ant-...", err=True)
+         sys.exit(1)
+
+     # Load config and determine provider
+     config = load_config()
+     provider_name = provider or get_default_provider(config)
+
+     # Get API key for the selected provider
+     api_key = get_api_key(provider_name, config)
+     if not api_key:
+         click.echo(f"Error: No API key configured for {provider_name}.", err=True)
+         click.echo(f"Run 'fluff-cutter init' or set {provider_name.upper()}_API_KEY.", err=True)
+         sys.exit(1)
+
+     # Create provider instance
+     provider_class = PROVIDERS[provider_name]
+     llm_provider = provider_class(api_key=api_key, model=model)
+
+     click.echo(f"Analyzing paper: {paper_path}")
+     click.echo(f"Using: {llm_provider.get_model_info()}")
+     click.echo()
+
+     # Read PDF
+     click.echo("Reading PDF...")
+     try:
+         pdf_base64 = read_pdf_as_base64(paper_path)
+         filename = get_pdf_filename(paper_path)
+         click.echo(" PDF loaded successfully")
+     except Exception as e:
+         click.echo(f"Error reading PDF: {e}", err=True)
+         sys.exit(1)
+
+     # Analyze the paper
+     click.echo("Analyzing paper (this may take a minute)...")
+     try:
+         result = analyze_paper(llm_provider, pdf_base64, filename)
+     except Exception as e:
+         click.echo(f"Error during analysis: {e}", err=True)
+         sys.exit(1)
+
+     click.echo()
+
+     # Output results
+     if output:
+         save_analysis(result["title"], result["analysis"], result["model_info"], output)
+         click.echo(f"Analysis saved to: {output}")
+     else:
+         print_analysis(result["title"], result["analysis"], result["model_info"])
+
+
+ if __name__ == "__main__":
+     main()
fluff_cutter/config.py ADDED
@@ -0,0 +1,118 @@
+ """Configuration management for Paper Fluff Cutter."""
+
+ import json
+ import os
+ from pathlib import Path
+ from typing import Any
+
+ # Config file location
+ CONFIG_DIR = Path.home() / ".config" / "fluff-cutter"
+ CONFIG_FILE = CONFIG_DIR / "config.json"
+
+ # Default values
+ DEFAULT_PROVIDER = "anthropic"
+
+
+ def get_config_path() -> Path:
+     """Get the path to the config file."""
+     return CONFIG_FILE
+
+
+ def load_config_file() -> dict[str, Any]:
+     """
+     Load configuration from the config file.
+
+     Returns:
+         Dictionary with config values, or empty dict if file doesn't exist.
+     """
+     if not CONFIG_FILE.exists():
+         return {}
+
+     try:
+         with open(CONFIG_FILE, "r", encoding="utf-8") as f:
+             return json.load(f)
+     except (json.JSONDecodeError, OSError):
+         return {}
+
+
+ def save_config(config: dict[str, Any]) -> None:
+     """
+     Save configuration to the config file.
+
+     Args:
+         config: Dictionary with config values to save.
+     """
+     CONFIG_DIR.mkdir(parents=True, exist_ok=True)
+
+     with open(CONFIG_FILE, "w", encoding="utf-8") as f:
+         json.dump(config, f, indent=2)
+
+
+ def load_config() -> dict[str, Any]:
+     """
+     Load configuration with precedence:
+     1. Environment variables (highest)
+     2. Config file (lowest)
+
+     Returns:
+         Merged configuration dictionary.
+     """
+     # Start with config file values
+     config = load_config_file()
+
+     # Override with environment variables
+     if os.environ.get("OPENAI_API_KEY"):
+         config["openai_api_key"] = os.environ["OPENAI_API_KEY"]
+
+     if os.environ.get("ANTHROPIC_API_KEY"):
+         config["anthropic_api_key"] = os.environ["ANTHROPIC_API_KEY"]
+
+     if os.environ.get("FLUFF_CUTTER_PROVIDER"):
+         config["default_provider"] = os.environ["FLUFF_CUTTER_PROVIDER"]
+
+     return config
+
+
+ def get_api_key(provider: str, config: dict[str, Any] | None = None) -> str | None:
+     """
+     Get the API key for a specific provider.
+
+     Args:
+         provider: The provider name ('openai' or 'anthropic').
+         config: Optional pre-loaded config. If None, loads config.
+
+     Returns:
+         The API key or None if not configured.
+     """
+     if config is None:
+         config = load_config()
+
+     key_name = f"{provider}_api_key"
+     return config.get(key_name)
+
+
+ def get_default_provider(config: dict[str, Any] | None = None) -> str:
+     """
+     Get the default provider.
+
+     Args:
+         config: Optional pre-loaded config. If None, loads config.
+
+     Returns:
+         The default provider name.
+     """
+     if config is None:
+         config = load_config()
+
+     return config.get("default_provider", DEFAULT_PROVIDER)
+
+
+ def is_configured() -> bool:
+     """
+     Check if at least one provider is configured.
+
+     Returns:
+         True if at least one API key is available.
+     """
+     config = load_config()
+     return bool(config.get("openai_api_key") or config.get("anthropic_api_key"))
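`load_config` layers environment variables on top of whatever is stored in `~/.config/fluff-cutter/config.json`, so an exported key always wins over the file. A small sketch of that precedence, assuming the package is installed; the key value is a placeholder:

```python
import os

from fluff_cutter.config import get_api_key, get_default_provider, load_config

# Environment variables override the config file (see load_config above).
os.environ["ANTHROPIC_API_KEY"] = "sk-ant-placeholder"
os.environ["FLUFF_CUTTER_PROVIDER"] = "anthropic"

config = load_config()
print(get_default_provider(config))      # anthropic
print(get_api_key("anthropic", config))  # sk-ant-placeholder, even if the file holds another key
```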
fluff_cutter/output.py ADDED
@@ -0,0 +1,56 @@
+ """Output formatting for paper analysis."""
+
+ from datetime import datetime
+
+
+ def format_analysis(title: str, analysis: str, model_info: str) -> str:
+     """
+     Format the analysis as clean markdown.
+
+     Args:
+         title: The paper title.
+         analysis: The raw analysis from the LLM.
+         model_info: Information about the model used.
+
+     Returns:
+         Formatted markdown string.
+     """
+     date_str = datetime.now().strftime("%Y-%m-%d")
+
+     output = f"""# Paper Analysis: {title}
+
+ {analysis}
+
+ ---
+ *Analyzed with {model_info} on {date_str}*
+ """
+     return output
+
+
+ def print_analysis(title: str, analysis: str, model_info: str) -> None:
+     """
+     Print the formatted analysis to stdout.
+
+     Args:
+         title: The paper title.
+         analysis: The raw analysis from the LLM.
+         model_info: Information about the model used.
+     """
+     print(format_analysis(title, analysis, model_info))
+
+
+ def save_analysis(
+     title: str, analysis: str, model_info: str, output_path: str
+ ) -> None:
+     """
+     Save the formatted analysis to a file.
+
+     Args:
+         title: The paper title.
+         analysis: The raw analysis from the LLM.
+         model_info: Information about the model used.
+         output_path: Path to save the output file.
+     """
+     content = format_analysis(title, analysis, model_info)
+     with open(output_path, "w", encoding="utf-8") as f:
+         f.write(content)
fluff_cutter/pdf.py ADDED
@@ -0,0 +1,45 @@
+ """PDF handling for LLM analysis."""
+
+ import base64
+ from pathlib import Path
+
+
+ def read_pdf_as_base64(pdf_path: str | Path) -> str:
+     """
+     Read a PDF file and encode it as base64.
+
+     Args:
+         pdf_path: Path to the PDF file.
+
+     Returns:
+         Base64-encoded PDF data.
+
+     Raises:
+         FileNotFoundError: If the PDF file doesn't exist.
+         ValueError: If the file is not a PDF.
+     """
+     pdf_path = Path(pdf_path)
+
+     if not pdf_path.exists():
+         raise FileNotFoundError(f"PDF file not found: {pdf_path}")
+
+     if pdf_path.suffix.lower() != ".pdf":
+         raise ValueError(f"File is not a PDF: {pdf_path}")
+
+     with open(pdf_path, "rb") as f:
+         pdf_data = f.read()
+
+     return base64.standard_b64encode(pdf_data).decode("utf-8")
+
+
+ def get_pdf_filename(pdf_path: str | Path) -> str:
+     """
+     Get the filename from a PDF path.
+
+     Args:
+         pdf_path: Path to the PDF file.
+
+     Returns:
+         The filename.
+     """
+     return Path(pdf_path).name
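These helpers do no PDF parsing themselves; they validate the extension and hand the raw bytes to a provider as base64. A quick usage sketch, assuming a local `paper.pdf` exists (the filename is a placeholder):

```python
from fluff_cutter.pdf import get_pdf_filename, read_pdf_as_base64

# Raises FileNotFoundError for a missing file and ValueError for a non-.pdf extension.
pdf_base64 = read_pdf_as_base64("paper.pdf")
filename = get_pdf_filename("paper.pdf")

print(filename)         # paper.pdf
print(len(pdf_base64))  # size of the base64 payload passed to the provider
```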
fluff_cutter/providers/__init__.py ADDED
@@ -0,0 +1,7 @@
+ """LLM provider implementations."""
+
+ from .base import BaseLLMProvider
+ from .openai import OpenAIProvider
+ from .anthropic import AnthropicProvider
+
+ __all__ = ["BaseLLMProvider", "OpenAIProvider", "AnthropicProvider"]
fluff_cutter/providers/anthropic.py ADDED
@@ -0,0 +1,59 @@
+ """Anthropic provider implementation with native PDF support."""
+
+ import anthropic
+
+ from .base import BaseLLMProvider
+
+
+ class AnthropicProvider(BaseLLMProvider):
+     """Anthropic Claude provider with native PDF support."""
+
+     @property
+     def default_model(self) -> str:
+         return "claude-sonnet-4-20250514"
+
+     @property
+     def provider_name(self) -> str:
+         return "Anthropic"
+
+     def analyze_paper(self, pdf_base64: str, filename: str, prompt: str) -> str:
+         """
+         Analyze a paper using Anthropic's native PDF support.
+
+         Args:
+             pdf_base64: Base64-encoded PDF data.
+             filename: Original filename of the PDF.
+             prompt: The analysis prompt to send to the model.
+
+         Returns:
+             The model's analysis as a string.
+         """
+         client = anthropic.Anthropic(api_key=self.api_key)
+
+         response = client.messages.create(
+             model=self.model,
+             max_tokens=4096,
+             messages=[
+                 {
+                     "role": "user",
+                     "content": [
+                         {
+                             "type": "document",
+                             "source": {
+                                 "type": "base64",
+                                 "media_type": "application/pdf",
+                                 "data": pdf_base64,
+                             },
+                         },
+                         {
+                             "type": "text",
+                             "text": prompt,
+                         },
+                     ],
+                 }
+             ],
+         )
+
+         # Extract text from the response
+         text_blocks = [block.text for block in response.content if block.type == "text"]
+         return "\n".join(text_blocks)
fluff_cutter/providers/base.py ADDED
@@ -0,0 +1,49 @@
+ """Base class for LLM providers."""
+
+ from abc import ABC, abstractmethod
+
+
+ class BaseLLMProvider(ABC):
+     """Abstract base class for LLM providers with PDF support."""
+
+     def __init__(self, api_key: str, model: str | None = None):
+         """
+         Initialize the provider.
+
+         Args:
+             api_key: API key for the provider.
+             model: Optional model override. If not provided, uses default.
+         """
+         self.api_key = api_key
+         self.model = model or self.default_model
+
+     @property
+     @abstractmethod
+     def default_model(self) -> str:
+         """The default model to use for this provider."""
+         pass
+
+     @property
+     @abstractmethod
+     def provider_name(self) -> str:
+         """Human-readable name of the provider."""
+         pass
+
+     @abstractmethod
+     def analyze_paper(self, pdf_base64: str, filename: str, prompt: str) -> str:
+         """
+         Analyze a paper PDF using native PDF support.
+
+         Args:
+             pdf_base64: Base64-encoded PDF data.
+             filename: Original filename of the PDF.
+             prompt: The analysis prompt to send to the model.
+
+         Returns:
+             The model's analysis as a string.
+         """
+         pass
+
+     def get_model_info(self) -> str:
+         """Get a string describing the provider and model being used."""
+         return f"{self.provider_name} ({self.model})"
fluff_cutter/providers/openai.py ADDED
@@ -0,0 +1,54 @@
+ """OpenAI provider implementation with native PDF support."""
+
+ from openai import OpenAI
+
+ from .base import BaseLLMProvider
+
+
+ class OpenAIProvider(BaseLLMProvider):
+     """OpenAI GPT-4o provider with native PDF support."""
+
+     @property
+     def default_model(self) -> str:
+         return "gpt-4o"
+
+     @property
+     def provider_name(self) -> str:
+         return "OpenAI"
+
+     def analyze_paper(self, pdf_base64: str, filename: str, prompt: str) -> str:
+         """
+         Analyze a paper using OpenAI's native PDF support.
+
+         Args:
+             pdf_base64: Base64-encoded PDF data.
+             filename: Original filename of the PDF.
+             prompt: The analysis prompt to send to the model.
+
+         Returns:
+             The model's analysis as a string.
+         """
+         client = OpenAI(api_key=self.api_key)
+
+         # Use the Responses API with native PDF support
+         response = client.responses.create(
+             model=self.model,
+             input=[
+                 {
+                     "role": "user",
+                     "content": [
+                         {
+                             "type": "input_file",
+                             "filename": filename,
+                             "file_data": f"data:application/pdf;base64,{pdf_base64}",
+                         },
+                         {
+                             "type": "input_text",
+                             "text": prompt,
+                         },
+                     ],
+                 }
+             ],
+         )
+
+         return response.output_text or ""
fluff_cutter-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,104 @@
+ Metadata-Version: 2.4
+ Name: fluff-cutter
+ Version: 0.1.0
+ Summary: A CLI tool to analyze academic papers and extract their core value
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: click>=8.0
+ Requires-Dist: openai>=1.0
+ Requires-Dist: anthropic>=0.18
+ Requires-Dist: python-dotenv>=1.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0; extra == "dev"
+ Requires-Dist: ruff>=0.1.0; extra == "dev"
+ Dynamic: license-file
+
+ # Paper Fluff Cutter
+
+ A CLI tool that cuts through academic paper fluff to extract what actually matters.
+
+ Most research has close to zero value. This tool uses multimodal LLMs to analyze papers and answer the three questions every paper should be able to answer:
+
+ 1. **Why should I care?** - What problem does this address and why does it matter?
+ 2. **What's the actual innovation?** - What's the core idea in plain terms?
+ 3. **Is the evidence convincing?** - Do the experiments actually support the claims?
+
+ ## Installation
+
+ ### Prerequisites
+
+ - Python 3.10+
+
+ ### Install the tool
+
+ ```bash
+ pip install -e .
+ ```
+
+ ## Configuration
+
+ ### Option 1: Interactive setup (recommended)
+
+ ```bash
+ fluff-cutter init
+ ```
+
+ This will prompt you for your API keys and save them to `~/.config/fluff-cutter/config.json`.
+
+ ### Option 2: Environment variables
+
+ ```bash
+ export OPENAI_API_KEY=sk-your-key-here
+ export ANTHROPIC_API_KEY=sk-ant-your-key-here
+ export FLUFF_CUTTER_PROVIDER=anthropic # optional, default provider
+ ```
+
+ ## Usage
+
+ ### Basic usage
+
+ ```bash
+ fluff-cutter analyze paper.pdf
+ ```
+
+ ### Specify provider
+
+ ```bash
+ fluff-cutter analyze paper.pdf --provider openai
+ fluff-cutter analyze paper.pdf --provider anthropic
+ ```
+
+ ### Specify model
+
+ ```bash
+ fluff-cutter analyze paper.pdf --provider openai --model gpt-4o
+ fluff-cutter analyze paper.pdf --provider anthropic --model claude-sonnet-4-20250514
+ ```
+
+ ### Save output to file
+
+ ```bash
+ fluff-cutter analyze paper.pdf --output analysis.md
+ ```
+
+ ## Supported Providers
+
+ | Provider | Default Model | Environment Variable |
+ |----------|---------------|---------------------|
+ | OpenAI | gpt-4o | `OPENAI_API_KEY` |
+ | Anthropic | claude-sonnet-4-20250514 | `ANTHROPIC_API_KEY` |
+
+ Both providers support native PDF input, so no external dependencies such as poppler are needed.
+
+ ## Configuration Precedence
+
+ Configuration is loaded with the following precedence (highest to lowest):
+
+ 1. Command-line arguments (`--provider`, `--model`)
+ 2. Environment variables
+ 3. Config file (`~/.config/fluff-cutter/config.json`)
+
+ ## License
+
+ MIT
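For reference, the file written by `fluff-cutter init` uses the same key names that `config.py` reads back. A sketch of `~/.config/fluff-cutter/config.json` with placeholder values:

```json
{
  "openai_api_key": "sk-your-key-here",
  "anthropic_api_key": "sk-ant-your-key-here",
  "default_provider": "anthropic"
}
```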
fluff_cutter-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+ fluff_cutter/__init__.py,sha256=m54r3DTMukXFMTUNb6dqISi4-rYP-3cAwNlxO85B3WY,95
+ fluff_cutter/analyzer.py,sha256=Er_ZFva5x12evkg4jiOLhYllGFjcTlch7OSHz0b-SAQ,2006
+ fluff_cutter/cli.py,sha256=Txg4sAvbmzn1-HhViMeXWfOKJnqyuBItEfbHK--8Ytw,5354
+ fluff_cutter/config.py,sha256=Ly5ZKz8JDWNxUPJx8m9jOEcWoxF24LX64Wr0nXNkfU4,2918
+ fluff_cutter/output.py,sha256=vQblPNEdPs8jaeZT0RDw376FGPD9cqH39KMZlP5iDlg,1409
+ fluff_cutter/pdf.py,sha256=9HYEMRbYwaJeZPFPvvUEkNbSxLLt0uwz1izBK-UZdkg,992
+ fluff_cutter/providers/__init__.py,sha256=GHUJmPYf-f46LdiBN9RtCY25bJMIdp7Bp1PtIwbK_cw,217
+ fluff_cutter/providers/anthropic.py,sha256=rtE8Io3QLu-svw8tD72WoTpfvY2_RAiZaHTomrUA4bA,1791
+ fluff_cutter/providers/base.py,sha256=6KkfAgh1jSy3IeXiI6mp0oIX4aQMs9188JWWf9g6WXk,1385
+ fluff_cutter/providers/openai.py,sha256=ObYGNBRMkvnlmK6jbCTv8nN6ReVrCTmTlV1oneq-Ii4,1559
+ fluff_cutter-0.1.0.dist-info/licenses/LICENSE,sha256=Web8HWLb3-BT76oD6gp0yLeRc-6trinrOCE03-NCsWM,1070
+ fluff_cutter-0.1.0.dist-info/METADATA,sha256=jaEyBW2AKz9DMDV5h-IjnGClrHrosff0-PT8H1-v2fk,2528
+ fluff_cutter-0.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ fluff_cutter-0.1.0.dist-info/entry_points.txt,sha256=XbYJJc_MN1PgHB47-NWk76BnQ5l0Ba0B5jb9vwAEU5Y,55
+ fluff_cutter-0.1.0.dist-info/top_level.txt,sha256=Hb1MmR3LbLIc9PXSm60Jn4a-fht1wwvCT5kxhnyB6VI,13
+ fluff_cutter-0.1.0.dist-info/RECORD,,
fluff_cutter-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.10.2)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
fluff_cutter-0.1.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
+ [console_scripts]
+ fluff-cutter = fluff_cutter.cli:main
fluff_cutter-0.1.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2026 Weijian Zhang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
fluff_cutter-0.1.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ fluff_cutter