Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,4 @@ __pycache__/
.mypy_cache/
.coverage
htmlcov/
build/
10 changes: 6 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ nano ~/.pretty-release-notes/config.toml

### Configuration Format

The configuration file uses TOML format with sections for GitHub credentials, OpenAI settings, database caching, and filters. See [`config.toml.example`](config.toml.example) for the complete structure and all available options.
The configuration file uses TOML format with sections for GitHub credentials, LLM settings, database caching, and filters. The canonical section name is `[llm]`; the legacy `[openai]` section is still accepted for backward compatibility. Plain model names default to OpenAI, while the `provider:model` syntax targets other `any-llm` providers. See [`config.toml.example`](config.toml.example) for the complete structure and all available options.

You can override the config location using the `--config-path` flag.

Expand Down Expand Up @@ -130,7 +130,7 @@ from pretty_release_notes import ReleaseNotesBuilder
client = (
ReleaseNotesBuilder()
.with_github_token("ghp_your_token")
.with_openai("sk_your_key", model="gpt-4")
.with_llm("sk_your_key", model="gpt-4") # or model="anthropic:claude-sonnet-4-5"
.with_database("sqlite", enabled=True)
.with_filters(
exclude_types={"chore", "ci", "refactor"},
Expand Down Expand Up @@ -196,14 +196,16 @@ curl -X POST http://localhost:8000/generate \
"tag": "v15.38.4",
"previous_tag_name": "v15.38.0",
"github_token": "ghp_your_token_here",
"openai_key": "sk-your_key_here",
"openai_model": "gpt-4",
"llm_key": "sk-your_key_here",
"llm_model": "gpt-4",
"exclude_types": ["chore", "ci", "refactor"],
"exclude_labels": ["skip-release-notes"],
"exclude_authors": ["dependabot[bot]"]
}'
```

Legacy `openai_key` and `openai_model` request fields are still accepted for backward compatibility.

Response:
```json
{
Expand Down
12 changes: 7 additions & 5 deletions config.toml.example
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,15 @@ token = ""
# If not set, must be specified via --owner flag
owner = "frappe"

[openai]
# OpenAI API key (required)
# Get one at: https://platform.openai.com/api-keys
[llm]
# LLM provider API key (required)
# For plain OpenAI models, use an OpenAI key. For other providers, use that provider's key.
# Legacy [openai] is still accepted for backward compatibility.
api_key = ""

# OpenAI model to use (default: "gpt-4.1")
model = "o1"
# Model to use. Plain names default to OpenAI.
# For other providers, use "provider:model" (for example: "anthropic:claude-sonnet-4-5")
model = "gpt-4.1"

# Maximum patch size before fallback to commit message (default: 10000)
max_patch_size = 10000
Expand Down
14 changes: 6 additions & 8 deletions examples/library_usage.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def basic_usage():
client = (
ReleaseNotesBuilder()
.with_github_token("ghp_xxxxx") # Replace with your token
.with_openai("sk-xxxxx") # Replace with your API key
.with_llm("sk-xxxxx") # Replace with your API key
.build()
)

Expand All @@ -37,7 +37,7 @@ def advanced_usage():
client = (
ReleaseNotesBuilder()
.with_github_token("ghp_xxxxx") # Replace with your token
.with_openai("sk-xxxxx", model="gpt-4", max_patch_size=15000)
.with_llm("sk-xxxxx", model="gpt-4", max_patch_size=15000)
.with_database("sqlite", enabled=True)
.with_filters(
exclude_types={"chore", "refactor", "ci", "style", "test"},
Expand Down Expand Up @@ -69,14 +69,14 @@ def direct_config_usage():
DatabaseConfig,
FilterConfig,
GitHubConfig,
OpenAIConfig,
LLMConfig,
ReleaseNotesClient,
ReleaseNotesConfig,
)

config = ReleaseNotesConfig(
github=GitHubConfig(token="ghp_xxxxx"), # Replace with your token
openai=OpenAIConfig(api_key="sk-xxxxx", model="gpt-4.1"), # Replace with your key
llm=LLMConfig(api_key="sk-xxxxx", model="gpt-4.1"), # Replace with your key
database=DatabaseConfig(type="sqlite", enabled=True),
filters=FilterConfig(
exclude_change_types={"chore", "refactor"},
Expand All @@ -101,18 +101,16 @@ def silent_usage():
client = (
ReleaseNotesBuilder()
.with_github_token("ghp_xxxxx") # Replace with your token
.with_openai("sk-xxxxx") # Replace with your API key
.with_llm("sk-xxxxx") # Replace with your API key
.build()
) # No progress reporter = NullProgressReporter used by default

notes = client.generate_release_notes(
return client.generate_release_notes(
owner="frappe",
repo="erpnext",
tag="v15.38.4",
)

return notes


if __name__ == "__main__":
print("Example 1: Basic Usage")
Expand Down
2 changes: 2 additions & 0 deletions pretty_release_notes/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
DatabaseConfig,
FilterConfig,
GitHubConfig,
LLMConfig,
OpenAIConfig,
ReleaseNotesConfig,
)
Expand All @@ -25,6 +26,7 @@
# Configuration
"ReleaseNotesConfig",
"GitHubConfig",
"LLMConfig",
"OpenAIConfig",
"DatabaseConfig",
"FilterConfig",
Expand Down
28 changes: 16 additions & 12 deletions pretty_release_notes/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
FilterConfig,
GitHubConfig,
GroupingConfig,
OpenAIConfig,
LLMConfig,
ReleaseNotesConfig,
)
from .core.interfaces import NullProgressReporter, ProgressReporter
Expand Down Expand Up @@ -72,8 +72,8 @@ class ReleaseNotesBuilder:

def __init__(self):
self._github_token = None
self._openai_key = None
self._openai_model = "gpt-4.1"
self._llm_key = None
self._llm_model = "gpt-4.1"
self._max_patch_size = 10000
self._db_type = "sqlite"
self._db_name = "stored_lines"
Expand All @@ -93,13 +93,17 @@ def with_github_token(self, token: str) -> "ReleaseNotesBuilder":
self._github_token = token
return self

def with_openai(self, api_key: str, model: str = "gpt-4.1", max_patch_size: int = 10000) -> "ReleaseNotesBuilder":
"""Set OpenAI configuration."""
self._openai_key = api_key
self._openai_model = model
def with_llm(self, api_key: str, model: str = "gpt-4.1", max_patch_size: int = 10000) -> "ReleaseNotesBuilder":
"""Set LLM configuration."""
self._llm_key = api_key
self._llm_model = model
self._max_patch_size = max_patch_size
return self

def with_openai(self, api_key: str, model: str = "gpt-4.1", max_patch_size: int = 10000) -> "ReleaseNotesBuilder":
"""Backward-compatible alias for with_llm()."""
return self.with_llm(api_key=api_key, model=model, max_patch_size=max_patch_size)

def with_database(
self, db_type: str = "sqlite", db_name: str = "stored_lines", enabled: bool = True
) -> "ReleaseNotesBuilder":
Expand Down Expand Up @@ -179,14 +183,14 @@ def build(self) -> ReleaseNotesClient:
"""
if not self._github_token:
raise ValueError("GitHub token is required")
if not self._openai_key:
raise ValueError("OpenAI API key is required")
if not self._llm_key:
raise ValueError("LLM API key is required")

config = ReleaseNotesConfig(
github=GitHubConfig(token=self._github_token),
openai=OpenAIConfig(
api_key=self._openai_key,
model=self._openai_model,
llm=LLMConfig(
api_key=self._llm_key,
model=self._llm_model,
max_patch_size=self._max_patch_size,
),
database=DatabaseConfig(
Expand Down
46 changes: 42 additions & 4 deletions pretty_release_notes/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,17 @@ def __post_init__(self):


@dataclass
class OpenAIConfig:
class LLMConfig:
api_key: str
model: str = "gpt-4.1"
max_patch_size: int = 10000

def __post_init__(self):
if not self.api_key:
raise ValueError("OpenAI API key is required")
raise ValueError("LLM API key is required")


OpenAIConfig = LLMConfig


@dataclass
Expand Down Expand Up @@ -87,12 +90,47 @@ def _get_default_prompt_path() -> Path:
return package_dir / "prompt.txt"


@dataclass
@dataclass(init=False)
class ReleaseNotesConfig:
github: GitHubConfig
openai: OpenAIConfig
llm: LLMConfig
database: DatabaseConfig = field(default_factory=DatabaseConfig)
filters: FilterConfig = field(default_factory=FilterConfig)
grouping: GroupingConfig = field(default_factory=GroupingConfig)
prompt_path: Path = field(default_factory=_get_default_prompt_path)
force_use_commits: bool = False

def __init__(
    self,
    github: GitHubConfig,
    llm: LLMConfig | None = None,
    openai: LLMConfig | None = None,
    database: DatabaseConfig | None = None,
    filters: FilterConfig | None = None,
    grouping: GroupingConfig | None = None,
    prompt_path: Path | None = None,
    force_use_commits: bool = False,
):
    """Assemble the aggregate configuration.

    Exactly one LLM configuration is required; ``llm`` is canonical and
    ``openai`` is the legacy alias. Supplying both is tolerated only when
    they compare equal, so duplicated-but-consistent call sites keep working.

    Raises:
        ValueError: If ``llm`` and ``openai`` disagree, or if neither is given.
    """
    # Reject contradictory duplicate configuration up front.
    if llm is not None and openai is not None and llm != openai:
        raise ValueError("Pass either llm or openai configuration, not both")

    chosen = llm or openai
    if chosen is None:
        raise ValueError("LLM configuration is required")

    self.github = github
    self.llm = chosen
    # Fall back to default sub-configs only where the caller omitted them.
    self.database = DatabaseConfig() if database is None else database
    self.filters = FilterConfig() if filters is None else filters
    self.grouping = GroupingConfig() if grouping is None else grouping
    self.prompt_path = _get_default_prompt_path() if prompt_path is None else prompt_path
    self.force_use_commits = force_use_commits

@property
def openai(self) -> LLMConfig:
    """Backward-compatible alias for llm configuration."""
    return self.llm

@openai.setter
def openai(self, value: LLMConfig) -> None:
    # Legacy assignments to `config.openai` write through to the canonical
    # `llm` attribute, so both names always observe the same object.
    self.llm = value
44 changes: 24 additions & 20 deletions pretty_release_notes/core/config_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
FilterConfig,
GitHubConfig,
GroupingConfig,
OpenAIConfig,
LLMConfig,
ReleaseNotesConfig,
_get_default_prompt_path,
)
Expand All @@ -30,14 +30,18 @@ def __init__(self, config_dict: dict[str, Any]):
self.config_dict = config_dict

def load(self) -> ReleaseNotesConfig:
llm_api_key = self.config_dict.get("llm_api_key", self.config_dict.get("openai_api_key"))
if llm_api_key is None:
raise KeyError("llm_api_key")

return ReleaseNotesConfig(
github=GitHubConfig(
token=self.config_dict["github_token"],
owner=self.config_dict.get("github_owner"),
),
openai=OpenAIConfig(
api_key=self.config_dict["openai_api_key"],
model=self.config_dict.get("openai_model", "gpt-4.1"),
llm=LLMConfig(
api_key=llm_api_key,
model=self.config_dict.get("llm_model", self.config_dict.get("openai_model", "gpt-4.1")),
max_patch_size=self.config_dict.get("max_patch_size", 10000),
),
database=DatabaseConfig(
Expand Down Expand Up @@ -79,21 +83,21 @@ def __init__(self, env_path: str = ".env"):
def load(self) -> ReleaseNotesConfig:
config = dotenv_values(self.env_path)

# Required fields - will raise KeyError if missing
# Required fields
github_token = config["GH_TOKEN"]
openai_key = config["OPENAI_API_KEY"]
llm_key = config.get("LLM_API_KEY") or config.get("OPENAI_API_KEY")

# Ensure github_token and openai_key are not None
# Ensure github_token and llm_key are not None
if github_token is None:
raise ValueError("GH_TOKEN is required in .env file")
if openai_key is None:
raise ValueError("OPENAI_API_KEY is required in .env file")
if llm_key is None:
raise ValueError("LLM_API_KEY is required in .env file")

return ReleaseNotesConfig(
github=GitHubConfig(token=github_token, owner=config.get("DEFAULT_OWNER")),
openai=OpenAIConfig(
api_key=openai_key,
model=config.get("OPENAI_MODEL") or "gpt-4.1",
llm=LLMConfig(
api_key=llm_key,
model=config.get("LLM_MODEL") or config.get("OPENAI_MODEL") or "gpt-4.1",
max_patch_size=int(config.get("MAX_PATCH_SIZE") or "10000"),
),
database=DatabaseConfig(
Expand Down Expand Up @@ -155,29 +159,29 @@ def load(self) -> ReleaseNotesConfig:

# Extract nested sections with defaults
github_config = config.get("github", {})
openai_config = config.get("openai", {})
llm_config = {**config.get("openai", {}), **config.get("llm", {})}
database_config = config.get("database", {})
filters_config = config.get("filters", {})
grouping_config = config.get("grouping", {})

# Required fields
github_token = github_config.get("token")
openai_key = openai_config.get("api_key")
llm_key = llm_config.get("api_key")

if not github_token:
raise ValueError("github.token is required in config file")
if not openai_key:
raise ValueError("openai.api_key is required in config file")
if not llm_key:
raise ValueError("llm.api_key is required in config file")

return ReleaseNotesConfig(
github=GitHubConfig(
token=github_token,
owner=github_config.get("owner"),
),
openai=OpenAIConfig(
api_key=openai_key,
model=openai_config.get("model", "gpt-4.1"),
max_patch_size=openai_config.get("max_patch_size", 10000),
llm=LLMConfig(
api_key=llm_key,
model=llm_config.get("model", "gpt-4.1"),
max_patch_size=llm_config.get("max_patch_size", 10000),
),
database=DatabaseConfig(
type=database_config.get("type", "sqlite"),
Expand Down
Loading