feature: Add CLI option to run in offline mode. #52

Merged · 1 commit · Sep 24, 2023
10 changes: 3 additions & 7 deletions .github/release-drafter.yml
@@ -3,11 +3,6 @@

name-template: 'v$RESOLVED_VERSION'
tag-template: 'v$RESOLVED_VERSION'
template: |
## What Changed 👀

$CHANGES
**Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION
categories:
- title: 🚀 Features
labels:
@@ -54,6 +49,7 @@ version-resolver:
labels:
- patch
default: patch
template: |
## Changes

exclude-labels:
- skip-changelog
$CHANGES
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -19,6 +19,12 @@ All notable changes to the *readme-ai* project will be documented in this file.

## [v0.1.1] - *2023-09-24*

### 🚀 Features

- Add CLI option to run *readme-ai* offline, generating the same README output excluding the LLM-generated text.
- This option is useful for users who want to generate READMEs without an API key.
- The option can be used by passing the `--offline-mode` flag to the CLI.

### 🛠 Changes

- Refactor remaining dataclasses in [conf.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/conf.py) to Pydantic models.
3 changes: 2 additions & 1 deletion README.md
@@ -403,8 +403,9 @@ To generate a *README.md* file, use the `readmeai` command in your terminal, alo

| Short Flag | Long Flag | Description | Status |
|------------|----------------|---------------------------------------------------|--------------|
| `-k` | `--api-key` | Your OpenAI API secret key. | Required |
| `-k` | `--api-key` | Your OpenAI API secret key. | Optional |
| `-e` | `--engine` | OpenAI GPT language model engine (gpt-3.5-turbo) | Optional |
| `-f` | `--offline-mode`| Run offline without calling the OpenAI API. | Optional |
| `-o` | `--output` | The output path for your README.md file. | Optional |
| `-r` | `--repository` | The URL or path to your code repository. | Required |
| `-t` | `--temperature`| The temperature (randomness) of the model | Optional |
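
For illustration (not part of this diff), a minimal offline-mode invocation built from the flags above; the repository URL simply points at the project's own repo, and `--api-key` is omitted since offline mode makes no OpenAI calls:

```sh
readmeai --repository https://github.com/eli64s/readme-ai --offline-mode
```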
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "readmeai"
version = "0.3.070"
version = "0.3.071"
description = "🚀 Generate beautiful README files automatically, powered by GPT-4 🪐"
authors = ["Eli <0x.eli.64s@gmail.com>"]
license = "MIT"
42 changes: 37 additions & 5 deletions readmeai/builder.py
@@ -1,6 +1,8 @@
"""Builds the README Markdown file for your codebase."""

import os
import subprocess
import urllib.parse
from pathlib import Path
from typing import List, Tuple

@@ -21,7 +23,9 @@ def build_markdown_file(
readme_sections = create_markdown_sections(config, helper, packages, summaries)
readme_file = "\n".join(readme_sections)
readme_path = Path(config.paths.readme)

factory.FileHandler().write(readme_path, readme_file)

logger.info(f"README file generated at: {readme_path}")


@@ -31,10 +35,11 @@ def create_markdown_sections(
packages: list,
summaries: tuple,
) -> List[str]:
"""Creates each section of the README Markdown file."""
"""Constructs each section of the README file."""
name = config.git.name
repository = config.git.repository
user_repo = utils.get_user_repository_name(repository)

badges_path = resource_filename(__package__, f"{config.paths.badges}")
badges_dict = factory.FileHandler().read(badges_path)

@@ -46,19 +51,21 @@
if "invalid" in user_repo.lower()
else markdown_badges
)
markdown_tables = create_tables(
create_markdown_tables(summaries), config.md.dropdown, user_repo
)

markdown_setup_guide = create_setup_guide(config, helper, summaries)

if not config.api.offline_mode:
tables = create_markdown_tables(summaries)
config.md.tables = create_tables(tables, config.md.dropdown, user_repo)

markdown_sections = [
config.md.header,
markdown_badges,
config.md.toc.format(name),
config.md.intro,
config.md.tree,
config.md.modules,
markdown_tables,
config.md.tables,
config.md.setup.format(name, repository, *markdown_setup_guide),
config.md.ending,
]
@@ -197,6 +204,31 @@ def create_table(data: List[Tuple[str, str]], user_repo_name: str) -> str:
return "\n".join(formatted_lines)


def generate_code_summary_table(base_url: str, directory: Path, level=0) -> str:
"""Creates a Markdown table structure for the given directory."""
markdown = ""
markdown += "| File | Summary |\n"
markdown += "| --- | --- |\n"

for item in sorted(directory.iterdir()):
if item.is_file():
relative_path = os.path.relpath(item, start=directory)
url_path = urllib.parse.quote(relative_path)
full_url = urllib.parse.urljoin(base_url, url_path)
markdown += f"| [{item.name}]({full_url}) | Summary of {item.name} |\n"

for item in sorted(directory.iterdir()):
if item.is_dir():
# If it is a sub-directory, create a collapsible section
markdown += f"\n<details closed><summary>{item.name}</summary>\n\n"
# Recursive call for sub-directory
markdown += generate_code_summary_table(base_url, item, level + 1)
# Close the collapsible section
markdown += "\n</details>\n\n"

return markdown


def create_directory_tree(repo_path: Path) -> str:
"""Creates a directory tree for the project."""
try:
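
As a usage sketch (illustrative, not part of the diff): the new `generate_code_summary_table` helper can be called directly, mirroring how `generate_readme` passes it the repository URL and the cloned temp directory. The URL and local path below are placeholders.

```python
from pathlib import Path

from readmeai.builder import generate_code_summary_table

# Placeholder inputs; generate_readme passes the repo URL and the temp clone directory.
repo_url = "https://github.com/eli64s/readme-ai"
local_clone = Path("/tmp/readme-ai")  # hypothetical local checkout of the repository

# Returns a Markdown "| File | Summary |" table, wrapping each sub-directory
# in a collapsible <details> section via a recursive call.
table_markdown = generate_code_summary_table(repo_url, local_clone)
print(table_markdown)
```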
2 changes: 2 additions & 0 deletions readmeai/conf.py
@@ -32,6 +32,7 @@ class ApiConfig(BaseModel):
tokens: int
tokens_max: int
temperature: float
offline_mode: bool
api_key: Optional[SecretStr] = Field(default=None)

@validator("api_key", pre=True, always=True)
@@ -115,6 +116,7 @@ class MarkdownConfig(BaseModel):
intro: str
modules: str
setup: str
tables: str
toc: str
tree: str

6 changes: 4 additions & 2 deletions readmeai/conf/conf.toml
@@ -7,6 +7,7 @@ rate_limit = 5
tokens = 650
tokens_max = 3800
temperature = 1.2
offline_mode = false

# Repository
[git]
@@ -55,9 +56,10 @@ Craft 3-4 sentences that encapsulate the core functionalities of the project, it
"""
slogan = "Conceptualize a catchy and memorable slogan for the GitHub project: {}. Limit your response to 80 characters."

# Markdown Templates
# Markdown Template Code
[md]
default = "`ℹ️ INSERT-DESCRIPTION`"
tables = ""
default = "▶︎ INSERT-DESCRIPTION"
dropdown = """<details closed><summary>{}</summary>\n\n{}\n\n</details>\n"""
header = """<div align="center">
<h1 align="center">
53 changes: 33 additions & 20 deletions readmeai/main.py
@@ -19,14 +19,14 @@
config_helper = conf.load_config_helper(config_model)


async def main(repository: str) -> None:
async def main(repository: str, offline: bool) -> None:
"""Main entrypoint for the readme-ai application."""
config.git = conf.GitConfig(repository=repository)
llm = model.OpenAIHandler(config)
await generate_readme(llm)
await generate_readme(llm, offline)


async def generate_readme(llm: model.OpenAIHandler) -> None:
async def generate_readme(llm: model.OpenAIHandler, offline: bool) -> None:
"""Orchestrates the README file generation process."""
name = config.git.name
repository = config.git.repository
@@ -41,28 +41,32 @@ async def generate_readme(llm: model.OpenAIHandler) -> None:
logger.info(f"Dependencies: {dependencies}")
logger.info(f"Total files: {len(file_text)}")

if offline:
logger.warning("Skipping OpenAI API calls as offline mode is enabled.")
config.md.tables = builder.generate_code_summary_table(repository, temp_dir)
code_summary = config.md.tables
slogan, overview, features = (
config.md.default,
config.md.default,
config.md.default,
)
else:
code_summary = await generate_code_to_text(llm, file_text)
slogan, overview, features = await generate_markdown_text(
llm, repository, code_summary
)
await llm.close()

config.md.header = config.md.header.format(name, slogan)
config.md.intro = config.md.intro.format(overview, features)
builder.build_markdown_file(config, config_helper, dependencies, code_summary)

except Exception as excinfo:
logger.error(f"Exception: {excinfo}")
raise excinfo

finally:
shutil.rmtree(temp_dir)

try:
code_summary, slogan, overview, features = {}, "", "", ""
code_summary = await generate_code_to_text(llm, file_text)
slogan, overview, features = await generate_markdown_text(
llm, repository, code_summary
)
except Exception as excinfo:
logger.error(f"Exception: {excinfo}")
finally:
await llm.close()

config.md.header = config.md.header.format(name, slogan)
config.md.intro = config.md.intro.format(overview, features)
builder.build_markdown_file(config, config_helper, dependencies, code_summary)


async def generate_code_to_text(
llm: model.OpenAIHandler, file_text: str
@@ -99,6 +103,13 @@ async def generate_markdown_text(
default="gpt-3.5-turbo",
help="OpenAI language model engine to use.",
)
@click.option(
"-f",
"--offline-mode",
is_flag=True,
default=False,
help="Run the tool in offline mode without calling the OpenAI API.",
)
@click.option(
"-o",
"--output",
@@ -130,6 +141,7 @@ async def generate_markdown_text(
def cli(
api_key: str,
engine: Optional[str],
offline_mode: bool,
output: Optional[str],
repository: str,
temperature: Optional[float],
@@ -141,13 +153,14 @@ def cli(
config.api.api_key = api_key
config.api.engine = engine
config.api.temperature = temperature
config.api.offline_mode = offline_mode

logger.info("README-AI is now executing.")
logger.info(f"Output file: {config.paths.readme}")
logger.info(f"OpenAI Engine: {config.api.engine}")
logger.info(f"OpenAI Temperature: {config.api.temperature}")

asyncio.run(main(repository))
asyncio.run(main(repository, offline_mode))

logger.info("README-AI execution complete.")
