fix(prepare): store temporary query files inside the workspace
aschey committed Mar 11, 2023
1 parent aff632a commit 4fd18dd
Showing 4 changed files with 29 additions and 4 deletions.
7 changes: 6 additions & 1 deletion FAQ.md
@@ -200,11 +200,16 @@ as an ergonomic choice it does _not_ block committing if `cargo sqlx prepare` fails

We're working on a way for the macros to save their data to the filesystem automatically which should be part of SQLx 0.7,
so your pre-commit hook would then just need to stage the changed files. This can be enabled by creating a directory
-and setting the `SQLX_OFFLINE_DIR` environment variable to it before compiling, e.g.
+and setting the `SQLX_OFFLINE_DIR` environment variable to it before compiling. Additionally, you may want to set the `SQLX_TMP`
+variable in order to store temporary query files somewhere that isn't picked up by git.
+These files should get cleaned up automatically, but they may not if there's a failure. For example:

```shell
$ mkdir .sqlx
$ export SQLX_OFFLINE_DIR="./.sqlx"
$ # Optional, ensures temp files won't get picked up by git on failure
$ mkdir ./target/sqlx-tmp
$ export SQLX_TMP="./target/sqlx-tmp"
$ cargo check
```

9 changes: 9 additions & 0 deletions sqlx-cli/src/prepare.rs
@@ -147,6 +147,14 @@ fn run_prepare_step(ctx: &PrepareCtx, cache_dir: &Path) -> anyhow::Result<()> {
"Failed to create query cache directory: {:?}",
cache_dir
))?;

// Create directory to hold temporary query files before they get persisted to SQLX_OFFLINE_DIR
let tmp_dir = ctx.metadata.target_directory().join("sqlx-tmp");
fs::create_dir_all(&tmp_dir).context(format!(
"Failed to create temporary query cache directory: {:?}",
tmp_dir
))?;

// Only delete sqlx-*.json files to avoid accidentally deleting any user data.
for query_file in glob_query_files(cache_dir).context("Failed to read query cache files")? {
fs::remove_file(&query_file)
@@ -163,6 +171,7 @@ fn run_prepare_step(ctx: &PrepareCtx, cache_dir: &Path) -> anyhow::Result<()> {
check_command
.arg("check")
.args(&ctx.cargo_args)
.env("SQLX_TMP", tmp_dir)
.env("DATABASE_URL", &ctx.connect_opts.database_url)
.env("SQLX_OFFLINE", "false")
.env("SQLX_OFFLINE_DIR", cache_dir);
11 changes: 9 additions & 2 deletions sqlx-macros-core/src/query/data.rs
@@ -157,10 +157,17 @@ where
}
}

-pub(super) fn save_in(&self, dir: impl AsRef<Path>) -> crate::Result<()> {
+pub(super) fn save_in(
+    &self,
+    dir: impl AsRef<Path>,
+    tmp_dir: impl AsRef<Path>,
+) -> crate::Result<()> {
// Output to a temporary file first, then move it atomically to avoid clobbering
// other invocations trying to write to the same path.
-let mut tmp_file = tempfile::NamedTempFile::new()

+// Use a temp directory inside the workspace to avoid potential issues
+// with persisting the file across filesystems.
+let mut tmp_file = tempfile::NamedTempFile::new_in(tmp_dir)
.map_err(|err| format!("failed to create query file: {:?}", err))?;
serde_json::to_writer_pretty(tmp_file.as_file_mut(), self)
.map_err(|err| format!("failed to serialize query data to file: {:?}", err))?;
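For readers unfamiliar with the write-then-rename pattern the comments above describe, here is a minimal, self-contained sketch using `tempfile`. It is illustrative only: the function name `save_query_data` and the output file name are made up, not the SQLx source.

```rust
use std::path::Path;

// Sketch of the write-then-rename pattern: serialize into a temp file created
// inside the workspace, then atomically move it to its final location.
fn save_query_data(data: &serde_json::Value, dir: &Path, tmp_dir: &Path) -> Result<(), String> {
    let mut tmp_file = tempfile::NamedTempFile::new_in(tmp_dir)
        .map_err(|err| format!("failed to create query file: {:?}", err))?;
    serde_json::to_writer_pretty(tmp_file.as_file_mut(), data)
        .map_err(|err| format!("failed to serialize query data to file: {:?}", err))?;
    // `persist` renames the temp file into place; a rename can fail across
    // filesystems, which is why the temp directory is kept inside the workspace.
    tmp_file
        .persist(dir.join("query-example.json"))
        .map_err(|err| format!("failed to save query file: {:?}", err))?;
    Ok(())
}
```

Keeping the temporary file on the same filesystem as `SQLX_OFFLINE_DIR` is what makes the final rename safe; the system-wide temp directory used by `NamedTempFile::new()` may live on a different mount.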
6 changes: 5 additions & 1 deletion sqlx-macros-core/src/query/mod.rs
@@ -356,6 +356,10 @@ where
// Note: in a cargo workspace this path is relative to the root.
if let Ok(dir) = env("SQLX_OFFLINE_DIR") {
let path = PathBuf::from(&dir);
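// Use SQLX_TMP for intermediate query files when it is set; otherwise fall back to the offline directory itself.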
let tmp_dir = match env("SQLX_TMP") {
Ok(out_dir) => PathBuf::from(out_dir),
Err(_) => path.clone(),
};

match fs::metadata(&path) {
Err(e) => {
@@ -376,7 +380,7 @@
}

// .sqlx exists and is a directory, store data.
-data.save_in(path)?;
+data.save_in(path, tmp_dir)?;
}
}
}
