Skip to content
This repository has been archived by the owner on Aug 3, 2023. It is now read-only.

[Wrangler Ignore option 2] exclude/include logic in wrangler.toml for static asset uploads #760

Merged
merged 39 commits into from
Oct 7, 2019
Merged
Show file tree
Hide file tree
Changes from 38 commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
b8292d2
Add .wignore parsing logic. currently only does exact file matches; s…
gabbifish Sep 24, 2019
02a1f1a
Ignore node_modules and gatsby sourcemaps
gabbifish Sep 24, 2019
ebf22b6
s/if_ignored_prefix/contains_ignored_prefix
gabbifish Sep 24, 2019
115037e
intermediate progress on using walkdir
gabbifish Sep 25, 2019
c52e6e4
Great advice from ashley--filter at the dir walking level when possib…
gabbifish Sep 25, 2019
96c7c20
add regression test too
gabbifish Sep 25, 2019
07b65a6
remove gatsby sourcemap ignore logic
gabbifish Sep 25, 2019
4dfe170
fix clippy warnings
gabbifish Sep 25, 2019
8b3d523
Add message for ignoring files (#713)
EverlastingBugstopper Sep 25, 2019
3cd8bae
fix merge conflict stuff
gabbifish Sep 25, 2019
f331fff
intermediate progress
gabbifish Sep 25, 2019
56427f3
fix residual merge errors
gabbifish Sep 26, 2019
66c1bab
Add .wranglerignore functionality
gabbifish Sep 26, 2019
e5533d2
fix clippy error
gabbifish Sep 26, 2019
171c56d
remove unused walkdir dependency
gabbifish Sep 26, 2019
21490a3
Add .wignore parsing logic. currently only does exact file matches; s…
gabbifish Sep 24, 2019
8aa18c6
fix merge conflict
gabbifish Oct 2, 2019
b67151d
s/if_ignored_prefix/contains_ignored_prefix
gabbifish Sep 24, 2019
056c574
intermediate progress on using walkdir
gabbifish Sep 25, 2019
4a84cc3
Great advice from ashley--filter at the dir walking level when possib…
gabbifish Sep 25, 2019
192a9e4
add regression test too
gabbifish Sep 25, 2019
0b04354
remove gatsby sourcemap ignore logic
gabbifish Sep 25, 2019
76a7e6e
fix clippy warnings
gabbifish Sep 25, 2019
0588ad4
Add message for ignoring files (#713)
EverlastingBugstopper Sep 25, 2019
988019a
fix merge conflict stuff
gabbifish Sep 25, 2019
85beeca
intermediate progress
gabbifish Sep 25, 2019
c27bf2b
fix residual merge errors
gabbifish Sep 26, 2019
67dbcbe
Add .wranglerignore functionality
gabbifish Sep 26, 2019
3ec8ed1
fix clippy error
gabbifish Sep 26, 2019
5ce0950
remove unused walkdir dependency
gabbifish Sep 26, 2019
e07fa25
fix merge conflict
gabbifish Oct 2, 2019
f26724b
Now ignore node_modules by default; no need to have in .wranglerignore
gabbifish Oct 3, 2019
393e398
intermediate progress
gabbifish Oct 3, 2019
51aea07
use include/exclude semantics for wranglerignore
gabbifish Oct 3, 2019
8fe368c
Merge branch 'master' into gabbi/wrangler-exclude-include
gabbifish Oct 4, 2019
c181061
Merge branch 'master' into gabbi/wrangler-exclude-include
gabbifish Oct 4, 2019
005b746
Merge branch 'master' into gabbi/wrangler-exclude-include
gabbifish Oct 7, 2019
0a406e8
fix Cargo.lock
gabbifish Oct 7, 2019
3cfe244
Respond to avery comments pt 1
gabbifish Oct 7, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
300 changes: 138 additions & 162 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,12 @@ prettytable-rs = "0.8.0"
notify = "4.0.12"
ws = "0.9.0"
url = "2.1.0"
walkdir = "2.2.9"
percent-encoding = "1.0.1"
http = "0.1.1"
regex = "1"
sha2 = "0.8.0"
data-encoding = "2.1.2"
ignore = "0.4.10"

[dev-dependencies]
assert_cmd = "0.11.1"
Expand Down
2 changes: 1 addition & 1 deletion src/commands/generate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ pub fn generate(
} else {
None
};
Manifest::generate(name.to_string(), target_type, config_path, generated_site)?;
Manifest::generate(name.to_string(), target_type, &config_path, generated_site)?;

Ok(())
}
Expand Down
8 changes: 6 additions & 2 deletions src/commands/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,12 @@ pub fn init(
let target_type = target_type.unwrap_or_default();
let config_path = PathBuf::from("./");
let initialized_site = if site { Some(Site::default()) } else { None };
let manifest =
Manifest::generate(name.to_string(), target_type, config_path, initialized_site)?;
let manifest = Manifest::generate(
name.to_string(),
target_type,
&config_path,
initialized_site,
)?;
message::success("Succesfully created a `wrangler.toml`");

if site {
Expand Down
281 changes: 201 additions & 80 deletions src/commands/kv/bucket/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,22 +15,24 @@ use std::path::Path;

use cloudflare::endpoints::workerskv::write_bulk::KeyValuePair;

use walkdir::{DirEntry, WalkDir};
use ignore::overrides::{Override, OverrideBuilder};
use ignore::{Walk, WalkBuilder};

use crate::settings::target::Target;
use crate::terminal::message;

// Returns the hashed key and value pair for all files in a directory.
pub fn directory_keys_values(
target: &Target,
directory: &Path,
verbose: bool,
) -> Result<(Vec<KeyValuePair>, AssetManifest), failure::Error> {
let mut upload_vec: Vec<KeyValuePair> = Vec::new();
let mut asset_manifest: AssetManifest = AssetManifest::new();

for entry in WalkDir::new(directory)
.into_iter()
.filter_entry(|e| !is_ignored(e))
{
let dir_walker = get_dir_iterator(target, directory)?;
gabbifish marked this conversation as resolved.
Show resolved Hide resolved

for entry in dir_walker {
let entry = entry.unwrap();
let path = entry.path();
if path.is_file() {
Expand Down Expand Up @@ -60,9 +62,12 @@ pub fn directory_keys_values(
}

// Returns only the hashed keys for a directory's files.
fn directory_keys_only(directory: &Path) -> Result<Vec<String>, failure::Error> {
fn directory_keys_only(target: &Target, directory: &Path) -> Result<Vec<String>, failure::Error> {
let mut upload_vec: Vec<String> = Vec::new();
for entry in WalkDir::new(directory) {

let dir_walker = get_dir_iterator(target, directory)?;

for entry in dir_walker {
gabbifish marked this conversation as resolved.
Show resolved Hide resolved
let entry = entry.unwrap();
let path = entry.path();
if path.is_file() {
Expand All @@ -79,34 +84,36 @@ fn directory_keys_only(directory: &Path) -> Result<Vec<String>, failure::Error>
Ok(upload_vec)
}

// todo(gabbi): Replace all the logic below with a proper .wignore implementation
// when possible.
const KNOWN_UNNECESSARY_DIRS: &[&str] = &[
"node_modules", // npm vendoring
];
const KNOWN_UNNECESSARY_FILE_PREFIXES: &[&str] = &[
".", // hidden files
];
fn is_ignored(entry: &DirEntry) -> bool {
let stem = entry.file_name().to_str().unwrap();
// First, ensure that files with specified prefixes are ignored
for prefix in KNOWN_UNNECESSARY_FILE_PREFIXES {
if stem.starts_with(prefix) {
// Just need to check prefix
message::info(&format!("ignoring file {}", stem));
return true;
}
/// Builds a directory walker over `directory` that applies the required
/// ignore rules plus any `include`/`exclude` globs from the target's
/// site configuration.
fn get_dir_iterator(target: &Target, directory: &Path) -> Result<Walk, failure::Error> {
    let overrides = build_ignore(target, directory)?;
    let walker = WalkBuilder::new(directory).overrides(overrides).build();
    Ok(walker)
}

const REQUIRED_IGNORE_FILES: &[&str] = &["node_modules"];

fn build_ignore(target: &Target, directory: &Path) -> Result<Override, failure::Error> {
let mut required_override = OverrideBuilder::new(directory);
// First include files that must be ignored.
for ignored in REQUIRED_IGNORE_FILES {
required_override.add(&format!("!{}", ignored))?;
}

// Then, ensure files in ignored directories are also ignored.
for dir in KNOWN_UNNECESSARY_DIRS {
if stem == *dir {
// Need to check for full equality here
message::info(&format!("ignoring directory {}", dir));
return true;
if let Some(s) = &target.site {
gabbifish marked this conversation as resolved.
Show resolved Hide resolved
// If `include` present, use it and don't touch the `exclude` field
if let Some(included) = &s.include {
for i in included {
required_override.add(&i)?;
}
// If `exclude` only present, ignore anything in it.
} else if let Some(excluded) = &s.exclude {
for e in excluded {
required_override.add(&format!("!{}", e))?;
}
}
}
false

let exclude = required_override.build()?;
Ok(exclude)
}

// Courtesy of Steve Klabnik's PoC :) Used for bulk operations (write, delete)
Expand Down Expand Up @@ -194,82 +201,196 @@ mod tests {
use regex::Regex;
use std::fs;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;

use crate::settings::target::{Site, Target, TargetType};

/// Builds a minimal JavaScript `Target` wrapping the given `Site` config,
/// used to exercise the include/exclude walker logic in these tests.
fn make_target(site: Site) -> Target {
    Target {
        name: "".to_string(),
        target_type: TargetType::JavaScript,
        account_id: "".to_string(),
        zone_id: None,
        kv_namespaces: None,
        route: None,
        routes: None,
        webpack_config: None,
        workers_dev: true,
        site: Some(site),
    }
}

#[test]
fn it_can_ignore_node_modules() {
    // No include/exclude configured: only the built-in ignore rules apply.
    let target = make_target(Site {
        bucket: "fake".to_string(),
        entry_point: None,
        include: None,
        exclude: None,
    });

    let test_dir = "test1";
    // If the test dir already exists (e.g. from a previous failed run), delete it.
    if fs::metadata(test_dir).is_ok() {
        fs::remove_dir_all(test_dir).unwrap();
    }

    fs::create_dir_all(format!("{}/node_modules", test_dir)).unwrap();
    let test_pathname = format!("{}/node_modules/ignore_me.txt", test_dir);
    let test_path = PathBuf::from(&test_pathname);
    fs::File::create(&test_path).unwrap();

    let files: Vec<_> = get_dir_iterator(&target, Path::new(test_dir))
        .unwrap()
        .map(|entry| entry.unwrap().path().to_owned())
        .collect();

    // Clean up before asserting so a failed assert doesn't leave the dir behind.
    fs::remove_dir_all(test_dir).unwrap();

    // node_modules contents must be filtered out by the mandatory ignore list.
    assert!(!files.contains(&test_path));
}

#[test]
fn it_can_ignore_hidden() {
    // No include/exclude configured: only the walker's defaults apply.
    let target = make_target(Site {
        bucket: "fake".to_string(),
        entry_point: None,
        include: None,
        exclude: None,
    });

    let test_dir = "test2";
    // If the test dir already exists, delete it.
    if fs::metadata(test_dir).is_ok() {
        fs::remove_dir_all(test_dir).unwrap();
    }

    fs::create_dir(test_dir).unwrap();
    let test_pathname = format!("{}/.ignore_me.txt", test_dir);
    let test_path = PathBuf::from(&test_pathname);
    fs::File::create(&test_path).unwrap();

    let files: Vec<_> = get_dir_iterator(&target, Path::new(test_dir))
        .unwrap()
        .map(|entry| entry.unwrap().path().to_owned())
        .collect();

    // Clean up before asserting so a failed assert doesn't leave the dir behind.
    fs::remove_dir_all(test_dir).unwrap();

    // Hidden (dot-prefixed) files should be skipped by the walker.
    assert!(!files.contains(&test_path));
}

#[test]
fn it_can_allow_unfiltered_files() {
    // No include/exclude configured: ordinary files should pass through.
    let target = make_target(Site {
        bucket: "fake".to_string(),
        entry_point: None,
        include: None,
        exclude: None,
    });

    let test_dir = "test3";
    // If the test dir already exists, delete it.
    if fs::metadata(test_dir).is_ok() {
        fs::remove_dir_all(test_dir).unwrap();
    }

    fs::create_dir(test_dir).unwrap();
    let test_pathname = format!("{}/notice_me.txt", test_dir);
    let test_path = PathBuf::from(&test_pathname);
    fs::File::create(&test_path).unwrap();

    let files: Vec<_> = get_dir_iterator(&target, Path::new(test_dir))
        .unwrap()
        .map(|entry| entry.unwrap().path().to_owned())
        .collect();

    // Clean up before asserting so a failed assert doesn't leave the dir behind.
    fs::remove_dir_all(test_dir).unwrap();

    // A regular, unfiltered file must be included in the walk.
    assert!(files.contains(&test_path));
}

#[test]
fn it_can_filter_by_include() {
    // `include` whitelists only the listed globs; everything else is dropped.
    let target = make_target(Site {
        bucket: "fake".to_string(),
        entry_point: None,
        include: Some(vec!["this_isnt_here.txt".to_string()]),
        exclude: None,
    });

    let test_dir = "test4";
    // If the test dir already exists, delete it.
    if fs::metadata(test_dir).is_ok() {
        fs::remove_dir_all(test_dir).unwrap();
    }

    fs::create_dir(test_dir).unwrap();
    let test_pathname = format!("{}/ignore_me.txt", test_dir);
    let test_path = PathBuf::from(&test_pathname);
    fs::File::create(&test_path).unwrap();

    let files: Vec<_> = get_dir_iterator(&target, Path::new(test_dir))
        .unwrap()
        .map(|entry| entry.unwrap().path().to_owned())
        .collect();

    // Clean up the temp dir (the original test leaked it on every run).
    fs::remove_dir_all(test_dir).unwrap();

    // The file is not in the include list, so it must be filtered out.
    assert!(!files.contains(&test_path));
}

#[test]
fn it_can_filter_by_exclude() {
    // With no `include`, each `exclude` glob is negated and thus ignored.
    let target = make_target(Site {
        bucket: "fake".to_string(),
        entry_point: None,
        include: None,
        exclude: Some(vec!["ignore_me.txt".to_string()]),
    });

    let test_dir = "test5";
    // If the test dir already exists, delete it.
    if fs::metadata(test_dir).is_ok() {
        fs::remove_dir_all(test_dir).unwrap();
    }

    fs::create_dir(test_dir).unwrap();
    let test_pathname = format!("{}/ignore_me.txt", test_dir);
    let test_path = PathBuf::from(&test_pathname);
    fs::File::create(&test_path).unwrap();

    let files: Vec<_> = get_dir_iterator(&target, Path::new(test_dir))
        .unwrap()
        .map(|entry| entry.unwrap().path().to_owned())
        .collect();

    // Clean up the temp dir (the original test leaked it on every run).
    fs::remove_dir_all(test_dir).unwrap();

    // The excluded file must be filtered out of the walk.
    assert!(!files.contains(&test_path));
}

#[test]
fn it_can_prioritize_include_over_exclude() {
    // When both are set, `include` wins and `exclude` is not consulted.
    let target = make_target(Site {
        bucket: "fake".to_string(),
        entry_point: None,
        include: Some(vec!["notice_me.txt".to_string()]),
        exclude: Some(vec!["notice_me.txt".to_string()]),
    });

    let test_dir = "test6";
    // If the test dir already exists, delete it.
    if fs::metadata(test_dir).is_ok() {
        fs::remove_dir_all(test_dir).unwrap();
    }

    fs::create_dir(test_dir).unwrap();
    let test_pathname = format!("{}/notice_me.txt", test_dir);
    let test_path = PathBuf::from(&test_pathname);
    fs::File::create(&test_path).unwrap();

    let files: Vec<_> = get_dir_iterator(&target, Path::new(test_dir))
        .unwrap()
        .map(|entry| entry.unwrap().path().to_owned())
        .collect();

    // Clean up the temp dir (the original test leaked it on every run).
    fs::remove_dir_all(test_dir).unwrap();

    // Included despite also being excluded: include takes precedence.
    assert!(files.contains(&test_path));
}

#[test]
Expand Down
2 changes: 1 addition & 1 deletion src/commands/kv/bucket/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ pub fn sync(
// Now delete files from Workers KV that exist in remote but no longer exist locally.
// Get local keys
let local_keys_vec: Vec<String> = match &metadata(path) {
Ok(file_type) if file_type.is_dir() => directory_keys_only(path),
Ok(file_type) if file_type.is_dir() => directory_keys_only(target, path),
Ok(_) => failure::bail!("{} should be a directory", path.display()),
Err(e) => failure::bail!("{}", e),
}?;
Expand Down
Loading