diff --git a/.github/DOCS.md b/.github/DOCS.md
new file mode 100644
index 0000000..e932784
--- /dev/null
+++ b/.github/DOCS.md
@@ -0,0 +1,23 @@
+# Github config and workflows
+
+In this folder there is configuration for code coverage, dependabot, and CI
+workflows that check the library more deeply than the default configurations.
+
+This folder can be or was merged using a --allow-unrelated-histories merge
+strategy from <https://github.com/jonhoo/rust-ci-conf/> which provides a
+reasonably sensible base for writing your own ci on. By using this strategy
+the history of the CI repo is included in your repo, and future updates to
+the CI can be merged later.
+
+To perform this merge run:
+
+```shell
+git remote add ci https://github.com/jonhoo/rust-ci-conf.git
+git fetch ci
+git merge --allow-unrelated-histories ci/main
+```
+
+An overview of the files in this project is available at:
+<https://www.youtube.com/watch?v=xUH-4y92jPg>, which contains some
+rationale for decisions and runs through an example of solving minimal version
+and OpenSSL issues.
diff --git a/.github/codecov.yml b/.github/codecov.yml
new file mode 100644
index 0000000..cd5ce8f
--- /dev/null
+++ b/.github/codecov.yml
@@ -0,0 +1,21 @@
+# ref: https://docs.codecov.com/docs/codecovyml-reference
+coverage:
+ # Hold ourselves to a high bar
+ range: 85..100
+ round: down
+ precision: 1
+ status:
+ # ref: https://docs.codecov.com/docs/commit-status
+ project:
+ default:
+ # Avoid false negatives
+ threshold: 1%
+
+# Test files aren't important for coverage
+ignore:
+ - "tests"
+
+# Make comments less noisy
+comment:
+ layout: "files"
+ require_changes: true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..d0f091e
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,19 @@
+version: 2
+updates:
+ - package-ecosystem: github-actions
+ directory: /
+ schedule:
+ interval: daily
+ - package-ecosystem: cargo
+ directory: /
+ schedule:
+ interval: daily
+ ignore:
+ - dependency-name: "*"
+ # patch and minor updates don't matter for libraries as consumers of this library build
+ # with their own lockfile, rather than the version specified in this library's lockfile
+ # remove this ignore rule if your package has binaries to ensure that the binaries are
+ # built with the exact set of dependencies and those are up to date.
+ update-types:
+ - "version-update:semver-patch"
+ - "version-update:semver-minor"
diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
new file mode 100644
index 0000000..aed5529
--- /dev/null
+++ b/.github/workflows/check.yml
@@ -0,0 +1,94 @@
+# This workflow runs whenever a PR is opened or updated, or a commit is pushed to main. It runs
+# several checks:
+# - fmt: checks that the code is formatted according to rustfmt
+# - clippy: checks that the code does not contain any clippy warnings
+# - doc: checks that the code can be documented without errors
+# - hack: check combinations of feature flags
+# - msrv: check that the msrv specified in the crate is correct
+permissions:
+ contents: read
+# This configuration allows maintainers of this repo to create a branch and pull request based on
+# the new branch. Restricting the push trigger to the main branch ensures that the PR only gets
+# built once.
+on:
+ push:
+ branches: [main]
+ pull_request:
+# If new code is pushed to a PR branch, then cancel in progress workflows for that PR. Ensures that
+# we don't waste CI time, and returns results quicker https://github.com/jonhoo/rust-ci-conf/pull/5
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+name: check
+jobs:
+ fmt:
+ runs-on: ubuntu-latest
+ name: stable / fmt
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install stable
+ uses: dtolnay/rust-toolchain@stable
+ with:
+ components: rustfmt
+ - name: cargo fmt --check
+ run: cargo fmt --check
+ clippy:
+ runs-on: ubuntu-latest
+ name: ${{ matrix.toolchain }} / clippy
+ permissions:
+ contents: read
+ checks: write
+ strategy:
+ fail-fast: false
+ matrix:
+ # Get early warning of new lints which are regularly introduced in beta channels.
+ toolchain: [stable, beta]
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install ${{ matrix.toolchain }}
+ uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: ${{ matrix.toolchain }}
+ components: clippy
+ - name: cargo clippy
+ uses: giraffate/clippy-action@v1
+ with:
+ reporter: 'github-pr-check'
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ doc:
+ # run docs generation on nightly rather than stable. This enables features like
+ # https://doc.rust-lang.org/beta/unstable-book/language-features/doc-cfg.html which allows an
+ # API to be documented as only available on some specific platforms.
+ runs-on: ubuntu-latest
+ name: nightly / doc
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install nightly
+ uses: dtolnay/rust-toolchain@nightly
+ - name: cargo doc
+ run: cargo doc --no-deps --all-features
+ env:
+ RUSTDOCFLAGS: --cfg docsrs
+ hack:
+ # cargo-hack checks combinations of feature flags to ensure that features are all additive
+ # which is required for feature unification
+ runs-on: ubuntu-latest
+ name: ubuntu / stable / features
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install stable
+ uses: dtolnay/rust-toolchain@stable
+ - name: cargo install cargo-hack
+ uses: taiki-e/install-action@cargo-hack
+ # intentionally no target specifier; see https://github.com/jonhoo/rust-ci-conf/pull/4
+ # --feature-powerset runs for every combination of features
+ - name: cargo hack
+ run: cargo hack --feature-powerset check
diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml
new file mode 100644
index 0000000..02aa275
--- /dev/null
+++ b/.github/workflows/scheduled.yml
@@ -0,0 +1,58 @@
+# Run scheduled (rolling) jobs on a nightly basis, as your crate may break independently of any
+# given PR. E.g., updates to rust nightly and updates to this crate's dependencies. See check.yml for
+# information about how the concurrency cancellation and workflow triggering works
+permissions:
+ contents: read
+on:
+ push:
+ branches: [main]
+ pull_request:
+ schedule:
+ - cron: '7 7 * * *'
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+name: rolling
+jobs:
+ # https://twitter.com/mycoliza/status/1571295690063753218
+ nightly:
+ runs-on: ubuntu-latest
+ name: ubuntu / nightly
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install nightly
+ uses: dtolnay/rust-toolchain@nightly
+ - name: cargo generate-lockfile
+ if: hashFiles('Cargo.lock') == ''
+ run: cargo generate-lockfile
+ - name: cargo test --locked
+ run: cargo test --locked --all-features --all-targets
+ # https://twitter.com/alcuadrado/status/1571291687837732873
+ update:
+ # This action checks that updating the dependencies of this crate to the latest available that
+ # satisfy the versions in Cargo.toml does not break this crate. This is important as consumers
+ # of this crate will generally use the latest available crates. This is subject to the standard
+ # Cargo semver rules (i.e cargo does not update to a new major version unless explicitly told
+ # to).
+ runs-on: ubuntu-latest
+ name: ubuntu / beta / updated
+ # There's no point running this if no Cargo.lock was checked in in the first place, since we'd
+ # just redo what happened in the regular test job. Unfortunately, hashFiles only works in if on
+ # steps, so we repeat it.
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install beta
+ if: hashFiles('Cargo.lock') != ''
+ uses: dtolnay/rust-toolchain@beta
+ - name: cargo update
+ if: hashFiles('Cargo.lock') != ''
+ run: cargo update
+ - name: cargo test
+ if: hashFiles('Cargo.lock') != ''
+ run: cargo test --locked --all-features --all-targets
+ env:
+ RUSTFLAGS: -D deprecated
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..470ef8d
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,109 @@
+# This is the main CI workflow that runs the test suite on all pushes to main and all pull requests.
+# It runs the following jobs:
+# - required: runs the test suite on ubuntu with stable and beta rust toolchains
+# - minimal: runs the test suite with the minimal versions of the dependencies that satisfy the
+# requirements of this crate, and its dependencies
+# - os-check: runs the test suite on mac and windows
+# - coverage: runs the test suite and collects coverage information
+# See check.yml for information about how the concurrency cancellation and workflow triggering works
+permissions:
+ contents: read
+on:
+ push:
+ branches: [main]
+ pull_request:
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+name: test
+jobs:
+ required:
+ runs-on: ubuntu-latest
+ name: ubuntu / ${{ matrix.toolchain }}
+ strategy:
+ matrix:
+ # run on stable and beta to ensure that tests won't break on the next version of the rust
+ # toolchain
+ toolchain: [stable, beta]
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install ${{ matrix.toolchain }}
+ uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: ${{ matrix.toolchain }}
+ - name: cargo generate-lockfile
+ # enable this ci template to run regardless of whether the lockfile is checked in or not
+ if: hashFiles('Cargo.lock') == ''
+ run: cargo generate-lockfile
+ # https://twitter.com/jonhoo/status/1571290371124260865
+ - name: cargo test --locked
+ run: cargo test --locked --all-features --all-targets
+ # https://github.com/rust-lang/cargo/issues/6669
+ - name: cargo test --doc
+ run: cargo test --locked --all-features --doc
+ minimal:
+ # This action chooses the oldest version of the dependencies permitted by Cargo.toml to ensure
+ # that this crate is compatible with the minimal version that this crate and its dependencies
+ # require. This will pick up issues where this crate relies on functionality that was introduced
+ # later than the actual version specified (e.g., when we choose just a major version, but a
+ # method was added after this version).
+ #
+ # This particular check can be difficult to get to succeed as often transitive dependencies may
+ # be incorrectly specified (e.g., a dependency specifies 1.0 but really requires 1.1.5). There
+ # is an alternative flag available -Zdirect-minimal-versions that uses the minimal versions for
+ # direct dependencies of this crate, while selecting the maximal versions for the transitive
+ # dependencies. Alternatively, you can add a line in your Cargo.toml to artificially increase
+ # the minimal dependency, which you do with e.g.:
+ # ```toml
+ # # for minimal-versions
+ # [target.'cfg(any())'.dependencies]
+ # openssl = { version = "0.10.55", optional = true } # needed to allow foo to build with -Zminimal-versions
+ # ```
+ # The optional = true is necessary in case that dependency isn't otherwise transitively required
+ # by your library, and the target bit is so that this dependency edge never actually affects
+ # Cargo build order. See also
+ # https://github.com/jonhoo/fantoccini/blob/fde336472b712bc7ebf5b4e772023a7ba71b2262/Cargo.toml#L47-L49.
+ # This action is run on ubuntu with the stable toolchain, as it is not expected to fail
+ runs-on: ubuntu-latest
+ name: ubuntu / stable / minimal-versions
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install stable
+ uses: dtolnay/rust-toolchain@stable
+ - name: Install nightly for -Zminimal-versions
+ uses: dtolnay/rust-toolchain@nightly
+ - name: rustup default stable
+ run: rustup default stable
+ - name: cargo update -Zminimal-versions
+ run: cargo +nightly update -Zminimal-versions
+ - name: cargo test
+ run: cargo test --locked --all-features --all-targets
+ os-check:
+ # run cargo test on mac and windows
+ runs-on: ${{ matrix.os }}
+ name: ${{ matrix.os }} / stable
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [macos-latest, windows-latest]
+ steps:
+ # if your project needs OpenSSL, uncomment this to fix Windows builds.
+ # it's commented out by default as the install command takes 5-10m.
+ # - run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
+ # if: runner.os == 'Windows'
+ # - run: vcpkg install openssl:x64-windows-static-md
+ # if: runner.os == 'Windows'
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Install stable
+ uses: dtolnay/rust-toolchain@stable
+ - name: cargo generate-lockfile
+ if: hashFiles('Cargo.lock') == ''
+ run: cargo generate-lockfile
+ - name: cargo test
+ run: cargo test --locked --all-features --all-targets
diff --git a/.gitignore b/.gitignore
index 38da3ee..0176326 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,2 @@
/target
-Cargo.lock
*test*
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..b6841db
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,352 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
+
+[[package]]
+name = "async-recursion"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "backtrace"
+version = "0.3.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chrono"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "158b0bd7d75cbb6bf9c25967a48a2e9f77da95876b858eadfabaa99cd069de6e"
+dependencies = [
+ "num",
+ "time",
+]
+
+[[package]]
+name = "filetools"
+version = "0.3.0"
+dependencies = [
+ "anyhow",
+ "async-recursion",
+ "chrono",
+ "regex",
+ "tokio",
+ "uuid",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "gimli"
+version = "0.28.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
+
+[[package]]
+name = "hermit-abi"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f"
+
+[[package]]
+name = "libc"
+version = "0.2.153"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
+
+[[package]]
+name = "memchr"
+version = "2.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "num"
+version = "0.1.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4703ad64153382334aa8db57c637364c322d3372e097840c72000dabdcf6156e"
+dependencies = [
+ "num-integer",
+ "num-iter",
+ "num-traits",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
+dependencies = [
+ "autocfg",
+ "num-traits",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "object"
+version = "0.32.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.78"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "regex"
+version = "1.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
+
+[[package]]
+name = "syn"
+version = "2.0.48"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "time"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "tokio"
+version = "1.35.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104"
+dependencies = [
+ "backtrace",
+ "num_cpus",
+ "pin-project-lite",
+ "tokio-macros",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+
+[[package]]
+name = "uuid"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/Cargo.toml b/Cargo.toml
index e3c402d..2ab5be2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,17 +1,23 @@
[package]
name = "filetools"
-version = "0.2.0"
+version = "0.3.0"
authors = ["Graham Keenan "]
-edition = "2018"
-description = "Port of Jonathan Grizou's filetools Python library (https://github.com/jgrizou/filetools)"
+edition = "2021"
+description = "Helper functions for path operations"
repository = "https://github.com/Tyrannican/filetools-rs"
-license = "MIT"
-keywords = ["files", "utility", "filesystem"]
+license = "MIT OR Apache-2.0"
+keywords = ["files", "utility", "filesystem", "path"]
categories = ["filesystem"]
readme = "README.md"
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
[dependencies]
+anyhow = "1.0.79"
+async-recursion = "1.0.5"
chrono = "0.3"
-uuid = { version = "0.8.1", features = ["v4"]}
\ No newline at end of file
+regex = "1.10.3"
+tokio = { version = "1.35.1", features = ["fs"] }
+uuid = { version = "0.8.1", features = ["v4"]}
+
+[dev-dependencies]
+tokio = { version = "1.35.1", features = ["macros", "rt-multi-thread"] }
+regex = "1.10.3"
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..1e5006d
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644
index 0000000..632d183
--- /dev/null
+++ b/LICENSE-MIT
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) 2024 Graham Keenan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/README.md b/README.md
index 29afb83..371161c 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,45 @@
# Filetools
-Simple port of the filetools library written by Jonathan Grizou here: https://github.com/jgrizou/filetools
-This library allows for simple filename generation and finding all files/folders in a directory.
+Simple crate for performing some small `Path` operations in Rust.
+
+Offers the user the ability to:
+
+* Create directories (single / multiple at a time)
+* Check given filepaths match a pattern
+* List all files / directories in a path
+ * This can be just the files / directories inside the path root
+ * This can also include files / directories in **ALL** subdirectories contained in the path
+* List files / directories as above but filter the results based on a Filter Pattern
+* Some general naming functions for creating `PathBuf` names
+
+More will be added in the future but this should suffice for small path operations.
## Usage
+
Add to your `Cargo.toml`
+
```toml
[dependencies]
-filetools = "0.2.0"
+filetools = "0.3.0"
+```
+
+Then import into your project:
+
+```rust
+use filetools::{FtFilter, list_nested_files_with_filter};
+
+#[tokio::main]
+async fn main() -> anyhow::Result<()> {
+ // Get all Lua files in the Neovim directory
+ let root_path = "/home/user/.config/nvim";
+ let filter = FtFilter::Raw("lua".to_string());
+ let lua_files = list_nested_files_with_filter(&root_path, filter).await?;
+
+ // Delete them all, we hate Lua
+ for lua_file in lua_files.into_iter() {
+ tokio::fs::remove_file(lua_file).await?;
+ }
+
+ Ok(())
+}
```
diff --git a/src/filehelpers.rs b/src/filehelpers.rs
deleted file mode 100644
index 3a3574d..0000000
--- a/src/filehelpers.rs
+++ /dev/null
@@ -1,167 +0,0 @@
-//! Functions that help in iterating files and folders
-//!
-//! # Examples
-//!
-//! ```
-//! use std::path::PathBuf;
-//! use filetools::filehelpers;
-//!
-//! fn main() -> Result<(), Box<dyn std::error::Error>> {
-//! /// Creating a directory
-//! let new_path = PathBuf::from("./test");
-//! let _ = filehelpers::ensure_dir(new_path)?;
-//!
-//! /// Iterating through all files in a directory
-//! let nr_search = PathBuf::from("./test");
-//! let r_search = PathBuf::from("./test");
-//!
-//! // Non-recursive search of directroy, just files in search folder
-//! let non_recursed_files = filehelpers::list_files(nr_search, false);
-//!
-//! // Recursive search of directory, gets all files in directory and all sub-directories
-//! let recursed_files = filehelpers::list_files(r_search, true);
-//!
-//! /// Iterating through all folders in a directory
-//! let nr_search = PathBuf::from("./test");
-//! let r_search = PathBuf::from("./test");
-//!
-//! // Non-recursive search for all folders, just folders in search directory
-//! let non_recursive_folders = filehelpers::list_folders(nr_search, false);
-//!
-//! // Recursive search of all folders, all subfolders in a directory as well
-//! let recursive_folders = filehelpers::list_folders(r_search, true);
-//!
-//! Ok(())
-//! }
-//! ```
-//!
-
-use std::fs;
-use std::path::{Path, PathBuf};
-
-/// Ensures a directory is created from a `PathBuf`
-/// Does nothing if the directory already exists
-///
-/// Returns `Ok` if successful, `Err` if not
-pub fn ensure_dir(dir_name: PathBuf) -> Result <(), Box<dyn std::error::Error>> {
- let path = Path::new(&dir_name);
- if !path.exists() {
- fs::create_dir(path)?;
- }
-
- Ok(())
-}
-
-/// Determines if a `path` if a subdirectory of the given `directory`
-/// Creates the absolute paths and checks the `ancestors` of `path` to determine if a subdirectory
-///
-/// Note::Not entirely sure this works perfectly fine, use at own risk
-///
-/// Returns `Ok(true)` if `path` is a subdirectory, `Ok(false)` if not, `Err` if error occured
-pub fn is_subdir(path: PathBuf, directory: PathBuf) -> Result<bool, Box<dyn std::error::Error>> {
- // Get absolute paths
- let directory = fs::canonicalize(Path::new(&directory))?;
- let path = fs::canonicalize(Path::new(&path))?;
-
- let mut is_subdir = Ok(false);
-
- // Iterate through all ancestors of the path
- for ancestor in path.ancestors() {
- // Found directory, current path is a subdirectory
- if ancestor == directory {
- is_subdir = Ok(true);
- break;
- }
- }
-
- is_subdir
-}
-
-/// Determines if a given `PathBuf` contains a search string
-///
-/// Returns `true` if search string present, else `false`
-pub fn path_contains(path: PathBuf, search_str: &str) -> bool {
- // Path successfully converted to str
- if let Some(p) = path.to_str() {
- // Contains string, return true
- if p.contains(search_str) {
- return true
- }
- }
-
- // Search string not found
- false
-}
-
-/// Lists all files in a given `path`
-/// If `recursive` is set, iterates through all subfolders recursively to find all files
-/// If `recursive` not set, just finds all files in the current directory
-///
-/// Return `Vec` of all files in a directory and subdirectories
-pub fn list_files(path: PathBuf, recursive: bool) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
- let mut found_files = Vec::new();
- let search_path = Path::new(&path);
-
- // Iterate through all entries in the directory
- for entry in fs::read_dir(search_path)? {
- // Get File metadata
- let entry = entry?;
- let path = entry.path();
- let metadata = fs::metadata(&path)?;
-
- // Entry is a file, add to array
- if metadata.is_file() {
- found_files.push(path);
- } else if metadata.is_dir() && recursive {
- // Found a directory and recursively looking
- let subfiles = list_files(path, recursive)?;
-
- // Add all found subfiles to array
- for file in subfiles.iter() {
- found_files.push(file.to_path_buf());
- }
- } else {
- continue;
- }
- }
-
- Ok(found_files)
-}
-
-/// Lists all folders in a given `path`
-/// If `recursive` is set, iterates through all subfolders recursively to find all folders
-/// If `recursive` not set, just finds all files in the current directory
-/// Mirrors the functionality of `filehelpers::list_files()`
-///
-/// Return `Vec` of all folders in a directory and subdirectories
-pub fn list_folders(path: PathBuf, recursive: bool) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
- let mut found_folders = Vec::new();
- let search_path = Path::new(&path);
-
- // Iterate through all entries in the directory
- for entry in fs::read_dir(search_path)? {
- // Get File metadata
- let entry = entry?;
- let path = entry.path();
- let metadata = fs::metadata(&path)?;
-
- // Entry is a directory, add to array
- if metadata.is_dir() {
- found_folders.push(path);
-
- // Recursively looking
- if recursive {
- // Search recursively
- let f_path = entry.path();
- let subfolders = list_folders(f_path, recursive)?;
-
- // Add all subfolders to array
- for subfolder in subfolders.iter() {
- found_folders.push(subfolder.to_path_buf());
- }
- }
- }
- }
-
- Ok(found_folders)
-}
\ No newline at end of file
diff --git a/src/filenaming.rs b/src/filenaming.rs
deleted file mode 100644
index df0ac95..0000000
--- a/src/filenaming.rs
+++ /dev/null
@@ -1,79 +0,0 @@
-//! Functions that generate PathBuf filenames
-//!
-//! # Examples
-//!
-//! ```
-//! use std::path::PathBuf;
-//! use filetools::filenaming;
-//!
-//! fn main() {
-//! let custom_name = filenaming::generate_name("test", ".pdf");
-//! assert_eq!(custom_name, PathBuf::from("test.pdf"));
-//!
-//! // Name will be suffixed by the current time it was generated
-//! let timestamped_name = filenaming::generate_default_timestamped_name("test", ".pdf");
-//!
-//! // Random name is a UUIDv4 string suffixed by the extension
-//! let random_name = filenaming::generate_random_name(".pdf");
-//!
-//! // N-digit name is a number prefixed by X zeros
-//! let n_digit_name = filenaming::generate_n_digit_name(5, 4, ".pdf");
-//! assert_eq!(n_digit_name, PathBuf::from("0005.pdf"));
-//! }
-//! ```
-//!
-
-use std::path::PathBuf;
-use uuid::Uuid;
-use chrono::prelude::*;
-
-/// Generates a `PathBuf` from a given name and extension
-///
-/// Returns a `PathBuf` of the form `name.ext`
-pub fn generate_name(name: &str, ext: &str) -> PathBuf {
- PathBuf::from(format!("{}{}", name, ext))
-}
-
-/// Generates a `PathBuf` from a name and extention with a default timestamp of "DD_MM_YY_HHMMSS"
-/// If `fname` is "", just uses the timestamp and extension
-///
-/// Returns `PathBuf` in the form `fname_timestamp.ext`
-pub fn generate_default_timestamped_name(fname: &str, ext: &str) -> PathBuf {
- let dt = UTC::now().format("%d_%m_%Y_%Hh%Mm%Ss");
-
- if fname.len() == 0 {
- PathBuf::from(format!("{}{}", dt, ext))
- } else {
- PathBuf::from(format!("{}_{}{}", fname, dt, ext))
- }
-}
-
-/// Generates a `PathBuf` from a name and extension with a given timestamp format
-/// If `fname` is an empty string, returns just the timestamp suffixed with the extension.
-///
-/// Returns `PathBuf` in the form `fname_timestamp.ext`
-pub fn generate_timestamped_name(fname: &str, ext: &str, fmt: &str) -> PathBuf {
- let dt = UTC::now().format(fmt);
-
- if fname.len() == 0 {
- PathBuf::from(format!("{}{}", dt, ext))
- } else {
- PathBuf::from(format!("{}_{}{}", fname, dt, ext))
- }
-}
-
-/// Generates a random UUIDv4 `PathBuf`
-///
-/// Returns `PathBuf` in the form `uuid.ext`
-pub fn generate_random_name(ext: &str) -> PathBuf {
- let unique = Uuid::new_v4();
-
- PathBuf::from(format!("{}{}", unique.to_string(), ext))
-}
-
-/// Generates a `PathBuf` from a `number` prefixed by `n_digits` zeros
-///
-/// Returns `PathBuf` of the form e.g `0005.ext`
-pub fn generate_n_digit_name(number: i32, n_digits: usize, ext: &str) -> PathBuf {
- PathBuf::from(format!("{:0fill$}{}", number, ext, fill=n_digits))
-}
\ No newline at end of file
diff --git a/src/lib.rs b/src/lib.rs
index 77b3255..9cf23dc 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,104 +1,998 @@
-//! Simple functions to help with file naming and file iteration
-//! Ported from the [filetools](https://github.com/jgrizou/filetools) library written by Jonathan Grizou
-//!
-//! # Examples
-//!
-//! ```
-//! use filetools::filehelpers;
-//! use std::path::PathBuf;
-//!
-//! fn main() -> Result <(), Box<dyn std::error::Error>> {
-//! /// Creating a directory
-//! let new_path = PathBuf::from("./test");
-//! let _ = filehelpers::ensure_dir(new_path)?;
-//!
-//! /// Iterating through all files in a directory
-//! let nr_search = PathBuf::from("./test");
-//! let r_search = PathBuf::from("./test");
-//!
-//! // Non-recursive search of directroy, just files in search folder
-//! let non_recursed_files = filehelpers::list_files(nr_search, false);
-//!
-//! // Recursive search of directory, gets all files in directory and all sub-directories
-//! let recursed_files = filehelpers::list_files(r_search, true);
-//!
-//! /// Iterating through all folders in a directory
-//! let nr_search = PathBuf::from("./test");
-//! let r_search = PathBuf::from("./test");
-//!
-//! // Non-recursive search for all folders, just folders in search directory
-//! let non_recursive_folders = filehelpers::list_folders(nr_search, false);
-//!
-//! // Recursive search of all folders, all subfolders in a directory as well
-//! let recursive_folders = filehelpers::list_folders(r_search, true);
-//!
+//! Crate to help with simple file / folder operations.
+//!
+//! Provides helper functions to:
+//!
+//! * Create directories
+//! * Check filepaths contain a pattern
+//! * List files / directories both iteratively and recursively
+//! * List files / directories both iteratively and recursively with filters
+//! * Generate names for files / directories
+//!
+//! ## Async vs Sync
+//!
+//! The operations in this crate are designed for async/await, however sync variations
+//! of the operations exist in the [`crate::sync`] module.
+//!
+//! # Example
+//!
+//! ```rust,no_run
+//! use filetools::{FtFilter, list_nested_files_with_filter};
+//!
+//! #[tokio::main]
+//! async fn main() -> anyhow::Result<()> {
+//! // Get all Lua files in the Neovim directory
+//! let root_path = "/home/user/.config/nvim";
+//! let filter = FtFilter::Raw("lua".to_string());
+//! let lua_files = list_nested_files_with_filter(&root_path, filter).await?;
+//!
+//! // Delete them all, we hate Lua
+//! for lua_file in lua_files.into_iter() {
+//! tokio::fs::remove_file(lua_file).await?;
+//! }
+//!
//! Ok(())
-//! }
+//! }
//! ```
-//!
-pub mod filenaming;
-pub mod filehelpers;
+pub mod naming;
+pub mod sync;
+pub(crate) mod util;
+
+use anyhow::{Context, Result};
+use regex::Regex;
+use std::path::{Component, Path, PathBuf};
+use tokio::fs;
+
+use util::{iteritems, FtIterItemState};
+
+/// Filter types for listing files / directories
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::FtFilter;
+/// use std::path::PathBuf;
+/// use regex::Regex;
+///
+/// // Use a raw String filter to match an item containing ".log"
+/// let filter = FtFilter::Raw(".log".to_string());
+///
+/// // Use the Path filter to match paths that contain `sub/path/to/math`
+/// let filter = FtFilter::Path(PathBuf::from("sub/path/to/match"));
+///
+/// // Use a Regex filter to match all files ending with `.rs`
+/// let re = Regex::new(r"(.*)\.rs").expect("unable to create regex");
+/// let filter = FtFilter::Regex(re);
+/// ```
+#[derive(Debug)]
+pub enum FtFilter {
+ /// Filter based on a raw String pattern
+ Raw(String),
+
+ /// Filter based on a PathBuf pattern
+ Path(PathBuf),
+
+ /// Filter based on a regex pattern
+ Regex(Regex),
+}
+
+/// Checks if a given pattern is considered a subdirectory of the given path
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::is_subdir;
+///
+/// let path = "directory/to/check/for/sub/directory";
+/// let check = "for";
+///
+/// // As "for" is a subdirectory in this path, this returns true
+/// let result = is_subdir(path, check);
+/// ```
+pub fn is_subdir(path: impl AsRef<Path>, dir: impl AsRef<Path>) -> bool {
+ for component in path.as_ref().components() {
+ if let Component::Normal(p) = component {
+ if p == dir.as_ref().as_os_str() {
+ return true;
+ }
+ }
+ }
+
+ false
+}
+
+/// Determines if a path contains a given pattern
+///
+/// Converts both the path and the pattern to a string and performs simple matching
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::path_contains;
+///
+/// let path = "This/is/a/path/with/a/file.txt";
+/// let pattern = "file.txt";
+///
+/// // The path contains the pattern file.txt so this returns true
+/// let result = path_contains(path, pattern);
+/// ```
+pub fn path_contains(path: impl AsRef<Path>, pattern: impl AsRef<Path> /* maybe */) -> bool {
+ if let Some(p) = path.as_ref().to_str() {
+ if let Some(pat) = pattern.as_ref().to_str() {
+ return p.contains(pat);
+ }
+ }
+
+ false
+}
+
+/// Creates a directory at the given path.
+///
+/// If the directory already exists, nothing is done
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::ensure_directory`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::ensure_directory;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let target_path = "directory/to/create";
+/// ensure_directory(target_path).await?;
+///
+/// Ok(())
+/// }
+/// ```
+pub async fn ensure_directory(dir: impl AsRef<Path>) -> Result<()> {
+ if !dir.as_ref().exists() {
+ fs::create_dir_all(dir)
+ .await
+ .context("unable to create directory")?;
+ }
+
+ Ok(())
+}
+
+/// Creates multiple directories inside the target path.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::create_multiple_directories`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::create_multiple_directories;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let root = "dir/to/populate";
+/// let to_create = ["dir1", "dir2", "dir3"];
+///
+/// // Will create:
+/// // `dir/to/populate/dir1`
+/// // `dir/to/populate/dir2`
+/// // `dir/to/populate/dir3`
+/// create_multiple_directories(root, &to_create);
+///
+/// Ok(())
+/// }
+/// ```
+pub async fn create_multiple_directories(
+    path: impl AsRef<Path>,
+    directories: &[impl AsRef<Path>],
+) -> Result<()> {
+ for dir in directories {
+ let target = path.as_ref().join(dir);
+ ensure_directory(target).await?;
+ }
+
+ Ok(())
+}
+
+/// Creates a range of numeric folders in the given path
+///
+/// Directories can be padded with X zeros using the `fill` parameter.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::create_numeric_directories`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::create_numeric_directories;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let root = "some/root/path";
+///
+/// // This will create the following directories:
+/// // "some/root/path/0000"
+/// // ...
+/// // "some/root/path/0099"
+/// create_numeric_directories(root, 0, 100, 4).await?;
+/// Ok(())
+/// }
+/// ```
+pub async fn create_numeric_directories(
+    path: impl AsRef<Path>,
+ start: usize,
+ end: usize,
+ fill: usize,
+) -> Result<()> {
+ for i in start..end {
+ let name = path
+ .as_ref()
+ .join(naming::generate_n_digit_name(i, fill, ""));
+ ensure_directory(name)
+ .await
+ .context("creating numeric directories")?;
+ }
+
+ Ok(())
+}
+
+/// Lists all files in the given directory (not including subdirectories).
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_files`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The path given is a file and not a directory
+/// * The given path does not exist
+///
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::list_files;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let target_folder = "folder/containing/files";
+///
+/// // Will return a Vec containing all files in the folder
+/// let files = list_files(target_folder).await?;
+/// Ok(())
+/// }
+/// ```
+pub async fn list_files<P: AsRef<Path> + Send>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::File, None).await
+}
+
+/// Lists all files in a directory including ALL subdirectories
+///
+/// Use responsibly.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_nested_files`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::list_nested_files;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let target_folder = "directory/containing/nested/files";
+///
+/// // This will return a Vec of ALL files contained within the directory
+/// // (including in all subdirectories)
+/// let files = list_nested_files(target_folder).await?;
+/// Ok(())
+/// }
+/// ```
+pub async fn list_nested_files<P: AsRef<Path> + Send>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::RFile, None).await
+}
+/// Lists files in a folder (not including subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_files_with_filter`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{list_files_with_filter, FtFilter};
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let root = "some/path/containing/files";
+///
+/// // List all files containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_files_with_filter(&root, filter).await?;
+///
+/// // List all files containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_files_with_filter(&root, filter).await?;
+///
+/// // List all files ending with `.rs`
+/// let re = Regex::new(r"(.*)\.rs").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_files_with_filter(&root, filter).await?;
+///
+/// Ok(())
+/// }
+/// ```
+pub async fn list_files_with_filter<P: AsRef<Path> + Send>(
+    path: P,
+    pattern: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::File, Some(&pattern)).await
+}
+
+/// Lists files in a folder (including ALL subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// Use responsibly.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_nested_files_with_filter`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{list_nested_files_with_filter, FtFilter};
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let root = "some/path/containing/nested/folders/with/files";
+///
+/// // List all files containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_nested_files_with_filter(&root, filter).await?;
+///
+/// // List all files containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_nested_files_with_filter(&root, filter).await?;
+///
+/// // List all files ending with `.rs`
+/// let re = Regex::new(r"(.*)\.rs").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_nested_files_with_filter(&root, filter).await?;
+///
+/// Ok(())
+/// }
+/// ```
+pub async fn list_nested_files_with_filter<P: AsRef<Path> + Send>(
+    path: P,
+    pattern: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::RFile, Some(&pattern)).await
+}
+
+/// Lists all directories in the given directory (not including subdirectories).
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_directories`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::list_directories;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let target_folder = "directory/containing/other/directories";
+///
+/// // Will return a Vec containing all directories in the folder
+/// let directories = list_directories(target_folder).await?;
+/// Ok(())
+/// }
+/// ```
+pub async fn list_directories<P: AsRef<Path> + Send>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::Dir, None).await
+}
+
+/// Lists all directories in a directory including ALL subdirectories
+///
+/// Use responsibly.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_nested_directories`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::list_nested_directories;
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let target_folder = "directory/containing/nested/files";
+///
+/// // This will return a Vec of ALL directories contained within the directory
+/// // (including in all subdirectories)
+/// let directories = list_nested_directories(target_folder).await?;
+/// Ok(())
+/// }
+/// ```
+pub async fn list_nested_directories<P: AsRef<Path> + Send>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ iteritems(path, FtIterItemState::RDir, None).await
+}
+
+/// Lists directories in a given directory (not including subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_directories_with_filter`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{list_directories_with_filter, FtFilter};
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let root = "some/path/containing/dirs";
+///
+/// // List all dirs containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_directories_with_filter(&root, filter).await?;
+///
+/// // List all dirs containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_directories_with_filter(&root, filter).await?;
+///
+/// // List all dirs ending with `_test`
+/// let re = Regex::new(r"(.*)_test").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_directories_with_filter(&root, filter).await?;
+///
+/// Ok(())
+/// }
+/// ```
+pub async fn list_directories_with_filter<P: AsRef<Path> + Send>(
+    path: P,
+    filter: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::Dir, Some(&filter)).await
+}
+
+/// Lists directories in a given directory (including ALL subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// Use responsibly.
+///
+/// ## Sync
+///
+/// For the `sync` version, see [`crate::sync::list_nested_directories_with_filter`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{list_nested_directories_with_filter, FtFilter};
+///
+/// #[tokio::main]
+/// async fn main() -> anyhow::Result<()> {
+/// let root = "some/path/containing/dirs";
+///
+/// // List all dirs containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_nested_directories_with_filter(&root, filter).await?;
+///
+/// // List all dirs containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_nested_directories_with_filter(&root, filter).await?;
+///
+/// // List all dirs ending with `_test`
+/// let re = Regex::new(r"(.*)_test").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_nested_directories_with_filter(&root, filter).await?;
+///
+/// Ok(())
+/// }
+/// ```
+pub async fn list_nested_directories_with_filter<P: AsRef<Path> + Send>(
+    path: P,
+    filter: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems(path, FtIterItemState::RDir, Some(&filter)).await
+}
#[cfg(test)]
mod tests {
- use crate::{filehelpers, filenaming};
+ use super::*;
+ use anyhow::{Context, Result};
use std::path::PathBuf;
+ use util::TempPath;
- #[test]
- fn iterate_files_and_folders() -> Result<(), Box> {
- let files = filehelpers::list_files(PathBuf::from("src"), true)?;
- let folders = filehelpers::list_folders(PathBuf::from("."), false)?;
+ // This is kind of redundant as it just wraps `tokio::fs::create_dir_all`
+ // but yay for test coverage i suppose
+ #[tokio::test]
+ async fn creates_a_directory() -> Result<()> {
+ let tmp = std::env::temp_dir();
- // filehelpers.rs filenaming.rs lib.rs
- assert_eq!(files.len(), 3);
+ // Creates a single directory
+ let single_path = tmp.join("create_dir");
+ ensure_directory(&single_path)
+ .await
+ .context("create directory single")?;
+
+ assert!(single_path.exists());
+
+ // Nested directories
+ let nested_path = tmp.join("create_dir/test/this/is/nested");
+ ensure_directory(&nested_path)
+ .await
+ .context("create directory nested")?;
+
+ assert!(nested_path.exists());
+
+ std::fs::remove_dir_all(single_path)?;
- // target/ src/ .git/
- assert_eq!(folders.len(), 4);
Ok(())
}
- #[test]
- fn folder_creation() {
- let _ = filehelpers::ensure_dir(PathBuf::from("./test/func"));
+ #[tokio::test]
+ async fn checks_if_a_directory_is_a_subdirectory() -> Result<()> {
+ let root = TempPath::new("is_subdir").await?;
+ let nested = root
+ .nest_folders(vec!["this", "is", "a", "nested", "tmp", "dir"])
+ .await?;
+ let mut result = is_subdir(&nested.path, "nested");
+
+ assert!(result);
+
+ result = is_subdir(&nested.path, "not_valid");
+
+ assert!(!result);
+ Ok(())
}
#[test]
- fn generate_filenames() -> Result<(), Box> {
- let name1 = filenaming::generate_default_timestamped_name("", ".pdf");
- let name2 = filenaming::generate_default_timestamped_name("test_file", ".dxf");
- let name3 = filenaming::generate_random_name(".docx");
- let name4 = filenaming::generate_n_digit_name(55, 6, ".pdf");
+ fn check_path_contains_subpath() {
+ // Basic str
+ let main = "I/am/a/path/hello/there";
+ assert!(path_contains(main, "a/path"));
+ assert!(!path_contains(main, "not"));
- println!("Name1: {:?}", name1);
- println!("Name2: {:?}", name2);
- println!("Name3: {:?}", name3);
- println!("Name4: {:?}", name4);
+ // Check it works for paths
+ let main = Path::new(main);
+ assert!(path_contains(main, Path::new("a/path")));
+ assert!(!path_contains(main, Path::new("not")));
- assert_eq!(name4, PathBuf::from("000055.pdf"));
+ // Pathbufs?
+ let main = PathBuf::from("I/am/a/path/hello/there");
+ assert!(path_contains(&main, PathBuf::from("a/path")));
+ assert!(!path_contains(main, PathBuf::from("not")));
+
+ // What about strings?
+ assert!(path_contains(
+ String::from("I/am/a/path/hello/there"),
+ String::from("a/path")
+ ));
+ assert!(!path_contains(
+ String::from("I/am/a/path/hello/there"),
+ String::from("not")
+ ));
+ }
+
+ #[tokio::test]
+ async fn check_list_files_works() -> Result<()> {
+ let root = TempPath::new("lf_test").await?;
+ root.multi_file(vec!["first.rs", "second.c", "third.js", "fourth.rb"])
+ .await?;
+
+ let res = list_files(root.path.clone()).await?;
+ assert_eq!(res.len(), 4);
+
+ assert!(list_files("IDoNotExistAsADirectoryOrShouldntAtLeAst")
+ .await
+ .is_err());
Ok(())
}
- #[test]
- fn path_does_contains() -> Result<(), Box> {
- let path1 = PathBuf::from("./target/doc/cfg_if");
- let path2 = PathBuf::from("./target/chrono/datetime");
- let path3 = PathBuf::from("./target");
+ #[tokio::test]
+ async fn check_list_nested_files_works() -> Result<()> {
+ let root = TempPath::new("lfr_test").await?;
+ let ffolder = root.new_folder("ffolder").await?;
+ let sfolder = root.new_folder("sfolder").await?;
+ let tfolder = root.new_folder("tfolder").await?;
- let target_paths: Vec = filehelpers::list_files(path3, true)?
- .into_iter()
- .filter(|x| filehelpers::path_contains(x.to_path_buf(), "doc"))
- .collect();
+ root.new_file("initial.pdf").await?;
+ ffolder.new_file("first.rs").await?;
+ sfolder.multi_file(vec!["second.txt", "third.php"]).await?;
+ tfolder.new_file("fourth.cpp").await?;
- assert_eq!(filehelpers::path_contains(path1, "doc"), true);
- assert_eq!(filehelpers::path_contains(path2, "debug"), false);
+ let res = list_nested_files(&root.path).await?;
+ assert_eq!(res.len(), 5);
- for path in target_paths.iter() {
- assert_eq!(filehelpers::path_contains(path.to_path_buf(), "doc"), true);
+ assert!(list_nested_files("IDoNotExistAsADirectoryOrShouldntAtLeAst")
+ .await
+ .is_err());
+
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn check_list_directories_works() -> Result<()> {
+ let root = TempPath::new("lfolder_test").await?;
+ root.multi_folder(vec!["folder1", "folder2", "folder3", "folder4"])
+ .await?;
+
+ let res = list_directories(root.path.clone()).await?;
+ assert_eq!(res.len(), 4);
+
+ assert!(list_directories("non-existant_path").await.is_err());
+
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn check_list_nested_directories_works() -> Result<()> {
+ let root = TempPath::new("lfolderrec_test").await?;
+ root.multi_folder(vec!["folder1", "folder2"]).await?;
+
+ let f1 = TempPath::new(root.join("folder1")).await?;
+ f1.multi_folder(vec!["sub1", "sub2", "sub3"]).await?;
+
+ let s2 = TempPath::new(f1.join("sub2")).await?;
+ s2.multi_folder(vec!["deep1", "deep2"]).await?;
+
+ let res = list_nested_directories(root.path.clone()).await?;
+ assert_eq!(res.len(), 7);
+
+ assert!(list_nested_directories("not-a-valId_pathd").await.is_err());
+
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn numeric_directories() -> Result<()> {
+ let tmp = TempPath::new("numeric_directories").await?;
+ create_numeric_directories(&tmp.path, 0, 100, 4).await?;
+ let mut folders = list_directories(&tmp.path).await?;
+ folders.sort();
+ assert_eq!(folders.len(), 100);
+
+ for (i, folder) in folders.into_iter().enumerate() {
+ let test = &tmp.path.join(format!("{:0fill$}", i, fill = 4));
+ assert_eq!(&folder, test);
}
Ok(())
}
+
+ #[tokio::test]
+ async fn multiple_directory_creation() -> Result<()> {
+ let tmp = TempPath::new("create_multiple_dirs").await?;
+ let dirs = ["config", "src", "tests"];
+
+ create_multiple_directories(&tmp.path, &dirs).await?;
+ let folders = list_directories(&tmp.path).await?;
+ assert_eq!(folders.len(), 3);
+
+ for check in dirs {
+ let target = tmp.path.join(check);
+ assert!(folders.contains(&target));
+ }
+
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn files_filter() -> Result<()> {
+ let root = TempPath::new("filter_files").await?;
+ root.multi_file(vec!["first.rs", "second.rs", "third.js", "fourth.rb"])
+ .await?;
+
+ // Raw string filter
+ let mut filter = FtFilter::Raw("fourth".to_string());
+ let mut result = list_files_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 1);
+ assert_eq!(result[0], root.path.join("fourth.rb"));
+
+ // PathBuf filter
+ filter = FtFilter::Path(PathBuf::from("third.js"));
+ result = list_files_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 1);
+ assert_eq!(result[0], root.path.join("third.js"));
+
+ // Regex filter
+ filter = FtFilter::Regex(Regex::new(r"(.*)\.rs").unwrap());
+ result = list_files_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&root.path.join("first.rs")));
+ assert!(result.contains(&root.path.join("second.rs")));
+
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn files_filter_is_empty() -> Result<()> {
+ let root = TempPath::new("filter_files_empty").await?;
+
+ // Raw string filter (normal + nested)
+ let mut filter = FtFilter::Raw("non-existant".to_string());
+ let mut result = list_files_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ filter = FtFilter::Raw("non-existant".to_string());
+ result = list_nested_files_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+
+ // PathBuf Filter
+ filter = FtFilter::Path(PathBuf::from("another-missing"));
+ result = list_files_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ filter = FtFilter::Path(PathBuf::from("another-missing"));
+ result = list_nested_files_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+
+ // Regex filter
+ filter = FtFilter::Regex(Regex::new(r"(.*)\.rs").unwrap());
+ result = list_files_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ filter = FtFilter::Regex(Regex::new(r"(.*)\.rs").unwrap());
+ result = list_nested_files_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn list_items_error() -> Result<()> {
+ let root = TempPath::new("filter_files_error").await?;
+ let test = root.new_file("test.js").await?;
+
+ assert!(list_files(&test.path).await.is_err());
+ assert!(list_nested_files(&test.path).await.is_err());
+ assert!(
+ list_files_with_filter(&test.path, FtFilter::Raw("filter".to_string()))
+ .await
+ .is_err()
+ );
+ assert!(
+ list_nested_files_with_filter(&test.path, FtFilter::Raw("filter".to_string()))
+ .await
+ .is_err()
+ );
+ assert!(list_directories(&test.path).await.is_err());
+ assert!(list_nested_directories(&test.path).await.is_err());
+ assert!(
+ list_directories_with_filter(&test.path, FtFilter::Raw("filter".to_string()))
+ .await
+ .is_err()
+ );
+ assert!(list_nested_directories_with_filter(
+ &test.path,
+ FtFilter::Raw("filter".to_string())
+ )
+ .await
+ .is_err());
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn nested_files_filter() -> Result<()> {
+ let root = TempPath::new("nested_filter_files").await?;
+ let ffolder = root.new_folder("ffolder").await?;
+ let sfolder = root.new_folder("sfolder").await?;
+ let tfolder = root.new_folder("tfolder").await?;
+
+ root.new_file("initial.pdf").await?;
+ ffolder.new_file("first.rs").await?;
+ sfolder.multi_file(vec!["second.txt", "third.rs"]).await?;
+ tfolder.new_file("initial.cpp").await?;
+
+ let mut filter = FtFilter::Raw("initial".to_string());
+ let mut result = list_nested_files_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&root.path.join("tfolder/initial.cpp")));
+ assert!(result.contains(&root.path.join("initial.pdf")));
+
+ filter = FtFilter::Path(PathBuf::from("second.txt"));
+ result = list_nested_files_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 1);
+ assert_eq!(result[0], root.path.join("sfolder/second.txt"));
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn directories_filter() -> Result<()> {
+ let root = TempPath::new("dir_filter").await?;
+ root.multi_folder(vec!["log_var", "store_var", "config", "etc"])
+ .await?;
+
+ // Raw string filter
+ let mut filter = FtFilter::Raw("config".to_string());
+ let mut result = list_directories_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 1);
+ assert_eq!(result[0], root.path.join("config"));
+
+ // PathBuf filter
+ filter = FtFilter::Path(PathBuf::from("etc"));
+ result = list_directories_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 1);
+ assert_eq!(result[0], root.path.join("etc"));
+
+ // Regex filter
+ filter = FtFilter::Regex(Regex::new(r"(.*)_var").unwrap());
+ result = list_directories_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&root.path.join("log_var")));
+ assert!(result.contains(&root.path.join("store_var")));
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn nested_directories_filter() -> Result<()> {
+ let root = TempPath::new("nested_dir_filter_test").await?;
+ root.multi_folder(vec!["folder1", "folder2"]).await?;
+
+ let f1 = TempPath::new(root.join("folder1")).await?;
+ f1.multi_folder(vec!["sub1", "sub_2", "sub3"]).await?;
+
+ let s2 = TempPath::new(f1.join("sub_2")).await?;
+ s2.multi_folder(vec!["deep_1", "deep2"]).await?;
+
+ // Raw filter
+ let mut filter = FtFilter::Raw("deep".to_string());
+ let mut result = list_nested_directories_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 2);
+ assert!(result.contains(&root.path.join("folder1/sub_2/deep_1")));
+ assert!(result.contains(&root.path.join("folder1/sub_2/deep2")));
+
+ // Path filter
+ filter = FtFilter::Path(PathBuf::from("folder1"));
+ result = list_nested_directories_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 6);
+
+ filter = FtFilter::Regex(Regex::new(r"(.*)_[0-9]{1}").unwrap());
+ result = list_nested_directories_with_filter(&root.path, filter).await?;
+ assert_eq!(result.len(), 3);
+
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn list_dirs_empty() -> Result<()> {
+ let root = TempPath::new("list_dirs_empty").await?;
+
+ // Raw string filter (normal + nested)
+ let mut filter = FtFilter::Raw("non-existant".to_string());
+ let mut result = list_directories_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ filter = FtFilter::Raw("non-existant".to_string());
+ result = list_nested_directories_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+
+ // PathBuf Filter
+ filter = FtFilter::Path(PathBuf::from("another-missing"));
+ result = list_directories_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ filter = FtFilter::Path(PathBuf::from("another-missing"));
+ result = list_nested_directories_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+
+ // Regex filter
+ filter = FtFilter::Regex(Regex::new(r"(.*)\.rs").unwrap());
+ result = list_directories_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ filter = FtFilter::Regex(Regex::new(r"(.*)\.rs").unwrap());
+ result = list_nested_directories_with_filter(&root.path, filter).await?;
+ assert!(result.is_empty());
+ Ok(())
+ }
}
diff --git a/src/naming.rs b/src/naming.rs
new file mode 100644
index 0000000..146b75e
--- /dev/null
+++ b/src/naming.rs
@@ -0,0 +1,168 @@
+//! Functions that generate PathBuf filenames
+//!
+//! # Examples
+//!
+//! ```
+//! use std::path::PathBuf;
+//! use filetools::naming;
+//!
+//! // Generates the name `test.pdf`
+//! let custom_name = naming::generate_name("test", "pdf");
+//!
+//! // Name will be suffixed by the current time it was generated
+//! // E.g. `test_[Timestamp].pdf`
+//! let timestamped_name = naming::generate_timestamped_name("test", "pdf");
+//!
+//! // Random name is a UUIDv4 string suffixed by the extension
+//! // E.g. `00762527-012a-43c1-a673-cad9bc5eef64.pdf`
+//! let random_name = naming::generate_uuid4_name("pdf");
+//!
+//! // N-digit name is a number prefixed by X zeros (e.g. 0005.pdf)
+//! let n_digit_name = naming::generate_n_digit_name(5, 4, "pdf");
+//! ```
+//!
+
+use chrono::prelude::*;
+use std::path::PathBuf;
+use uuid::Uuid;
+
+/// Helper for making extensions
+///
+/// Literally just prepends a `.`
+fn make_extension(ext: impl AsRef<str>) -> String {
+ if ext.as_ref().is_empty() {
+ return String::new();
+ }
+
+ format!(".{}", ext.as_ref())
+}
+
+/// Generates a `PathBuf` from a given name and extension
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::naming::generate_name;
+///
+/// // Will generate the name `test.json`
+/// let name = generate_name("test", "json");
+/// ```
+pub fn generate_name(name: &str, ext: &str) -> PathBuf {
+ PathBuf::from(format!("{}{}", name, make_extension(ext)))
+}
+
+/// Generates a `PathBuf` from a name and extension with a default timestamp of "DD_MM_YYYY_HHhMMmSSs"
+/// If `fname` is empty, just uses the timestamp and extension
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::naming::generate_timestamped_name;
+///
+/// // Will generate the name `some_file_[Timestamp].pdf`
+/// let ts_with_filename = generate_timestamped_name("some_file", "pdf");
+///
+/// // Will generate the name `[Timestamp].txt`
+/// let ts_no_filename = generate_timestamped_name("", "txt");
+/// ```
+pub fn generate_timestamped_name(fname: &str, ext: &str) -> PathBuf {
+ let dt = Utc::now().format("%d_%m_%Y_%Hh%Mm%Ss");
+
+ if fname.is_empty() {
+ return PathBuf::from(format!("{}{}", dt, make_extension(ext)));
+ }
+
+ PathBuf::from(format!("{}_{}{}", fname, dt, make_extension(ext)))
+}
+
+/// Generates a random UUIDv4 `PathBuf`
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::naming::generate_uuid4_name;
+///
+/// // Will generate a UUIDv4 name (e.g. `b1faa2c3-d25c-43bb-b578-9f259d7aabaf.log`)
+/// let name = generate_uuid4_name("log");
+/// ```
+pub fn generate_uuid4_name(ext: &str) -> PathBuf {
+ let unique = Uuid::new_v4();
+
+ PathBuf::from(format!("{}{}", unique, make_extension(ext)))
+}
+
+/// Generates a `PathBuf` from a `number` zero-padded to a width of `fill` digits.
+///
+/// If `ext` is empty, will just return the filled number.
+///
+/// # Example
+///
+/// ```rust
+/// use filetools::naming::generate_n_digit_name;
+///
+/// // Will generate the name `0005.json`
+/// let name = generate_n_digit_name(5, 4, "json");
+///
+/// // Will generate the name `000128.log`
+/// let another_name = generate_n_digit_name(128, 6, "log");
+/// ```
+pub fn generate_n_digit_name(number: usize, fill: usize, ext: &str) -> PathBuf {
+ PathBuf::from(format!(
+ "{:0fill$}{}",
+ number,
+ make_extension(ext),
+ fill = fill
+ ))
+}
+
+#[cfg(test)]
+mod naming_tests {
+ use super::*;
+ use regex::Regex;
+ use std::path::PathBuf;
+
+ #[test]
+ fn generates_expected_name() {
+ assert_eq!(generate_name("test", "pdf"), PathBuf::from("test.pdf"));
+ assert_eq!(
+ generate_name("another", "txt"),
+ PathBuf::from("another.txt")
+ );
+ assert_eq!(generate_name("main", "c"), PathBuf::from("main.c"));
+ assert_eq!(generate_name("app", "js"), PathBuf::from("app.js"));
+ assert_eq!(
+ generate_name("somephotothing", "H4AC"),
+ PathBuf::from("somephotothing.H4AC")
+ );
+ }
+
+ #[test]
+ // Don't judge me on regex...
+ fn generates_timestamped_name_ok() {
+ let ts_re = Regex::new(r"(.*)_\d{2}_\d{2}_\d{4}_\d{2}h\d{2}m\d{2}s").unwrap();
+ let ts_name = generate_timestamped_name("with_filename", "txt");
+
+ // Pathbuf checks need the full path component
+ let ts_name = ts_name.to_str().unwrap();
+ assert!(ts_name.starts_with("with_filename"));
+ assert!(ts_re.is_match(ts_name));
+ assert!(ts_name.ends_with(".txt"));
+
+ let no_prefix_re = Regex::new(r"\d{2}_\d{2}_\d{4}_\d{2}h\d{2}m\d{2}s").unwrap();
+ let no_prefix = generate_timestamped_name("", "pdf");
+
+ let no_prefix = no_prefix.to_str().unwrap();
+ assert!(no_prefix.ends_with("pdf"));
+ assert!(no_prefix_re.is_match(no_prefix));
+ }
+
+ #[test]
+ fn checks_random_names_are_ok() {
+ let uuid_re =
+ Regex::new(r"[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}").unwrap();
+ let rn = generate_uuid4_name("json");
+ let rn_name = rn.to_str().unwrap();
+ assert!(uuid_re.is_match(rn_name));
+ assert!(rn_name.ends_with(".json"));
+ }
+}
diff --git a/src/sync.rs b/src/sync.rs
new file mode 100644
index 0000000..3934759
--- /dev/null
+++ b/src/sync.rs
@@ -0,0 +1,409 @@
+//! Sync variations of the main [`crate`] functions
+//!
+//! All operations are identical to those defined in the `async` version.
+use crate::util::FtIterItemState;
+use crate::{naming::generate_n_digit_name, util::iteritems_sync, FtFilter};
+use anyhow::{Context, Result};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+/// Creates a directory at the given path.
+///
+/// If the directory already exists, nothing is done
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::ensure_directory`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::ensure_directory;
+///
+/// let target_path = "directory/to/create";
+/// ensure_directory(target_path).expect("unable to create directory");
+///
+/// ```
+pub fn ensure_directory(dir: impl AsRef<Path>) -> Result<()> {
+ if !dir.as_ref().exists() {
+ fs::create_dir_all(dir).context("unable to create directory")?;
+ }
+
+ Ok(())
+}
+
+/// Creates a range of numeric folders in the given path
+///
+/// Directories can be padded with X zeros using the `fill` parameter.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::create_numeric_directories`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::create_numeric_directories;
+///
+/// let root = "some/root/path";
+///
+/// // This will create the following directories:
+/// // "some/root/path/0000"
+/// // ...
+/// // "some/root/path/0099"
+/// create_numeric_directories(root, 0, 100, 4).expect("unable to create numeric directories");
+/// ```
+pub fn create_numeric_directories(
+ path: impl AsRef<Path>,
+ start: usize,
+ end: usize,
+ fill: usize,
+) -> Result<()> {
+ for i in start..end {
+ let name = path.as_ref().join(generate_n_digit_name(i, fill, ""));
+ ensure_directory(name).context("creating numeric directories")?;
+ }
+
+ Ok(())
+}
+
+/// Creates multiple directories inside the target path.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::create_multiple_directories`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::create_multiple_directories;
+///
+/// let root = "dir/to/populate";
+/// let to_create = ["dir1", "dir2", "dir3"];
+///
+/// // Will create:
+/// // `dir/to/populate/dir1`
+/// // `dir/to/populate/dir2`
+/// // `dir/to/populate/dir3`
+/// create_multiple_directories(root, &to_create).expect("unable to create multiple directories");
+/// ```
+pub fn create_multiple_directories(
+ path: impl AsRef<Path>,
+ directories: &[impl AsRef<Path>],
+) -> Result<()> {
+ for dir in directories {
+ let target = path.as_ref().join(dir);
+ ensure_directory(target)?;
+ }
+
+ Ok(())
+}
+
+/// Lists all files in the given directory (not including subdirectories).
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_files`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The path given is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::list_files;
+///
+/// let target_dir = "some/dir/containing/files";
+///
+/// // Will return a Vec containing paths to all files in the directory
+/// let files = list_files(target_dir).expect("unable to list files");
+/// ```
+pub fn list_files<P: AsRef<Path>>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems_sync(path, FtIterItemState::File, None)
+}
+
+/// Lists all files in a directory including ALL subdirectories
+///
+/// Use responsibly.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_nested_files`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::list_nested_files;
+///
+/// let target_dir = "some/dir/containing/nested/files";
+///
+/// // Will return a Vec containing all files in the directory (including all subdirectories)
+/// let files = list_nested_files(target_dir).expect("unable to list files recursively");
+/// ```
+pub fn list_nested_files<P: AsRef<Path>>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems_sync(path, FtIterItemState::RFile, None)
+}
+
+/// Lists files in a folder (not including subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_files_with_filter`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{sync::list_files_with_filter, FtFilter};
+///
+/// let root = "some/path/containing/files";
+///
+/// // List all files containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_files_with_filter(&root, filter).expect("unable to list filtered files");
+///
+/// // List all files containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_files_with_filter(&root, filter).expect("unable to list filtered files");
+///
+/// // List all files ending with `.rs`
+/// let re = Regex::new(r"(.*)\.rs").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_files_with_filter(&root, filter).expect("unable to list filtered files");
+///
+///
+/// ```
+pub fn list_files_with_filter<P: AsRef<Path>>(path: P, filter: FtFilter) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems_sync(path, FtIterItemState::File, Some(&filter))
+}
+
+/// Lists files in a folder (including ALL subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// Use responsibly.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_nested_files_with_filter`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{sync::list_nested_files_with_filter, FtFilter};
+///
+/// let root = "some/path/containing/nested/folders/with/files";
+///
+/// // List all files containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_nested_files_with_filter(&root, filter).expect("unable to list nested files with filter");
+///
+/// // List all files containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_nested_files_with_filter(&root, filter).expect("unable to list nested files with filter");
+///
+/// // List all files ending with `.rs`
+/// let re = Regex::new(r"(.*)\.rs").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_nested_files_with_filter(&root, filter).expect("unable to list nested files with filter");
+/// ```
+pub fn list_nested_files_with_filter<P: AsRef<Path>>(
+ path: P,
+ filter: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+
+ iteritems_sync(path, FtIterItemState::RFile, Some(&filter))
+}
+
+/// Lists all directories in the given directory (not including subdirectories).
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_directories`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The path given is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::list_directories;
+///
+/// let target_dir = "some/dir/containing/files";
+///
+/// // Will return a Vec containing paths to all directories in the directory
+/// let dirs = list_directories(target_dir).expect("unable to list directories");
+/// ```
+pub fn list_directories<P: AsRef<Path>>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+ iteritems_sync(path, FtIterItemState::Dir, None)
+}
+
+/// Lists all directories in a directory including ALL subdirectories
+///
+/// Use responsibly.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_nested_directories`]
+///
+/// # Errors
+///
+/// This function will return an error in the following situations:
+///
+/// * The given path is a file and not a directory
+/// * The given path does not exist
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use filetools::sync::list_nested_directories;
+///
+/// let target_dir = "some/dir/containing/nested/files";
+///
+/// // Will return a Vec containing all directories in the directory (including all subdirectories)
+/// let dirs = list_nested_directories(target_dir).expect("unable to list directories recursively");
+/// ```
+pub fn list_nested_directories<P: AsRef<Path> + Send>(path: P) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+ iteritems_sync(path, FtIterItemState::RDir, None)
+}
+
+/// Lists directories in a given directory (not including subdirectories) matching a filter pattern.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_directories_with_filter`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{sync::list_directories_with_filter, FtFilter};
+///
+/// let root = "some/path/containing/dirs";
+///
+/// // List all dirs containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_directories_with_filter(&root, filter).expect("unable to list dirs with filter");
+///
+/// // List all dirs containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_directories_with_filter(&root, filter).expect("unable to list dirs with filter");
+///
+/// // List all dirs ending with `_test`
+/// let re = Regex::new(r"(.*)_test").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_directories_with_filter(&root, filter).expect("unable to list dirs with filter");
+/// ```
+pub fn list_directories_with_filter<P: AsRef<Path>>(
+ path: P,
+ filter: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+ iteritems_sync(path, FtIterItemState::Dir, Some(&filter))
+}
+
+/// Lists directories in a given directory (including ALL subdirectories) matching a filter pattern.
+///
+/// Use responsibly.
+///
+/// This pattern can be a `String`, `PathBuf`, or a [`regex::Regex`] pattern.
+///
+/// ## Async
+///
+/// For the `async` version, see: [`crate::list_nested_directories_with_filter`]
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use regex::Regex;
+/// use std::path::PathBuf;
+/// use filetools::{sync::list_nested_directories_with_filter, FtFilter};
+///
+/// let root = "some/path/containing/dirs";
+///
+/// // List all dirs containing the phrase `log`
+/// let mut filter = FtFilter::Raw("log".to_string());
+/// let mut results = list_nested_directories_with_filter(&root, filter).expect("unable to list nested dirs with filter");
+///
+/// // List all dirs containing the path segment `files/test`
+/// filter = FtFilter::Path(PathBuf::from("files/test"));
+/// results = list_nested_directories_with_filter(&root, filter).expect("unable to list nested dirs with filter");
+///
+/// // List all dirs ending with `_test`
+/// let re = Regex::new(r"(.*)_test").expect("unable to create regex");
+/// filter = FtFilter::Regex(re);
+/// results = list_nested_directories_with_filter(&root, filter).expect("unable to list nested dirs with filter");
+/// ```
+pub fn list_nested_directories_with_filter<P: AsRef<Path>>(
+ path: P,
+ filter: FtFilter,
+) -> Result<Vec<PathBuf>> {
+ anyhow::ensure!(path.as_ref().exists(), "path does not exist");
+ anyhow::ensure!(
+ path.as_ref().is_dir(),
+ "path should be a directory, not a file"
+ );
+ iteritems_sync(path, FtIterItemState::RDir, Some(&filter))
+}
+
+// No tests needed because these are tested in the main crate
diff --git a/src/util.rs b/src/util.rs
new file mode 100644
index 0000000..a0bd416
--- /dev/null
+++ b/src/util.rs
@@ -0,0 +1,232 @@
+//! Internal helper utilities and types
+
+use crate::{ensure_directory, path_contains, FtFilter};
+use anyhow::{Context, Result};
+use async_recursion::async_recursion;
+use std::path::{Path, PathBuf};
+use tokio::fs;
+
/// Determines the type of iteration performed by the `list_directories` and `list_files` functions
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(crate) enum FtIterItemState {
    /// Iterate files with no recursion (top level of the directory only)
    File,

    /// Iterate files, recursing into every subdirectory
    RFile,

    /// Iterate directories with no recursion (top level only)
    Dir,

    /// Iterate directories, recursing into every subdirectory
    RDir,
}
+
+/// Helper function to determine if an path item is valid based on the supplied filter
+fn matches_filter(item: impl AsRef, filter: &FtFilter) -> bool {
+ match filter {
+ // I know these are the same for Raw and Path
+ // but it complains when you try and use the | with match
+ // for this
+ FtFilter::Raw(raw) => {
+ if path_contains(&item, raw) {
+ return true;
+ }
+ }
+ FtFilter::Path(filter_path) => {
+ if path_contains(&item, filter_path) {
+ return true;
+ }
+ }
+ FtFilter::Regex(re) => {
+ if re.is_match(item.as_ref().to_str().unwrap()) {
+ return true;
+ }
+ }
+ }
+
+ false
+}
+
+/// Helper function to iterate through a directory to find all Files / Directories
+/// depending on the `FilterState` passed.
+#[async_recursion]
+pub(crate) async fn iteritems + Send>(
+ path: P,
+ iterstate: FtIterItemState,
+ filter: Option<&'async_recursion FtFilter>,
+) -> Result> {
+ let mut items = vec![];
+
+ let mut entries = fs::read_dir(path.as_ref())
+ .await
+ .context("list items inner call")?;
+
+ while let Some(entry) = entries.next_entry().await? {
+ let e_path = entry.path();
+
+ // If a filter is present, set the value to the result of the filter
+ // check, else default to true so always adds the value
+ let filter_pass = match filter.as_ref() {
+ Some(f) => matches_filter(&e_path, f),
+ None => true,
+ };
+
+ match iterstate {
+ FtIterItemState::File => {
+ if e_path.is_file() && filter_pass {
+ items.push(e_path);
+ }
+ }
+ FtIterItemState::RFile => {
+ if e_path.is_file() && filter_pass {
+ items.push(e_path)
+ } else if e_path.is_dir() {
+ items.extend(iteritems(e_path, iterstate, filter).await?);
+ }
+ }
+ FtIterItemState::Dir => {
+ if e_path.is_dir() && filter_pass {
+ items.push(e_path);
+ }
+ }
+ FtIterItemState::RDir => {
+ if e_path.is_dir() {
+ if filter_pass {
+ items.push(e_path.clone());
+ }
+
+ items.extend(iteritems(e_path, iterstate, filter).await?);
+ }
+ }
+ }
+ }
+
+ Ok(items)
+}
+
+pub(crate) fn iteritems_sync>(
+ path: P,
+ iterstate: FtIterItemState,
+ filter: Option<&FtFilter>,
+) -> Result> {
+ let mut items = vec![];
+
+ let mut entries = std::fs::read_dir(path.as_ref()).context("sync iteritems entry call")?;
+
+ while let Some(Ok(entry)) = entries.next() {
+ let e_path = entry.path();
+
+ // If a filter is present, set the value to the result of the filter
+ // check, else default to true so always adds the value
+ let filter_pass = match filter.as_ref() {
+ Some(f) => matches_filter(&e_path, f),
+ None => true,
+ };
+ match iterstate {
+ FtIterItemState::File => {
+ if e_path.is_file() && filter_pass {
+ items.push(e_path);
+ }
+ }
+ FtIterItemState::RFile => {
+ if e_path.is_file() && filter_pass {
+ items.push(e_path)
+ } else if e_path.is_dir() {
+ items.extend(iteritems_sync(e_path, iterstate, filter)?);
+ }
+ }
+ FtIterItemState::Dir => {
+ if e_path.is_dir() && filter_pass {
+ items.push(e_path);
+ }
+ }
+ FtIterItemState::RDir => {
+ if e_path.is_dir() {
+ if filter_pass {
+ items.push(e_path.clone());
+ }
+
+ items.extend(iteritems_sync(e_path, iterstate, filter)?);
+ }
+ }
+ }
+ }
+
+ Ok(items)
+}
/// Helper for creating temp directories
///
/// Tempfile _would_ work but I want nested dirs and easy ways to create
/// a series of files / folder quickly without worrying
/// A cheap knock-off of `Tempfile` but meh, this works kinda better for my use case
pub(crate) struct TempPath {
    /// Root of the managed temp location; removed recursively on `Drop`.
    pub path: PathBuf,
}
+
+// This is only used in the test suite
+#[allow(dead_code)]
+impl TempPath {
+ pub async fn new(p: impl AsRef) -> Result {
+ let root = std::env::temp_dir();
+ let path = if p.as_ref().starts_with(&root) {
+ p.as_ref().to_path_buf()
+ } else {
+ root.join(p)
+ };
+
+ ensure_directory(&path).await?;
+
+ Ok(Self { path })
+ }
+
+ pub async fn new_file(&self, name: impl AsRef) -> Result {
+ let p = self.path.join(name);
+ tokio::fs::File::create(&p).await?;
+
+ Self::new(p).await
+ }
+
+ pub async fn multi_file(&self, names: Vec>) -> Result<()> {
+ for name in names {
+ tokio::fs::File::create(&self.path.join(name)).await?;
+ }
+
+ Ok(())
+ }
+
+ pub async fn new_folder(&self, name: impl AsRef) -> Result {
+ let p = self.path.join(name);
+ ensure_directory(&p).await?;
+
+ Self::new(p).await
+ }
+
+ pub async fn multi_folder(&self, names: Vec>) -> Result<()> {
+ for name in names {
+ ensure_directory(&self.path.join(name)).await?;
+ }
+
+ Ok(())
+ }
+
+ pub async fn nest_folders(&self, subfolder_chain: Vec>) -> Result {
+ let mut dst_path = self.path.clone();
+ for sf in subfolder_chain {
+ dst_path = dst_path.join(sf.as_ref());
+ }
+
+ ensure_directory(&dst_path).await?;
+ Self::new(dst_path).await
+ }
+
+ pub fn join(&self, path: impl AsRef) -> impl AsRef {
+ self.path.join(path)
+ }
+}
+
impl Drop for TempPath {
    fn drop(&mut self) {
        // Best-effort cleanup: errors (e.g. directory already removed) are
        // deliberately ignored — panicking inside Drop would abort the
        // process if it happens during unwinding.
        let _ = std::fs::remove_dir_all(&self.path);
    }
}