## ----include = FALSE----------------------------------------------------------
# Vignette-wide chunk defaults: collapse source and output into one block and
# prefix printed output with "#>" so readers can copy code without editing.
knitr::opts_chunk$set(collapse = TRUE, comment = "#>")

## ----setup--------------------------------------------------------------------
library(rcloner)

# Probe for the rclone binary once; `has_rclone` gates the `eval` option of
# later chunks so this vignette still builds on systems without rclone.
has_rclone <- rclone_available()
if (!has_rclone) {
  message(
    "rclone is not installed on this system. ",
    "Code chunks that require rclone are skipped. ",
    "Install with install_rclone()."
  )
}

## ----install, eval=FALSE------------------------------------------------------
# install_rclone()

## ----version, eval = has_rclone-----------------------------------------------
# Report the version of the rclone binary in use (auto-printed at top level).
rclone_version()

## ----s3-config, eval=FALSE----------------------------------------------------
# rclone_config_create(
#   "aws",
#   type     = "s3",
#   provider = "AWS",
#   access_key_id     = Sys.getenv("AWS_ACCESS_KEY_ID"),
#   secret_access_key = Sys.getenv("AWS_SECRET_ACCESS_KEY"),
#   region            = "us-east-1"
# )

## ----minio-config, eval=FALSE-------------------------------------------------
# rclone_config_create(
#   "minio",
#   type     = "s3",
#   provider = "Minio",
#   access_key_id     = Sys.getenv("MINIO_ACCESS_KEY"),
#   secret_access_key = Sys.getenv("MINIO_SECRET_KEY"),
#   endpoint          = "https://minio.example.com"
# )

## ----listremotes, eval=FALSE--------------------------------------------------
# rclone_listremotes()

## ----ls-local, eval = has_rclone----------------------------------------------
# List a local directory -- rclone paths without a "remote:" prefix refer to
# the local filesystem. files_only = TRUE presumably omits directories from
# the listing (see ?rclone_ls to confirm).
rclone_ls(tempdir(), files_only = TRUE)

## ----ls-remote, eval=FALSE----------------------------------------------------
# # List a bucket on a configured remote
# rclone_ls("aws:my-bucket")
# 
# # Recursive listing
# rclone_ls("aws:my-bucket/data/", recursive = TRUE)
# 
# # Directories only
# rclone_lsd("aws:my-bucket")

## ----copy-local, eval = has_rclone--------------------------------------------
src  <- tempfile()
dest <- tempfile()
dir.create(src)
dir.create(dest)
writeLines("hello from rcloner", file.path(src, "readme.txt"))

rclone_copy(src, dest)
list.files(dest)

## ----cleanup-copy, echo = FALSE, eval = has_rclone----------------------------
unlink(src,  recursive = TRUE)
unlink(dest, recursive = TRUE)

## ----copy-cloud, eval=FALSE---------------------------------------------------
# # Upload a local directory to S3
# rclone_copy("/local/data", "aws:my-bucket/data")
# 
# # Download a file from S3
# rclone_copy("aws:my-bucket/report.csv", "/local/downloads/")
# 
# # Copy a URL directly to cloud storage (no local intermediate)
# rclone_copyurl(
#   "https://raw.githubusercontent.com/tidyverse/readr/main/inst/extdata/mtcars.csv",
#   "aws:my-bucket/mtcars.csv"
# )

## ----sync, eval=FALSE---------------------------------------------------------
# rclone_sync("aws:my-bucket/data", "/local/backup")

## ----move, eval=FALSE---------------------------------------------------------
# rclone_move("aws:staging/file.csv", "aws:archive/2024/file.csv")

## ----ops, eval=FALSE----------------------------------------------------------
# # Read a remote file into R
# contents <- rclone_cat("aws:my-bucket/config.yaml")
# 
# # Get metadata for an object
# rclone_stat("aws:my-bucket/data.csv")
# 
# # Total size of a path
# rclone_size("aws:my-bucket")
# 
# # Create a bucket/directory
# rclone_mkdir("aws:new-bucket")
# 
# # Delete files (keeps directories)
# rclone_delete("aws:my-bucket/old-data/")
# 
# # Remove a path and all its contents
# rclone_purge("aws:my-bucket/scratch")
# 
# # Generate a public link (where supported)
# rclone_link("aws:my-bucket/report.html")
# 
# # Get storage quota info
# rclone_about("aws:")

## ----lowlevel, eval=FALSE-----------------------------------------------------
# # Equivalent to: rclone version
# rclone("version")
# 
# # Run any rclone command
# rclone(c("check", "aws:bucket", "/local/backup", "--one-way"))

