diff --git a/README.md b/README.md
index 842b4211c..6dc5ee771 100644
--- a/README.md
+++ b/README.md
@@ -146,7 +146,7 @@ Faster and Cheaper than direct cloud storage!
 * [WebDAV] accesses as a mapped drive on Mac and Windows, or from mobile devices.
 * [AES256-GCM Encrypted Storage][FilerDataEncryption] safely stores the encrypted data.
 * [Super Large Files][SuperLargeFiles] stores large or super large files in tens of TB.
-* [Cloud Data Accelerator][RemoteStorage] transparently read and write existing cloud data at local speed with content cache, metadata cache, and asynchronous write back.
+* [Cloud Drive][CloudDrive] mounts cloud data to a local cluster for fast read and write, with asynchronous write back.

 ## Kubernetes ##
 * [Kubernetes CSI Driver][SeaweedFsCsiDriver] A Container Storage Interface (CSI) Driver. [![Docker Pulls](https://img.shields.io/docker/pulls/chrislusf/seaweedfs-csi-driver.svg?maxAge=4800)](https://hub.docker.com/r/chrislusf/seaweedfs-csi-driver/)
@@ -169,7 +169,7 @@ Faster and Cheaper than direct cloud storage!
 [ActiveActiveAsyncReplication]: https://github.com/chrislusf/seaweedfs/wiki/Filer-Active-Active-cross-cluster-continuous-synchronization
 [FilerStoreReplication]: https://github.com/chrislusf/seaweedfs/wiki/Filer-Store-Replication
 [KeyLargeValueStore]: https://github.com/chrislusf/seaweedfs/wiki/Filer-as-a-Key-Large-Value-Store
-[RemoteStorage]: https://github.com/chrislusf/seaweedfs/wiki/Cloud-Cache-Architecture
+[CloudDrive]: https://github.com/chrislusf/seaweedfs/wiki/Cloud-Drive-Architecture

 [Back to TOC](#table-of-contents)

diff --git a/weed/shell/command_remote_cache.go b/weed/shell/command_remote_cache.go
index f032239f3..abd53461b 100644
--- a/weed/shell/command_remote_cache.go
+++ b/weed/shell/command_remote_cache.go
@@ -32,11 +32,14 @@ func (c *commandRemoteCache) Help() string {
 	remote.cache -dir=/xxx
 	remote.cache -dir=/xxx/some/sub/dir
 	remote.cache -dir=/xxx/some/sub/dir -include=*.pdf
+	remote.cache -dir=/xxx/some/sub/dir -exclude=*.txt
+	remote.cache -maxSize=1024000  # cache files smaller than 1MB
+	remote.cache -maxAge=3600      # cache files less than 1 hour old

 	This is designed to run regularly. So you can add it to some cronjob.
 	If a file is already synchronized with the remote copy, the file will be skipped to avoid unnecessary copy.

-	The actual data copying goes through volume severs.
+	The actual data copying goes through volume servers in parallel.

`
}
diff --git a/weed/shell/command_remote_uncache.go b/weed/shell/command_remote_uncache.go
index 0e5152f78..ac7e44a7d 100644
--- a/weed/shell/command_remote_uncache.go
+++ b/weed/shell/command_remote_uncache.go
@@ -33,6 +33,8 @@ func (c *commandRemoteUncache) Help() string {
 	remote.uncache -dir=/xxx/some/sub/dir
 	remote.uncache -dir=/xxx/some/sub/dir -include=*.pdf
 	remote.uncache -dir=/xxx/some/sub/dir -exclude=*.txt
+	remote.uncache -minSize=1024000  # uncache files larger than 1MB
+	remote.uncache -minAge=3600      # uncache files older than 1 hour

`
}
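
The new `-maxSize`/`-maxAge` and `-minSize`/`-minAge` flags are only described by the help text above. As a reading aid, here is a minimal, self-contained Go sketch of the filtering semantics those flags suggest; `fileInfo`, `shouldCache`, and `shouldUncache` are hypothetical names for illustration, not the actual command implementation.

```go
package main

import (
	"fmt"
	"time"
)

// fileInfo is a hypothetical stand-in for the metadata the commands
// would read from a filer entry (size in bytes, last modification time).
type fileInfo struct {
	Size    int64
	ModTime time.Time
}

// shouldCache returns true if a remote file qualifies for caching:
// no larger than maxSize bytes and no older than maxAge.
// A zero limit means "no limit", matching a flag left at its default.
func shouldCache(f fileInfo, maxSize int64, maxAge time.Duration, now time.Time) bool {
	if maxSize > 0 && f.Size > maxSize {
		return false
	}
	if maxAge > 0 && now.Sub(f.ModTime) > maxAge {
		return false
	}
	return true
}

// shouldUncache is the mirror image: evict the local cached copy when
// the file is at least minSize bytes or at least minAge old.
func shouldUncache(f fileInfo, minSize int64, minAge time.Duration, now time.Time) bool {
	if minSize > 0 && f.Size >= minSize {
		return true
	}
	if minAge > 0 && now.Sub(f.ModTime) >= minAge {
		return true
	}
	return false
}

func main() {
	now := time.Now()
	f := fileInfo{Size: 2_000_000, ModTime: now.Add(-2 * time.Hour)}

	// remote.cache -maxSize=1024000 -maxAge=3600: too big and too old.
	fmt.Println(shouldCache(f, 1024000, 3600*time.Second, now)) // false

	// remote.uncache -minSize=1024000: large enough to evict.
	fmt.Println(shouldUncache(f, 1024000, 0, now)) // true
}
```

Whether the real commands treat the thresholds as inclusive or exclusive is not stated in the help text; the sketch picks one interpretation.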
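The help text also says the command is designed to run regularly from a cronjob. Assuming `weed shell` accepts commands on standard input (as in other SeaweedFS examples), an hourly crontab entry might look like `0 * * * * echo "remote.cache -dir=/xxx -maxAge=3600" | weed shell`, with a matching `remote.uncache -dir=/xxx -minAge=86400` run less frequently to evict stale copies.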