feature: Released RedisShake 4.0 version
suxb201 committed Aug 3, 2023
1 parent 5b2303c commit 04e65c0
Showing 122 changed files with 4,286 additions and 1,986 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/issue.md
@@ -7,7 +7,7 @@ assignees: ''

---

- [ ] Please make sure you have read the wiki: https://github.com/alibaba/RedisShake/wiki
- [ ] Please make sure you have read the wiki: https://RedisShake/wiki
- [ ] Please make sure you are familiar with Markdown syntax; good formatting helps maintainers understand your issue
- [ ] Please provide enough information here for community maintainers to troubleshoot the issue
- [ ] Please delete the redundant text in this template, including these sentences, before submitting the issue
8 changes: 4 additions & 4 deletions .github/workflows/ci.yml
@@ -1,13 +1,13 @@
name: CI

on: [ pull_request ]
on: [ push, pull_request ]

jobs:
black-box-test:
runs-on: ubuntu-latest
strategy:
matrix:
redis-version: [ 5, 6, 7 ]
redis-version: [ "2.8", "3.0", "4.0", "5.0", "6.0", "7.0" ]
steps:
- name: Git checkout
uses: actions/checkout@v2
@@ -22,7 +22,7 @@ jobs:
sudo apt-get install git
git clone https://github.com/redis/redis
cd redis
git checkout ${{ matrix.redis-version }}.0
git checkout ${{ matrix.redis-version }}
make -j
mkdir bin
cp src/redis-server bin/redis-server
@@ -31,7 +31,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: '3.10'
python-version: '3.11'

- name: make redis-shake
run: |
33 changes: 33 additions & 0 deletions .github/workflows/pages.yml
@@ -0,0 +1,33 @@
name: Pages
on:
workflow_dispatch: { }
push:
branches:
- main
jobs:
deploy:
runs-on: ubuntu-latest
permissions:
pages: write
id-token: write
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: actions/setup-node@v3
with:
node-version: 16
cache: npm
- run: npm ci
- name: Build
run: npm run docs:build
- uses: actions/configure-pages@v2
- uses: actions/upload-pages-artifact@v1
with:
path: docs/.vitepress/dist
- name: Deploy
id: deployment
uses: actions/deploy-pages@v1
20 changes: 20 additions & 0 deletions .github/workflows/release.yml
@@ -66,3 +66,23 @@ jobs:
asset_content_type: application/gzip
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: "Upload release windows-amd64"
uses: actions/upload-release-asset@v1
with:
upload_url: ${{ steps.release.outputs.upload_url }}
asset_path: ./bin/redis-shake-windows-amd64.tar.gz
asset_name: redis-shake-windows-amd64.tar.gz
asset_content_type: application/gzip
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: "Upload release windows-arm64"
uses: actions/upload-release-asset@v1
with:
upload_url: ${{ steps.release.outputs.upload_url }}
asset_path: ./bin/redis-shake-windows-arm64.tar.gz
asset_name: redis-shake-windows-arm64.tar.gz
asset_content_type: application/gzip
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
14 changes: 9 additions & 5 deletions .gitignore
@@ -1,8 +1,12 @@
.idea
data
__pycache__
bin
.DS_Store
# system
.idea/
__pycache__/
.DS_Store/

# compiled output or test output
bin/
dist/
tmp/
*.log
*.rdb
*.aof
10 changes: 5 additions & 5 deletions README.md
@@ -1,8 +1,8 @@
# redis-shake

[![CI](https://github.com/alibaba/RedisShake/actions/workflows/ci.yml/badge.svg?branch=v3)](https://github.com/alibaba/RedisShake/actions/workflows/ci.yml)
[![CI](https://RedisShake/actions/workflows/ci.yml/badge.svg?branch=v3)](https://RedisShake/actions/workflows/ci.yml)

- [中文文档](https://github.com/alibaba/RedisShake/wiki)
- [中文文档](https://RedisShake/wiki)

redis-shake is a tool for Redis data migration and data filtering.

@@ -16,7 +16,7 @@ redis-shake is a tool for Redis data migration and data filtering.
* ☁️ Support Aliyun Redis and ElastiCache

For older versions of redis-shake (support codis, twemproxy) please
visit [here](https://github.com/alibaba/RedisShake/tree/develop).
visit [here](https://RedisShake/tree/develop).

![redis-shake2.PNG](https://s2.loli.net/2022/07/10/OZrSGutknlI8XNp.png)

@@ -28,14 +28,14 @@ visit [here](https://RedisShake/tree/develop).

### Binary package

Download from Release: [https://github.com/alibaba/RedisShake/releases](https://github.com/alibaba/RedisShake/releases)
Download from Release: [https://RedisShake/releases](https://RedisShake/releases)

### Compile from source

After downloading the source code, run the `sh build.sh` command to compile.

```shell
git clone https://github.com/alibaba/RedisShake
git clone https://RedisShake
cd RedisShake
sh build.sh
```
8 changes: 2 additions & 6 deletions build.sh
@@ -7,11 +7,7 @@ BIN_DIR=$(pwd)/bin/
rm -rf "$BIN_DIR"
mkdir -p "$BIN_DIR"

cp sync.toml "$BIN_DIR"
cp scan.toml "$BIN_DIR"
cp restore.toml "$BIN_DIR"
cp -r filters "$BIN_DIR"
cp -r scripts/cluster_helper "$BIN_DIR"
cp -r configs/* "$BIN_DIR"

dist() {
echo "try build GOOS=$1 GOARCH=$2"
@@ -30,7 +26,7 @@

if [ "$1" == "dist" ]; then
echo "[ DIST ]"
for g in "linux" "darwin"; do
for g in "linux" "darwin" "windows"; do
for a in "amd64" "arm64"; do
dist "$g" "$a"
done
190 changes: 97 additions & 93 deletions cmd/redis-shake/main.go
@@ -1,120 +1,124 @@
package main

import (
"fmt"
"github.com/alibaba/RedisShake/internal/commands"
"github.com/alibaba/RedisShake/internal/config"
"github.com/alibaba/RedisShake/internal/filter"
"github.com/alibaba/RedisShake/internal/log"
"github.com/alibaba/RedisShake/internal/reader"
"github.com/alibaba/RedisShake/internal/statistics"
"github.com/alibaba/RedisShake/internal/writer"
"net/http"
"RedisShake/internal/commands"
"RedisShake/internal/config"
"RedisShake/internal/log"
"RedisShake/internal/reader"
"RedisShake/internal/status"
"RedisShake/internal/transform"
"RedisShake/internal/utils"
"RedisShake/internal/writer"
"github.com/mcuadros/go-defaults"
_ "net/http/pprof"
"os"
"runtime"
)

func main() {
if len(os.Args) < 2 || len(os.Args) > 3 {
fmt.Println("Usage: redis-shake <config file> <filter file>")
fmt.Println("Example: redis-shake config.toml filter.lua")
os.Exit(1)
}

// load filter file
if len(os.Args) == 3 {
luaFile := os.Args[2]
filter.LoadFromFile(luaFile)
}

// load config
configFile := os.Args[1]
config.LoadFromFile(configFile)
v := config.LoadConfig()

log.Init()
log.Infof("GOOS: %s, GOARCH: %s", runtime.GOOS, runtime.GOARCH)
log.Infof("Ncpu: %d, GOMAXPROCS: %d", config.Config.Advanced.Ncpu, runtime.GOMAXPROCS(0))
log.Infof("pid: %d", os.Getpid())
log.Infof("pprof_port: %d", config.Config.Advanced.PprofPort)
if len(os.Args) == 2 {
log.Infof("No lua file specified, will not filter any cmd.")
}

// start pprof
if config.Config.Advanced.PprofPort != 0 {
go func() {
err := http.ListenAndServe(fmt.Sprintf("localhost:%d", config.Config.Advanced.PprofPort), nil)
if err != nil {
log.PanicError(err)
}
}()
}
log.Init(config.Opt.Advanced.LogLevel, config.Opt.Advanced.LogFile)
utils.ChdirAndAcquireFileLock()
utils.SetNcpu()
utils.SetPprofPort()
transform.Init()

// start statistics
if config.Config.Advanced.MetricsPort != 0 {
statistics.Metrics.Address = config.Config.Source.Address
go func() {
log.Infof("metrics url: http://localhost:%d", config.Config.Advanced.MetricsPort)
mux := http.NewServeMux()
mux.HandleFunc("/", statistics.Handler)
err := http.ListenAndServe(fmt.Sprintf("localhost:%d", config.Config.Advanced.MetricsPort), mux)
if err != nil {
log.PanicError(err)
}
}()
// create reader
var theReader reader.Reader
if v.IsSet("SyncStandaloneReader") {
opts := new(reader.SyncStandaloneReaderOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("SyncStandaloneReader", opts)
if err != nil {
log.Panicf("failed to read the SyncReader config entry. err: %v", err)
}
theReader = reader.NewSyncStandaloneReader(opts)
log.Infof("create SyncStandaloneReader: %v", opts.Address)
} else if v.IsSet("SyncClusterReader") {
opts := new(reader.SyncClusterReaderOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("SyncClusterReader", opts)
if err != nil {
log.Panicf("failed to read the SyncReader config entry. err: %v", err)
}
theReader = reader.NewSyncClusterReader(opts)
log.Infof("create SyncClusterReader: %v", opts.Address)
} else if v.IsSet("ScanStandaloneReader") {
opts := new(reader.ScanStandaloneReaderOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("ScanStandaloneReader", opts)
if err != nil {
log.Panicf("failed to read the ScanReader config entry. err: %v", err)
}
theReader = reader.NewScanStandaloneReader(opts)
log.Infof("create ScanStandaloneReader: %v", opts.Address)
} else if v.IsSet("ScanClusterReader") {
opts := new(reader.ScanClusterReaderOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("ScanClusterReader", opts)
if err != nil {
log.Panicf("failed to read the ScanReader config entry. err: %v", err)
}
theReader = reader.NewScanClusterReader(opts)
log.Infof("create ScanClusterReader: %v", opts.Address)
} else if v.IsSet("RdbReader") {
opts := new(reader.RdbReaderOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("RdbReader", opts)
if err != nil {
log.Panicf("failed to read the RdbReader config entry. err: %v", err)
}
theReader = reader.NewRDBReader(opts)
log.Infof("create RdbReader: %v", opts.Filepath)
} else {
log.Panicf("no reader config entry found")
}

// create writer
var theWriter writer.Writer
target := &config.Config.Target
switch config.Config.Target.Type {
case "standalone":
theWriter = writer.NewRedisWriter(target.Address, target.Username, target.Password, target.IsTLS)
case "cluster":
theWriter = writer.NewRedisClusterWriter(target.Address, target.Username, target.Password, target.IsTLS)
default:
log.Panicf("unknown target type: %s", target.Type)
}

// create reader
source := &config.Config.Source
var theReader reader.Reader
if config.Config.Type == "sync" {
theReader = reader.NewPSyncReader(source.Address, source.Username, source.Password, source.IsTLS, source.ElastiCachePSync)
} else if config.Config.Type == "restore" {
theReader = reader.NewRDBReader(source.RDBFilePath)
} else if config.Config.Type == "scan" {
theReader = reader.NewScanReader(source.Address, source.Username, source.Password, source.IsTLS)
if v.IsSet("RedisStandaloneWriter") {
opts := new(writer.RedisStandaloneWriterOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("RedisStandaloneWriter", opts)
if err != nil {
log.Panicf("failed to read the RedisStandaloneWriter config entry. err: %v", err)
}
theWriter = writer.NewRedisStandaloneWriter(opts)
log.Infof("create RedisStandaloneWriter: %v", opts.Address)
} else if v.IsSet("RedisClusterWriter") {
opts := new(writer.RedisClusterWriterOptions)
defaults.SetDefaults(opts)
err := v.UnmarshalKey("RedisClusterWriter", opts)
if err != nil {
log.Panicf("failed to read the RedisClusterWriter config entry. err: %v", err)
}
theWriter = writer.NewRedisClusterWriter(opts)
log.Infof("create RedisClusterWriter: %v", opts.Address)
} else {
log.Panicf("unknown source type: %s", config.Config.Type)
log.Panicf("no writer config entry found")
}
ch := theReader.StartRead()

// start sync
statistics.Init()
id := uint64(0)
// create status
status.Init(theReader, theWriter)

ch := theReader.StartRead()
for e := range ch {
statistics.UpdateInQueueEntriesCount(uint64(len(ch)))
// calc arguments
e.Id = id
id++
e.CmdName, e.Group, e.Keys = commands.CalcKeys(e.Argv)
e.Slots = commands.CalcSlots(e.Keys)

// filter
code := filter.Filter(e)
statistics.UpdateEntryId(e.Id)
if code == filter.Allow {
code := transform.Transform(e)
if code == transform.Allow {
theWriter.Write(e)
statistics.AddAllowEntriesCount()
} else if code == filter.Disallow {
// do something
statistics.AddDisallowEntriesCount()
status.AddEntryCount(e.CmdName, true)
} else if code == transform.Disallow {
status.AddEntryCount(e.CmdName, false)
} else {
log.Panicf("error when run lua filter. entry: %s", e.ToString())
log.Panicf("error when run lua filter. entry: %s", e.String())
}
}
theWriter.Close()
log.Infof("finished.")

theWriter.Close() // Wait for all writing operations to complete
utils.ReleaseFileLock() // Release file lock
log.Infof("all done")
}
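For context on the config-dispatch pattern introduced in the new main.go: the `v` returned by `config.LoadConfig()` appears to be a spf13/viper instance (the `IsSet`/`UnmarshalKey` calls suggest this), and `github.com/mcuadros/go-defaults` fills struct defaults before the matching config section is decoded on top. Below is a minimal, self-contained sketch of that pattern under those assumptions; `ExampleReaderOptions` and every field except `Address` are illustrative, not the project's actual types.

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/mcuadros/go-defaults"
	"github.com/spf13/viper"
)

// ExampleReaderOptions is a hypothetical options struct standing in for
// reader.SyncStandaloneReaderOptions; only Address is known from the diff.
type ExampleReaderOptions struct {
	Address  string `mapstructure:"address" default:"127.0.0.1:6379"`
	Username string `mapstructure:"username"`
	Password string `mapstructure:"password"`
}

func main() {
	v := viper.New()
	v.SetConfigType("toml")
	// Hypothetical TOML snippet; the section name matches the key main.go checks.
	cfg := `
[SyncStandaloneReader]
address = "127.0.0.1:6380"
`
	if err := v.ReadConfig(bytes.NewBufferString(cfg)); err != nil {
		panic(err)
	}

	if v.IsSet("SyncStandaloneReader") {
		opts := new(ExampleReaderOptions)
		// Fill zero-valued fields from the `default` struct tags first ...
		defaults.SetDefaults(opts)
		// ... then let the config section override whatever keys it provides.
		if err := v.UnmarshalKey("SyncStandaloneReader", opts); err != nil {
			panic(err)
		}
		fmt.Printf("reader address=%q username=%q\n", opts.Address, opts.Username)
	}
}
```

Applying defaults before `UnmarshalKey` means a config section only needs to list the keys it overrides; keys absent from the TOML keep their tagged defaults, which mirrors how each reader/writer branch in main.go constructs its options.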