marcel-dempers 2020-08-05 18:00:53 +10:00
parent 2b14f61e87
commit 4b8f92df85
5 changed files with 85 additions and 83 deletions

client.go

@@ -1,66 +1,66 @@
package main

import (
	"fmt"
	"net/http"
	"github.com/julienschmidt/httprouter"
	log "github.com/sirupsen/logrus"
	"os"
	"github.com/go-redis/redis/v8"
	"context"
	"strconv"
)

var redis_host = os.Getenv("REDIS_HOST")
var redis_port = os.Getenv("REDIS_PORT")
var redis_password = os.Getenv("REDIS_PASSWORD")
var ctx = context.Background()
var rdb *redis.Client
var counter = 0

func main() {
	r := redis.NewClient(&redis.Options{
		Addr:     redis_host + ":" + redis_port,
		Password: redis_password, // no password set
		DB:       0,              // use default DB
	})
	rdb = r

	router := httprouter.New()
	router.GET("/", func(w http.ResponseWriter, r *http.Request, p httprouter.Params) {
		increment_redis_key(w, r, p)
	})

	fmt.Println("Running...")
	log.Fatal(http.ListenAndServe(":80", router))
}

func increment_redis_key(writer http.ResponseWriter, request *http.Request, p httprouter.Params) {
	val, err := rdb.Get(ctx, "counter").Result()
	if err == redis.Nil {
		err := rdb.Set(ctx, "counter", 1, 0).Err()
		counter++
		if err != nil {
			panic(err)
		}
	} else if err != nil {
		panic(err)
	} else {
		counter, _ = strconv.Atoi(val)
		counter++
		err := rdb.Set(ctx, "counter", counter, 0).Err()
		if err != nil {
			panic(err)
		}
	}
	fmt.Fprint(writer, counter)
	fmt.Println("counter", counter)
}
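Worth noting about the handler above: the Get-then-Set sequence is not atomic, so two concurrent requests can read the same value and write back the same result. Redis offers INCR for exactly this case, and go-redis v8 exposes it as `Incr`. Below is a minimal sketch of an alternative handler, assuming the same package, imports, and globals (`rdb`, `ctx`) as client.go above; the function name is illustrative and not part of this commit.

```go
// Sketch only, not part of this commit: increment the counter atomically with INCR.
func incrementCounterAtomic(writer http.ResponseWriter, request *http.Request, p httprouter.Params) {
	// INCR creates the key at 0 if it does not exist and increments it in a single
	// server-side operation, so there is no read-modify-write race between requests.
	newValue, err := rdb.Incr(ctx, "counter").Result()
	if err != nil {
		panic(err)
	}
	fmt.Fprint(writer, newValue)
	fmt.Println("counter", newValue)
}
```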

Dockerfile

@@ -1,17 +1,17 @@
FROM golang:1.14-alpine as build
RUN apk add --no-cache git
WORKDIR /src
COPY go.sum /src/
COPY go.mod /src/
COPY client.go /src
RUN go build client.go
FROM alpine as runtime
COPY --from=build /src/client /app/client
CMD [ "/app/client" ]

go.mod

@@ -1,4 +1,4 @@
-module github.com/my/repo
+module example.com/hello
go 1.14

go.sum

@@ -41,7 +41,6 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=

readme.md

@@ -79,3 +79,6 @@ aimvector/redis-client:v1.0.0
```
## Redis Replication and High Availability
Let's move on to the [clustering](./clustering/readme.md) section.