Always enable caches (#28527)
Nowadays the cache is used almost everywhere in Gitea and cannot really be turned off; disabling it would make some features unavailable. So this change removes the ENABLED option for the cache entirely, which means the cache can no longer be disabled. The remaining cache configuration is unchanged and still controls how Gitea uses the cache (adapter, connection, TTL, and so on).
parent 4eb2a29910, commit e7cb8da2a8
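To make the effect concrete, the recurring call-site change in the diff below looks roughly like this. This is a minimal sketch only, not code from the commit: the cache type and helper names are invented, and only the shape of the guard removal is real.

    package main

    import "log"

    // toyCache is a stand-in for Gitea's cache; only Put is sketched here.
    type toyCache struct{ m map[string]any }

    func (c *toyCache) Put(key string, val any, ttlSec int64) error {
        c.m[key] = val // a real cache would honour ttlSec
        return nil
    }

    // cacheEnabled mimics the old setting.CacheService.Enabled flag that this commit removes.
    var cacheEnabled = true

    // before: every call site had to guard on the ENABLED setting.
    func putResendLimitBefore(c *toyCache, name string) {
        if cacheEnabled {
            if err := c.Put("MailResendLimit_"+name, name, 180); err != nil {
                log.Printf("Set cache(MailResendLimit) fail: %v", err)
            }
        }
    }

    // after: the cache always exists, so the guard disappears.
    func putResendLimitAfter(c *toyCache, name string) {
        if err := c.Put("MailResendLimit_"+name, name, 180); err != nil {
            log.Printf("Set cache(MailResendLimit) fail: %v", err)
        }
    }

    func main() {
        c := &toyCache{m: map[string]any{}}
        putResendLimitBefore(c, "someone")
        putResendLimitAfter(c, "someone")
    }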
@@ -1705,9 +1705,6 @@ LEVEL = Info
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;
-;; if the cache enabled
-;ENABLED = true
-;;
 ;; Either "memory", "redis", "memcache", or "twoqueue". default is "memory"
 ;ADAPTER = memory
 ;;
@@ -1732,8 +1729,6 @@ LEVEL = Info
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;[cache.last_commit]
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;; if the cache enabled
-;ENABLED = true
 ;;
 ;; Time to keep items in cache if not used, default is 8760 hours.
 ;; Setting it to -1 disables caching
@@ -763,7 +763,6 @@ and
 
 ## Cache (`cache`)
 
-- `ENABLED`: **true**: Enable the cache.
 - `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, `redis-cluster`, `twoqueue` or `memcache`. (`twoqueue` represents a size limited LRU cache.)
 - `INTERVAL`: **60**: Garbage Collection interval (sec), for memory and twoqueue cache only.
 - `HOST`: **_empty_**: Connection string for `redis`, `redis-cluster` and `memcache`. For `twoqueue` sets configuration for the queue.
@@ -775,7 +774,6 @@ and
 
 ## Cache - LastCommitCache settings (`cache.last_commit`)
 
-- `ENABLED`: **true**: Enable the cache.
 - `ITEM_TTL`: **8760h**: Time to keep items in cache if not used, Setting it to -1 disables caching.
 - `COMMITS_COUNT`: **1000**: Only enable the cache when repository's commits count great than.
 
@@ -721,7 +721,6 @@ Gitea 创建以下非唯一队列:
 
 ## 缓存 (`cache`)
 
-- `ENABLED`: **true**: 是否启用缓存。
 - `ADAPTER`: **memory**: 缓存引擎,可以为 `memory`, `redis`, `redis-cluster`, `twoqueue` 和 `memcache`. (`twoqueue` 代表缓冲区固定的LRU缓存)
 - `INTERVAL`: **60**: 垃圾回收间隔(秒),只对`memory`和`towqueue`有效。
 - `HOST`: **_empty_**: 缓存配置。`redis`, `redis-cluster`,`memcache`配置连接字符串;`twoqueue` 设置队列参数
@@ -733,7 +732,6 @@ Gitea 创建以下非唯一队列:
 
 ### 缓存 - 最后提交缓存设置 (`cache.last_commit`)
 
-- `ENABLED`: **true**:是否启用缓存。
 - `ITEM_TTL`: **8760h**:如果未使用,保持缓存中的项目的时间,将其设置为 -1 会禁用缓存。
 - `COMMITS_COUNT`: **1000**:仅在存储库的提交计数大于时启用缓存。
 
modules/cache/cache.go (vendored, 6 changed lines)
@@ -24,11 +24,11 @@ func newCache(cacheConfig setting.Cache) (mc.Cache, error) {
 	})
 }
 
-// NewContext start cache service
-func NewContext() error {
+// Init start cache service
+func Init() error {
 	var err error
 
-	if conn == nil && setting.CacheService.Enabled {
+	if conn == nil {
 		if conn, err = newCache(setting.CacheService.Cache); err != nil {
 			return err
 		}
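With `NewContext` renamed to `Init` and the `Enabled` guard dropped, startup initializes the cache once and everything after that may assume it exists. A small sketch of that usage, assuming only calls that appear elsewhere in this diff (`cache.Init` here, `cache.GetCache().Ping()` in the healthcheck hunk further down); this is illustrative, not code from the commit:

    package example

    import "code.gitea.io/gitea/modules/cache"

    // initAndProbeCache initializes the cache once at startup and then pings it,
    // with no "is the cache enabled?" check anywhere.
    func initAndProbeCache() error {
        if err := cache.Init(); err != nil { // replaces the old cache.NewContext()
            return err
        }
        return cache.GetCache().Ping()
    }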
modules/cache/cache_test.go (vendored, 4 changed lines)
@@ -22,9 +22,9 @@ func createTestCache() {
 }
 
 func TestNewContext(t *testing.T) {
-	assert.NoError(t, NewContext())
+	assert.NoError(t, Init())
 
-	setting.CacheService.Cache = setting.Cache{Enabled: true, Adapter: "redis", Conn: "some random string"}
+	setting.CacheService.Cache = setting.Cache{Adapter: "redis", Conn: "some random string"}
 	con, err := newCache(setting.Cache{
 		Adapter: "rand",
 		Conn:    "false conf",
@@ -39,7 +39,7 @@ func NewLastCommitCache(count int64, repoPath string, gitRepo *Repository, cache
 	if cache == nil {
 		return nil
 	}
-	if !setting.CacheService.LastCommit.Enabled || count < setting.CacheService.LastCommit.CommitsCount {
+	if count < setting.CacheService.LastCommit.CommitsCount {
 		return nil
 	}
 
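After this hunk the last-commit cache is gated only by the commit count (and by the caller passing a non-nil cache); there is no separate ENABLED switch for `[cache.last_commit]` any more. A tiny illustrative sketch of the remaining gate; the helper name is hypothetical and the real check lives inline in `NewLastCommitCache` as shown above:

    package example

    // shouldUseLastCommitCache mirrors the remaining condition: the threshold
    // corresponds to setting.CacheService.LastCommit.CommitsCount
    // (default 1000, per the settings diff below).
    func shouldUseLastCommitCache(commitsCount, threshold int64) bool {
        return commitsCount >= threshold
    }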
@@ -12,7 +12,6 @@ import (
 
 // Cache represents cache settings
 type Cache struct {
-	Enabled  bool
 	Adapter  string
 	Interval int
 	Conn     string
@@ -24,23 +23,19 @@ var CacheService = struct {
 	Cache `ini:"cache"`
 
 	LastCommit struct {
-		Enabled      bool
 		TTL          time.Duration `ini:"ITEM_TTL"`
 		CommitsCount int64
 	} `ini:"cache.last_commit"`
 }{
 	Cache: Cache{
-		Enabled:  true,
 		Adapter:  "memory",
 		Interval: 60,
 		TTL:      16 * time.Hour,
 	},
 	LastCommit: struct {
-		Enabled      bool
 		TTL          time.Duration `ini:"ITEM_TTL"`
 		CommitsCount int64
 	}{
-		Enabled:      true,
 		TTL:          8760 * time.Hour,
 		CommitsCount: 1000,
 	},
@@ -65,30 +60,12 @@ func loadCacheFrom(rootCfg ConfigProvider) {
 		if CacheService.Conn == "" {
 			CacheService.Conn = "50000"
 		}
-	case "": // disable cache
-		CacheService.Enabled = false
 	default:
 		log.Fatal("Unknown cache adapter: %s", CacheService.Adapter)
 	}
 
-	if CacheService.Enabled {
-		log.Info("Cache Service Enabled")
-	} else {
-		log.Warn("Cache Service Disabled so that captcha disabled too")
-		// captcha depends on cache service
-		Service.EnableCaptcha = false
-	}
-
 	sec = rootCfg.Section("cache.last_commit")
-	if !CacheService.Enabled {
-		CacheService.LastCommit.Enabled = false
-	}
-
 	CacheService.LastCommit.CommitsCount = sec.Key("COMMITS_COUNT").MustInt64(1000)
-
-	if CacheService.LastCommit.Enabled {
-		log.Info("Last Commit Cache Service Enabled")
-	}
 }
 
 // TTLSeconds returns the TTLSeconds or unix timestamp for memcache
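One behavioural detail of this hunk: the removed `case "":` branch used to mean "cache disabled", so an empty `ADAPTER` now falls through to `default:` and aborts startup, and captcha is no longer silently switched off when the cache is off. A condensed sketch of that control flow; the adapter list is assumed from the config cheat sheet above, and the real code uses Gitea's own logger plus extra per-adapter setup (e.g. the "50000" default Conn for "twoqueue"):

    package example

    import "log"

    // validateAdapter is illustrative only, not the verbatim loadCacheFrom source.
    func validateAdapter(adapter string) {
        switch adapter {
        case "memory", "redis", "redis-cluster", "twoqueue", "memcache":
            // supported adapters, per the documentation hunks above
        default:
            // "" used to disable the cache here; now it is simply unknown
            log.Fatalf("Unknown cache adapter: %s", adapter)
        }
    }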
@@ -29,10 +29,9 @@ func NodeInfo(ctx *context.APIContext) {
 
 	nodeInfoUsage := structs.NodeInfoUsage{}
 	if setting.Federation.ShareUserStatistics {
-		cached := false
-		if setting.CacheService.Enabled {
-			nodeInfoUsage, cached = ctx.Cache.Get(cacheKeyNodeInfoUsage).(structs.NodeInfoUsage)
-		}
+		var cached bool
+		nodeInfoUsage, cached = ctx.Cache.Get(cacheKeyNodeInfoUsage).(structs.NodeInfoUsage)
+
 		if !cached {
 			usersTotal := int(user_model.CountUsers(ctx, nil))
 			now := time.Now()
@@ -53,14 +52,13 @@ func NodeInfo(ctx *context.APIContext) {
 				LocalPosts:    int(allIssues),
 				LocalComments: int(allComments),
 			}
-			if setting.CacheService.Enabled {
-				if err := ctx.Cache.Put(cacheKeyNodeInfoUsage, nodeInfoUsage, 180); err != nil {
-					ctx.InternalServerError(err)
-					return
-				}
+			if err := ctx.Cache.Put(cacheKeyNodeInfoUsage, nodeInfoUsage, 180); err != nil {
+				ctx.InternalServerError(err)
+				return
 			}
 		}
 	}
 
 	nodeInfo := &structs.NodeInfo{
 		Version: "2.1",
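The nodeinfo change shows the read-through pattern that now applies unconditionally: a typed `Get`, recompute on a miss, then `Put` with a short TTL. A generic, self-contained sketch of that pattern with invented names (not Gitea's actual API):

    package example

    import "time"

    // statsCache is a minimal stand-in for ctx.Cache as used in the hunk above.
    type statsCache interface {
        Get(key string) any
        Put(key string, val any, ttlSec int64) error
    }

    type usageStats struct {
        Users int
        At    time.Time
    }

    // getUsage follows the same shape as NodeInfo after this change: the type
    // assertion on Get reports a miss, and the recomputed value is cached for 180s.
    func getUsage(c statsCache, compute func() usageStats) (usageStats, error) {
        usage, cached := c.Get("nodeInfoUsage").(usageStats)
        if !cached {
            usage = compute()
            if err := c.Put("nodeInfoUsage", usage, 180); err != nil {
                return usage, err
            }
        }
        return usage, nil
    }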
@@ -118,7 +118,7 @@ func InitWebInstalled(ctx context.Context) {
 	mustInit(storage.Init)
 
 	mailer.NewContext(ctx)
-	mustInit(cache.NewContext)
+	mustInit(cache.Init)
 	mustInit(feed_service.Init)
 	mustInit(uinotification.Init)
 	mustInitCtx(ctx, archiver.Init)
@@ -622,11 +622,9 @@ func handleUserCreated(ctx *context.Context, u *user_model.User, gothUser *goth.
 		ctx.Data["ActiveCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)
 		ctx.HTML(http.StatusOK, TplActivate)
 
-		if setting.CacheService.Enabled {
-			if err := ctx.Cache.Put("MailResendLimit_"+u.LowerName, u.LowerName, 180); err != nil {
-				log.Error("Set cache(MailResendLimit) fail: %v", err)
-			}
+		if err := ctx.Cache.Put("MailResendLimit_"+u.LowerName, u.LowerName, 180); err != nil {
+			log.Error("Set cache(MailResendLimit) fail: %v", err)
 		}
 		return false
 	}
 
@@ -645,18 +643,16 @@ func Activate(ctx *context.Context) {
 	}
 	// Resend confirmation email.
 	if setting.Service.RegisterEmailConfirm {
-		if setting.CacheService.Enabled && ctx.Cache.IsExist("MailResendLimit_"+ctx.Doer.LowerName) {
+		if ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName) {
 			ctx.Data["ResendLimited"] = true
 		} else {
 			ctx.Data["ActiveCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)
 			mailer.SendActivateAccountMail(ctx.Locale, ctx.Doer)
 
-			if setting.CacheService.Enabled {
-				if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
-					log.Error("Set cache(MailResendLimit) fail: %v", err)
-				}
+			if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+				log.Error("Set cache(MailResendLimit) fail: %v", err)
 			}
 		}
 	} else {
 		ctx.Data["ServiceNotEnabled"] = true
 	}
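The `MailResendLimit_<username>` keys touched in these auth hunks act as a simple rate limit: `IsExist` blocks a resend while the key is present, and `Put` with a 180-second timeout arms it. A self-contained sketch of that idea; the cache interface here is an invented stand-in for Gitea's `ctx.Cache`:

    package example

    // resendCache is a stand-in for the ctx.Cache methods used in the hunks above.
    type resendCache interface {
        IsExist(key string) bool
        Put(key string, val any, ttlSec int64) error
    }

    // tryArmResendLimit reports whether a confirmation mail may be sent now and,
    // if so, arms the 180-second resend limit, mirroring the pattern above.
    func tryArmResendLimit(c resendCache, lowerName string) (allowed bool, err error) {
        key := "MailResendLimit_" + lowerName
        if c.IsExist(key) {
            return false, nil // still inside the limit window
        }
        return true, c.Put(key, lowerName, 180)
    }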
@@ -789,7 +785,7 @@ func ActivateEmail(ctx *context.Context) {
 
 	if u, err := user_model.GetUserByID(ctx, email.UID); err != nil {
 		log.Warn("GetUserByID: %d", email.UID)
-	} else if setting.CacheService.Enabled {
+	} else {
 		// Allow user to validate more emails
 		_ = ctx.Cache.Delete("MailResendLimit_" + u.LowerName)
 	}
@@ -79,7 +79,7 @@ func ForgotPasswdPost(ctx *context.Context) {
 		return
 	}
 
-	if setting.CacheService.Enabled && ctx.Cache.IsExist("MailResendLimit_"+u.LowerName) {
+	if ctx.Cache.IsExist("MailResendLimit_" + u.LowerName) {
 		ctx.Data["ResendLimited"] = true
 		ctx.HTML(http.StatusOK, tplForgotPassword)
 		return
@@ -87,11 +87,9 @@ func ForgotPasswdPost(ctx *context.Context) {
 
 	mailer.SendResetPasswordMail(u)
 
-	if setting.CacheService.Enabled {
-		if err = ctx.Cache.Put("MailResendLimit_"+u.LowerName, u.LowerName, 180); err != nil {
-			log.Error("Set cache(MailResendLimit) fail: %v", err)
-		}
+	if err = ctx.Cache.Put("MailResendLimit_"+u.LowerName, u.LowerName, 180); err != nil {
+		log.Error("Set cache(MailResendLimit) fail: %v", err)
 	}
 
 	ctx.Data["ResetPwdCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, ctx.Locale)
 	ctx.Data["IsResetSent"] = true
@@ -121,10 +121,6 @@ func checkDatabase(ctx context.Context, checks checks) status {
 
 // cache checks gitea cache status
 func checkCache(checks checks) status {
-	if !setting.CacheService.Enabled {
-		return pass
-	}
-
 	st := componentStatus{}
 	if err := cache.GetCache().Ping(); err != nil {
 		st.Status = fail
@@ -105,7 +105,7 @@ func EmailPost(ctx *context.Context) {
 	// Send activation Email
 	if ctx.FormString("_method") == "SENDACTIVATION" {
 		var address string
-		if setting.CacheService.Enabled && ctx.Cache.IsExist("MailResendLimit_"+ctx.Doer.LowerName) {
+		if ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName) {
 			log.Error("Send activation: activation still pending")
 			ctx.Redirect(setting.AppSubURL + "/user/settings/account")
 			return
@@ -141,11 +141,10 @@ func EmailPost(ctx *context.Context) {
 		}
 		address = email.Email
 
-		if setting.CacheService.Enabled {
-			if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
-				log.Error("Set cache(MailResendLimit) fail: %v", err)
-			}
+		if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+			log.Error("Set cache(MailResendLimit) fail: %v", err)
 		}
+
 		ctx.Flash.Info(ctx.Tr("settings.add_email_confirmation_sent", address, timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)))
 		ctx.Redirect(setting.AppSubURL + "/user/settings/account")
 		return
@@ -204,11 +203,10 @@ func EmailPost(ctx *context.Context) {
 	// Send confirmation email
 	if setting.Service.RegisterEmailConfirm {
 		mailer.SendActivateEmailMail(ctx.Doer, email)
-		if setting.CacheService.Enabled {
-			if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
-				log.Error("Set cache(MailResendLimit) fail: %v", err)
-			}
+		if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+			log.Error("Set cache(MailResendLimit) fail: %v", err)
 		}
+
 		ctx.Flash.Info(ctx.Tr("settings.add_email_confirmation_sent", email.Email, timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)))
 	} else {
 		ctx.Flash.Success(ctx.Tr("settings.add_email_success"))
@@ -276,7 +274,7 @@ func loadAccountData(ctx *context.Context) {
 		user_model.EmailAddress
 		CanBePrimary bool
 	}
-	pendingActivation := setting.CacheService.Enabled && ctx.Cache.IsExist("MailResendLimit_"+ctx.Doer.LowerName)
+	pendingActivation := ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName)
 	emails := make([]*UserEmail, len(emlist))
 	for i, em := range emlist {
 		var email UserEmail
@@ -9,15 +9,10 @@ import (
 	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/modules/cache"
 	"code.gitea.io/gitea/modules/git"
-	"code.gitea.io/gitea/modules/setting"
 )
 
 // CacheRef cachhe last commit information of the branch or the tag
 func CacheRef(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, fullRefName git.RefName) error {
-	if !setting.CacheService.LastCommit.Enabled {
-		return nil
-	}
-
 	commit, err := gitRepo.GetCommit(fullRefName.String())
 	if err != nil {
 		return err