author | Anthony Wang | 2023-02-11 23:52:36 +0000
committer | Anthony Wang | 2023-02-11 23:52:36 +0000
commit | e61e9fba59d6e6f0eb86869e07d9d52867384132 (patch)
tree | c6072319ab1b1429eb4132c9797ab4c6d6ffd760 /services
parent | 1a54d5e8970f2ff6ffe3aeaa19b3917f5e7dc9fd (diff)
parent | 1cb8d14bf71e0b8637c9eaa10808b4fd05139f45 (diff)
Merge remote-tracking branch 'origin/main' into forgejo-federation
Diffstat (limited to 'services')
-rw-r--r-- | services/activitypub/keypair.go | 47
-rw-r--r-- | services/activitypub/keypair_test.go | 61
-rw-r--r-- | services/activitypub/user_settings.go | 5
-rw-r--r-- | services/auth/middleware.go | 60
-rw-r--r-- | services/auth/source/ldap/source_authenticate.go | 96
-rw-r--r-- | services/auth/source/ldap/source_group_sync.go | 94
-rw-r--r-- | services/auth/source/ldap/source_search.go | 136
-rw-r--r-- | services/auth/source/ldap/source_sync.go | 11
-rw-r--r-- | services/auth/source/oauth2/source.go | 23
-rw-r--r-- | services/auth/source/source_group_sync.go | 116
-rw-r--r-- | services/cron/tasks_basic.go | 4
-rw-r--r-- | services/forms/auth_form.go | 4
-rw-r--r-- | services/forms/package_form.go | 2
-rw-r--r-- | services/mirror/mirror_pull.go | 4
-rw-r--r-- | services/packages/cargo/index.go | 290
-rw-r--r-- | services/packages/cleanup/cleanup.go | 154
-rw-r--r-- | services/packages/container/cleanup.go | 5
-rw-r--r-- | services/packages/packages.go | 123
-rw-r--r-- | services/pull/merge.go | 3
-rw-r--r-- | services/pull/pull.go | 36
-rw-r--r-- | services/pull/update.go | 2
21 files changed, 778 insertions, 498 deletions
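The largest structural move in this diff is that services/activitypub/keypair.go is deleted and its only caller, user_settings.go, switches to util.GenerateKeyPair(rsaBits). A minimal standalone sketch of that key generation, reconstructed from the deleted file (the exact modules/util signature is assumed from the call site and is not shown in this diff):

```go
// Sketch of the RSA keypair generation that moves out of services/activitypub.
// The helper name and error handling here are illustrative.
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"fmt"
)

// generateKeyPair returns PEM-encoded private and public keys.
func generateKeyPair(bits int) (string, string, error) {
	priv, err := rsa.GenerateKey(rand.Reader, bits)
	if err != nil {
		return "", "", err
	}
	// PKCS#1 private key, wrapped in a PEM block.
	privPem := pem.EncodeToMemory(&pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(priv),
	})
	// PKIX public key, wrapped in a PEM block.
	pubASN1, err := x509.MarshalPKIXPublicKey(&priv.PublicKey)
	if err != nil {
		return "", "", err
	}
	pubPem := pem.EncodeToMemory(&pem.Block{
		Type:  "PUBLIC KEY",
		Bytes: pubASN1,
	})
	return string(privPem), string(pubPem), nil
}

func main() {
	priv, pub, err := generateKeyPair(2048)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(priv) > 0, len(pub) > 0)
}
```

The PEM block types ("RSA PRIVATE KEY" and "PUBLIC KEY") match what the removed keypair_test.go asserted.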
diff --git a/services/activitypub/keypair.go b/services/activitypub/keypair.go deleted file mode 100644 index 299bdc43e..000000000 --- a/services/activitypub/keypair.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2021 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package activitypub - -import ( - "crypto/rand" - "crypto/rsa" - "crypto/x509" - "encoding/pem" -) - -const rsaBits = 2048 - -// GenerateKeyPair generates a public and private keypair for signing actions by users for activitypub purposes -func GenerateKeyPair() (string, string, error) { - priv, _ := rsa.GenerateKey(rand.Reader, rsaBits) - privPem, err := pemBlockForPriv(priv) - if err != nil { - return "", "", err - } - pubPem, err := pemBlockForPub(&priv.PublicKey) - if err != nil { - return "", "", err - } - return privPem, pubPem, nil -} - -func pemBlockForPriv(priv *rsa.PrivateKey) (string, error) { - privBytes := pem.EncodeToMemory(&pem.Block{ - Type: "RSA PRIVATE KEY", - Bytes: x509.MarshalPKCS1PrivateKey(priv), - }) - return string(privBytes), nil -} - -func pemBlockForPub(pub *rsa.PublicKey) (string, error) { - pubASN1, err := x509.MarshalPKIXPublicKey(pub) - if err != nil { - return "", err - } - pubBytes := pem.EncodeToMemory(&pem.Block{ - Type: "PUBLIC KEY", - Bytes: pubASN1, - }) - return string(pubBytes), nil -} diff --git a/services/activitypub/keypair_test.go b/services/activitypub/keypair_test.go deleted file mode 100644 index 888254c9d..000000000 --- a/services/activitypub/keypair_test.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2021 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package activitypub - -import ( - "crypto" - "crypto/rand" - "crypto/rsa" - "crypto/sha256" - "crypto/x509" - "encoding/pem" - "regexp" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestKeygen(t *testing.T) { - priv, pub, err := GenerateKeyPair() - assert.NoError(t, err) - - assert.NotEmpty(t, priv) - assert.NotEmpty(t, pub) - - assert.Regexp(t, regexp.MustCompile("^-----BEGIN RSA PRIVATE KEY-----.*"), priv) - assert.Regexp(t, regexp.MustCompile("^-----BEGIN PUBLIC KEY-----.*"), pub) -} - -func TestSignUsingKeys(t *testing.T) { - priv, pub, err := GenerateKeyPair() - assert.NoError(t, err) - - privPem, _ := pem.Decode([]byte(priv)) - if privPem == nil || privPem.Type != "RSA PRIVATE KEY" { - t.Fatal("key is wrong type") - } - - privParsed, err := x509.ParsePKCS1PrivateKey(privPem.Bytes) - assert.NoError(t, err) - - pubPem, _ := pem.Decode([]byte(pub)) - if pubPem == nil || pubPem.Type != "PUBLIC KEY" { - t.Fatal("key failed to decode") - } - - pubParsed, err := x509.ParsePKIXPublicKey(pubPem.Bytes) - assert.NoError(t, err) - - // Sign - msg := "activity pub is great!" 
- h := sha256.New() - h.Write([]byte(msg)) - d := h.Sum(nil) - sig, err := rsa.SignPKCS1v15(rand.Reader, privParsed, crypto.SHA256, d) - assert.NoError(t, err) - - // Verify - err = rsa.VerifyPKCS1v15(pubParsed.(*rsa.PublicKey), crypto.SHA256, d, sig) - assert.NoError(t, err) -} diff --git a/services/activitypub/user_settings.go b/services/activitypub/user_settings.go index ec5fa5984..2d156c17e 100644 --- a/services/activitypub/user_settings.go +++ b/services/activitypub/user_settings.go @@ -5,8 +5,11 @@ package activitypub import ( user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/util" ) +const rsaBits = 2048 + // GetKeyPair function returns a user's private and public keys func GetKeyPair(user *user_model.User) (pub, priv string, err error) { var settings map[string]*user_model.Setting @@ -14,7 +17,7 @@ func GetKeyPair(user *user_model.User) (pub, priv string, err error) { if err != nil { return } else if len(settings) == 0 { - if priv, pub, err = GenerateKeyPair(); err != nil { + if priv, pub, err = util.GenerateKeyPair(rsaBits); err != nil { return } if err = user_model.SetUserSetting(user.ID, user_model.UserActivityPubPrivPem, priv); err != nil { diff --git a/services/auth/middleware.go b/services/auth/middleware.go new file mode 100644 index 000000000..cccaab299 --- /dev/null +++ b/services/auth/middleware.go @@ -0,0 +1,60 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package auth + +import ( + "net/http" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/web/middleware" +) + +// Auth is a middleware to authenticate a web user +func Auth(authMethod Method) func(*context.Context) { + return func(ctx *context.Context) { + if err := authShared(ctx, authMethod); err != nil { + log.Error("Failed to verify user: %v", err) + ctx.Error(http.StatusUnauthorized, "Verify") + return + } + if ctx.Doer == nil { + // ensure the session uid is deleted + _ = ctx.Session.Delete("uid") + } + } +} + +// APIAuth is a middleware to authenticate an api user +func APIAuth(authMethod Method) func(*context.APIContext) { + return func(ctx *context.APIContext) { + if err := authShared(ctx.Context, authMethod); err != nil { + ctx.Error(http.StatusUnauthorized, "APIAuth", err) + } + } +} + +func authShared(ctx *context.Context, authMethod Method) error { + var err error + ctx.Doer, err = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + if err != nil { + return err + } + if ctx.Doer != nil { + if ctx.Locale.Language() != ctx.Doer.Language { + ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req) + } + ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == BasicMethodName + ctx.IsSigned = true + ctx.Data["IsSigned"] = ctx.IsSigned + ctx.Data["SignedUser"] = ctx.Doer + ctx.Data["SignedUserID"] = ctx.Doer.ID + ctx.Data["SignedUserName"] = ctx.Doer.Name + ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin + } else { + ctx.Data["SignedUserID"] = int64(0) + ctx.Data["SignedUserName"] = "" + } + return nil +} diff --git a/services/auth/source/ldap/source_authenticate.go b/services/auth/source/ldap/source_authenticate.go index 321cf5540..fba8da793 100644 --- a/services/auth/source/ldap/source_authenticate.go +++ b/services/auth/source/ldap/source_authenticate.go @@ -10,9 +10,10 @@ import ( asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/auth" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" + 
auth_module "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/util" + source_service "code.gitea.io/gitea/services/auth/source" "code.gitea.io/gitea/services/mailer" user_service "code.gitea.io/gitea/services/user" ) @@ -64,61 +65,66 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str } if user != nil { - if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - orgCache := make(map[string]*organization.Organization) - teamCache := make(map[string]*organization.Team) - source.SyncLdapGroupsToTeams(user, sr.LdapTeamAdd, sr.LdapTeamRemove, orgCache, teamCache) - } if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(user, source.authSource, sr.SSHPublicKey) { - return user, asymkey_model.RewriteAllPublicKeys() + if err := asymkey_model.RewriteAllPublicKeys(); err != nil { + return user, err + } + } + } else { + // Fallback. + if len(sr.Username) == 0 { + sr.Username = userName } - return user, nil - } - - // Fallback. - if len(sr.Username) == 0 { - sr.Username = userName - } - if len(sr.Mail) == 0 { - sr.Mail = fmt.Sprintf("%s@localhost", sr.Username) - } + if len(sr.Mail) == 0 { + sr.Mail = fmt.Sprintf("%s@localhost", sr.Username) + } - user = &user_model.User{ - LowerName: strings.ToLower(sr.Username), - Name: sr.Username, - FullName: composeFullName(sr.Name, sr.Surname, sr.Username), - Email: sr.Mail, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, - LoginName: userName, - IsAdmin: sr.IsAdmin, - } - overwriteDefault := &user_model.CreateUserOverwriteOptions{ - IsRestricted: util.OptionalBoolOf(sr.IsRestricted), - IsActive: util.OptionalBoolTrue, - } + user = &user_model.User{ + LowerName: strings.ToLower(sr.Username), + Name: sr.Username, + FullName: composeFullName(sr.Name, sr.Surname, sr.Username), + Email: sr.Mail, + LoginType: source.authSource.Type, + LoginSource: source.authSource.ID, + LoginName: userName, + IsAdmin: sr.IsAdmin, + } + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsRestricted: util.OptionalBoolOf(sr.IsRestricted), + IsActive: util.OptionalBoolTrue, + } - err := user_model.CreateUser(user, overwriteDefault) - if err != nil { - return user, err - } + err := user_model.CreateUser(user, overwriteDefault) + if err != nil { + return user, err + } - mailer.SendRegisterNotifyMail(user) + mailer.SendRegisterNotifyMail(user) - if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(user, source.authSource, sr.SSHPublicKey) { - err = asymkey_model.RewriteAllPublicKeys() - } - if err == nil && len(source.AttributeAvatar) > 0 { - _ = user_service.UploadAvatar(user, sr.Avatar) + if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(user, source.authSource, sr.SSHPublicKey) { + if err := asymkey_model.RewriteAllPublicKeys(); err != nil { + return user, err + } + } + if len(source.AttributeAvatar) > 0 { + if err := user_service.UploadAvatar(user, sr.Avatar); err != nil { + return user, err + } + } } + if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - orgCache := make(map[string]*organization.Organization) - teamCache := make(map[string]*organization.Team) - source.SyncLdapGroupsToTeams(user, sr.LdapTeamAdd, sr.LdapTeamRemove, orgCache, teamCache) + groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap) + if err != nil { + return user, err + } + if err := source_service.SyncGroupsToTeams(db.DefaultContext, user, sr.Groups, groupTeamMapping, 
source.GroupTeamMapRemoval); err != nil { + return user, err + } } - return user, err + return user, nil } // IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication diff --git a/services/auth/source/ldap/source_group_sync.go b/services/auth/source/ldap/source_group_sync.go deleted file mode 100644 index 95a608492..000000000 --- a/services/auth/source/ldap/source_group_sync.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2021 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package ldap - -import ( - "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/organization" - user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/log" -) - -// SyncLdapGroupsToTeams maps LDAP groups to organization and team memberships -func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, ldapTeamRemove map[string][]string, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) { - var err error - if source.GroupsEnabled && source.GroupTeamMapRemoval { - // when the user is not a member of configs LDAP group, remove mapped organizations/teams memberships - removeMappedMemberships(user, ldapTeamRemove, orgCache, teamCache) - } - for orgName, teamNames := range ldapTeamAdd { - org, ok := orgCache[orgName] - if !ok { - org, err = organization.GetOrgByName(orgName) - if err != nil { - // organization must be created before LDAP group sync - log.Warn("LDAP group sync: Could not find organisation %s: %v", orgName, err) - continue - } - orgCache[orgName] = org - } - - for _, teamName := range teamNames { - team, ok := teamCache[orgName+teamName] - if !ok { - team, err = org.GetTeam(teamName) - if err != nil { - // team must be created before LDAP group sync - log.Warn("LDAP group sync: Could not find team %s: %v", teamName, err) - continue - } - teamCache[orgName+teamName] = team - } - if isMember, err := organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID); !isMember && err == nil { - log.Trace("LDAP group sync: adding user [%s] to team [%s]", user.Name, org.Name) - } else { - continue - } - err := models.AddTeamMember(team, user.ID) - if err != nil { - log.Error("LDAP group sync: Could not add user to team: %v", err) - } - } - } -} - -// remove membership to organizations/teams if user is not member of corresponding LDAP group -// e.g. 
lets assume user is member of LDAP group "x", but LDAP group team map contains LDAP groups "x" and "y" -// then users membership gets removed for all organizations/teams mapped by LDAP group "y" -func removeMappedMemberships(user *user_model.User, ldapTeamRemove map[string][]string, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) { - var err error - for orgName, teamNames := range ldapTeamRemove { - org, ok := orgCache[orgName] - if !ok { - org, err = organization.GetOrgByName(orgName) - if err != nil { - // organization must be created before LDAP group sync - log.Warn("LDAP group sync: Could not find organisation %s: %v", orgName, err) - continue - } - orgCache[orgName] = org - } - for _, teamName := range teamNames { - team, ok := teamCache[orgName+teamName] - if !ok { - team, err = org.GetTeam(teamName) - if err != nil { - // team must must be created before LDAP group sync - log.Warn("LDAP group sync: Could not find team %s: %v", teamName, err) - continue - } - } - if isMember, err := organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID); isMember && err == nil { - log.Trace("LDAP group sync: removing user [%s] from team [%s]", user.Name, org.Name) - } else { - continue - } - err = models.RemoveTeamMember(team, user.ID) - if err != nil { - log.Error("LDAP group sync: Could not remove user from team: %v", err) - } - } - } -} diff --git a/services/auth/source/ldap/source_search.go b/services/auth/source/ldap/source_search.go index 16f13029f..5a2d25b0c 100644 --- a/services/auth/source/ldap/source_search.go +++ b/services/auth/source/ldap/source_search.go @@ -11,26 +11,24 @@ import ( "strconv" "strings" - "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/util" "github.com/go-ldap/ldap/v3" ) // SearchResult : user data type SearchResult struct { - Username string // Username - Name string // Name - Surname string // Surname - Mail string // E-mail address - SSHPublicKey []string // SSH Public Key - IsAdmin bool // if user is administrator - IsRestricted bool // if user is restricted - LowerName string // LowerName - Avatar []byte - LdapTeamAdd map[string][]string // organizations teams to add - LdapTeamRemove map[string][]string // organizations teams to remove + Username string // Username + Name string // Name + Surname string // Surname + Mail string // E-mail address + SSHPublicKey []string // SSH Public Key + IsAdmin bool // if user is administrator + IsRestricted bool // if user is restricted + LowerName string // LowerName + Avatar []byte + Groups container.Set[string] } func (source *Source) sanitizedUserQuery(username string) (string, bool) { @@ -196,9 +194,8 @@ func checkRestricted(l *ldap.Conn, ls *Source, userDN string) bool { } // List all group memberships of a user -func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGroupFilter bool) []string { - var ldapGroups []string - var searchFilter string +func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGroupFilter bool) container.Set[string] { + ldapGroups := make(container.Set[string]) groupFilter, ok := source.sanitizedGroupFilter(source.GroupFilter) if !ok { @@ -210,12 +207,12 @@ func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGr return ldapGroups } + var searchFilter string if applyGroupFilter { searchFilter = fmt.Sprintf("(&(%s)(%s=%s))", groupFilter, source.GroupMemberUID, 
ldap.EscapeFilter(uid)) } else { searchFilter = fmt.Sprintf("(%s=%s)", source.GroupMemberUID, ldap.EscapeFilter(uid)) } - result, err := l.Search(ldap.NewSearchRequest( groupDN, ldap.ScopeWholeSubtree, @@ -237,44 +234,12 @@ func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGr log.Error("LDAP search was successful, but found no DN!") continue } - ldapGroups = append(ldapGroups, entry.DN) + ldapGroups.Add(entry.DN) } return ldapGroups } -// parse LDAP groups and return map of ldap groups to organizations teams -func (source *Source) mapLdapGroupsToTeams() map[string]map[string][]string { - ldapGroupsToTeams := make(map[string]map[string][]string) - err := json.Unmarshal([]byte(source.GroupTeamMap), &ldapGroupsToTeams) - if err != nil { - log.Error("Failed to unmarshall LDAP teams map: %v", err) - return ldapGroupsToTeams - } - return ldapGroupsToTeams -} - -// getMappedMemberships : returns the organizations and teams to modify the users membership -func (source *Source) getMappedMemberships(usersLdapGroups []string, uid string) (map[string][]string, map[string][]string) { - // unmarshall LDAP group team map from configs - ldapGroupsToTeams := source.mapLdapGroupsToTeams() - membershipsToAdd := map[string][]string{} - membershipsToRemove := map[string][]string{} - for group, memberships := range ldapGroupsToTeams { - isUserInGroup := util.SliceContainsString(usersLdapGroups, group) - if isUserInGroup { - for org, teams := range memberships { - membershipsToAdd[org] = teams - } - } else if !isUserInGroup { - for org, teams := range memberships { - membershipsToRemove[org] = teams - } - } - } - return membershipsToAdd, membershipsToRemove -} - func (source *Source) getUserAttributeListedInGroup(entry *ldap.Entry) string { if strings.ToLower(source.UserUID) == "dn" { return entry.DN @@ -399,23 +364,6 @@ func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchR surname := sr.Entries[0].GetAttributeValue(source.AttributeSurname) mail := sr.Entries[0].GetAttributeValue(source.AttributeMail) - teamsToAdd := make(map[string][]string) - teamsToRemove := make(map[string][]string) - - // Check group membership - if source.GroupsEnabled { - userAttributeListedInGroup := source.getUserAttributeListedInGroup(sr.Entries[0]) - usersLdapGroups := source.listLdapGroupMemberships(l, userAttributeListedInGroup, true) - - if source.GroupFilter != "" && len(usersLdapGroups) == 0 { - return nil - } - - if source.GroupTeamMap != "" || source.GroupTeamMapRemoval { - teamsToAdd, teamsToRemove = source.getMappedMemberships(usersLdapGroups, userAttributeListedInGroup) - } - } - if isAttributeSSHPublicKeySet { sshPublicKey = sr.Entries[0].GetAttributeValues(source.AttributeSSHPublicKey) } @@ -431,6 +379,17 @@ func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchR Avatar = sr.Entries[0].GetRawAttributeValue(source.AttributeAvatar) } + // Check group membership + var usersLdapGroups container.Set[string] + if source.GroupsEnabled { + userAttributeListedInGroup := source.getUserAttributeListedInGroup(sr.Entries[0]) + usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, true) + + if source.GroupFilter != "" && len(usersLdapGroups) == 0 { + return nil + } + } + if !directBind && source.AttributesInBind { // binds user (checking password) after looking-up attributes in BindDN context err = bindUser(l, userDN, passwd) @@ -440,17 +399,16 @@ func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchR } 
return &SearchResult{ - LowerName: strings.ToLower(username), - Username: username, - Name: firstname, - Surname: surname, - Mail: mail, - SSHPublicKey: sshPublicKey, - IsAdmin: isAdmin, - IsRestricted: isRestricted, - Avatar: Avatar, - LdapTeamAdd: teamsToAdd, - LdapTeamRemove: teamsToRemove, + LowerName: strings.ToLower(username), + Username: username, + Name: firstname, + Surname: surname, + Mail: mail, + SSHPublicKey: sshPublicKey, + IsAdmin: isAdmin, + IsRestricted: isRestricted, + Avatar: Avatar, + Groups: usersLdapGroups, } } @@ -512,33 +470,29 @@ func (source *Source) SearchEntries() ([]*SearchResult, error) { result := make([]*SearchResult, 0, len(sr.Entries)) for _, v := range sr.Entries { - teamsToAdd := make(map[string][]string) - teamsToRemove := make(map[string][]string) - + var usersLdapGroups container.Set[string] if source.GroupsEnabled { userAttributeListedInGroup := source.getUserAttributeListedInGroup(v) if source.GroupFilter != "" { - usersLdapGroups := source.listLdapGroupMemberships(l, userAttributeListedInGroup, true) + usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, true) if len(usersLdapGroups) == 0 { continue } } if source.GroupTeamMap != "" || source.GroupTeamMapRemoval { - usersLdapGroups := source.listLdapGroupMemberships(l, userAttributeListedInGroup, false) - teamsToAdd, teamsToRemove = source.getMappedMemberships(usersLdapGroups, userAttributeListedInGroup) + usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, false) } } user := &SearchResult{ - Username: v.GetAttributeValue(source.AttributeUsername), - Name: v.GetAttributeValue(source.AttributeName), - Surname: v.GetAttributeValue(source.AttributeSurname), - Mail: v.GetAttributeValue(source.AttributeMail), - IsAdmin: checkAdmin(l, source, v.DN), - LdapTeamAdd: teamsToAdd, - LdapTeamRemove: teamsToRemove, + Username: v.GetAttributeValue(source.AttributeUsername), + Name: v.GetAttributeValue(source.AttributeName), + Surname: v.GetAttributeValue(source.AttributeSurname), + Mail: v.GetAttributeValue(source.AttributeMail), + IsAdmin: checkAdmin(l, source, v.DN), + Groups: usersLdapGroups, } if !user.IsAdmin { diff --git a/services/auth/source/ldap/source_sync.go b/services/auth/source/ldap/source_sync.go index 73e8309ac..4571ff654 100644 --- a/services/auth/source/ldap/source_sync.go +++ b/services/auth/source/ldap/source_sync.go @@ -13,8 +13,10 @@ import ( "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" + auth_module "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" + source_service "code.gitea.io/gitea/services/auth/source" user_service "code.gitea.io/gitea/services/user" ) @@ -65,6 +67,11 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { orgCache := make(map[string]*organization.Organization) teamCache := make(map[string]*organization.Team) + groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap) + if err != nil { + return err + } + for _, su := range sr { select { case <-ctx.Done(): @@ -173,7 +180,9 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } // Synchronize LDAP groups with organization and team memberships if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - source.SyncLdapGroupsToTeams(usr, su.LdapTeamAdd, su.LdapTeamRemove, orgCache, teamCache) + if err := 
source_service.SyncGroupsToTeamsCached(ctx, usr, su.Groups, groupTeamMapping, source.GroupTeamMapRemoval, orgCache, teamCache); err != nil { + log.Error("SyncGroupsToTeamsCached: %v", err) + } } } diff --git a/services/auth/source/oauth2/source.go b/services/auth/source/oauth2/source.go index 0abebc04e..675005e55 100644 --- a/services/auth/source/oauth2/source.go +++ b/services/auth/source/oauth2/source.go @@ -8,13 +8,6 @@ import ( "code.gitea.io/gitea/modules/json" ) -// ________ _____ __ .__ ________ -// \_____ \ / _ \ __ ___/ |_| |__ \_____ \ -// / | \ / /_\ \| | \ __\ | \ / ____/ -// / | \/ | \ | /| | | Y \/ \ -// \_______ /\____|__ /____/ |__| |___| /\_______ \ -// \/ \/ \/ \/ - // Source holds configuration for the OAuth2 login source. type Source struct { Provider string @@ -24,13 +17,15 @@ type Source struct { CustomURLMapping *CustomURLMapping IconURL string - Scopes []string - RequiredClaimName string - RequiredClaimValue string - GroupClaimName string - AdminGroup string - RestrictedGroup string - SkipLocalTwoFA bool `json:",omitempty"` + Scopes []string + RequiredClaimName string + RequiredClaimValue string + GroupClaimName string + AdminGroup string + GroupTeamMap string + GroupTeamMapRemoval bool + RestrictedGroup string + SkipLocalTwoFA bool `json:",omitempty"` // reference to the authSource authSource *auth.Source diff --git a/services/auth/source/source_group_sync.go b/services/auth/source/source_group_sync.go new file mode 100644 index 000000000..20b609534 --- /dev/null +++ b/services/auth/source/source_group_sync.go @@ -0,0 +1,116 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package source + +import ( + "context" + "fmt" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" + "code.gitea.io/gitea/modules/log" +) + +type syncType int + +const ( + syncAdd syncType = iota + syncRemove +) + +// SyncGroupsToTeams maps authentication source groups to organization and team memberships +func SyncGroupsToTeams(ctx context.Context, user *user_model.User, sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string, performRemoval bool) error { + orgCache := make(map[string]*organization.Organization) + teamCache := make(map[string]*organization.Team) + return SyncGroupsToTeamsCached(ctx, user, sourceUserGroups, sourceGroupTeamMapping, performRemoval, orgCache, teamCache) +} + +// SyncGroupsToTeamsCached maps authentication source groups to organization and team memberships +func SyncGroupsToTeamsCached(ctx context.Context, user *user_model.User, sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string, performRemoval bool, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) error { + membershipsToAdd, membershipsToRemove := resolveMappedMemberships(sourceUserGroups, sourceGroupTeamMapping) + + if performRemoval { + if err := syncGroupsToTeamsCached(ctx, user, membershipsToRemove, syncRemove, orgCache, teamCache); err != nil { + return fmt.Errorf("could not sync[remove] user groups: %w", err) + } + } + + if err := syncGroupsToTeamsCached(ctx, user, membershipsToAdd, syncAdd, orgCache, teamCache); err != nil { + return fmt.Errorf("could not sync[add] user groups: %w", err) + } + + return nil +} + +func resolveMappedMemberships(sourceUserGroups container.Set[string], sourceGroupTeamMapping 
map[string]map[string][]string) (map[string][]string, map[string][]string) { + membershipsToAdd := map[string][]string{} + membershipsToRemove := map[string][]string{} + for group, memberships := range sourceGroupTeamMapping { + isUserInGroup := sourceUserGroups.Contains(group) + if isUserInGroup { + for org, teams := range memberships { + membershipsToAdd[org] = teams + } + } else { + for org, teams := range memberships { + membershipsToRemove[org] = teams + } + } + } + return membershipsToAdd, membershipsToRemove +} + +func syncGroupsToTeamsCached(ctx context.Context, user *user_model.User, orgTeamMap map[string][]string, action syncType, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) error { + for orgName, teamNames := range orgTeamMap { + var err error + org, ok := orgCache[orgName] + if !ok { + org, err = organization.GetOrgByName(ctx, orgName) + if err != nil { + if organization.IsErrOrgNotExist(err) { + // organization must be created before group sync + log.Warn("group sync: Could not find organisation %s: %v", orgName, err) + continue + } + return err + } + orgCache[orgName] = org + } + for _, teamName := range teamNames { + team, ok := teamCache[orgName+teamName] + if !ok { + team, err = org.GetTeam(ctx, teamName) + if err != nil { + if organization.IsErrTeamNotExist(err) { + // team must be created before group sync + log.Warn("group sync: Could not find team %s: %v", teamName, err) + continue + } + return err + } + teamCache[orgName+teamName] = team + } + + isMember, err := organization.IsTeamMember(ctx, org.ID, team.ID, user.ID) + if err != nil { + return err + } + + if action == syncAdd && !isMember { + if err := models.AddTeamMember(team, user.ID); err != nil { + log.Error("group sync: Could not add user to team: %v", err) + return err + } + } else if action == syncRemove && isMember { + if err := models.RemoveTeamMember(team, user.ID); err != nil { + log.Error("group sync: Could not remove user from team: %v", err) + return err + } + } + } + } + return nil +} diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go index aad0e3959..2e6560ec0 100644 --- a/services/cron/tasks_basic.go +++ b/services/cron/tasks_basic.go @@ -16,7 +16,7 @@ import ( "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/migrations" mirror_service "code.gitea.io/gitea/services/mirror" - packages_service "code.gitea.io/gitea/services/packages" + packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup" repo_service "code.gitea.io/gitea/services/repository" archiver_service "code.gitea.io/gitea/services/repository/archiver" ) @@ -152,7 +152,7 @@ func registerCleanupPackages() { OlderThan: 24 * time.Hour, }, func(ctx context.Context, _ *user_model.User, config Config) error { realConfig := config.(*OlderThanConfig) - return packages_service.Cleanup(ctx, realConfig.OlderThan) + return packages_cleanup_service.Cleanup(ctx, realConfig.OlderThan) }) } diff --git a/services/forms/auth_form.go b/services/forms/auth_form.go index 0cede07f9..5625aa1e2 100644 --- a/services/forms/auth_form.go +++ b/services/forms/auth_form.go @@ -72,13 +72,15 @@ type AuthenticationForm struct { Oauth2GroupClaimName string Oauth2AdminGroup string Oauth2RestrictedGroup string + Oauth2GroupTeamMap string `binding:"ValidGroupTeamMap"` + Oauth2GroupTeamMapRemoval bool SkipLocalTwoFA bool SSPIAutoCreateUsers bool SSPIAutoActivateUsers bool SSPIStripDomainNames bool SSPISeparatorReplacement string `binding:"AlphaDashDot;MaxSize(5)"` 
SSPIDefaultLanguage string - GroupTeamMap string + GroupTeamMap string `binding:"ValidGroupTeamMap"` GroupTeamMapRemoval bool } diff --git a/services/forms/package_form.go b/services/forms/package_form.go index e78e64ef7..b22ed47c7 100644 --- a/services/forms/package_form.go +++ b/services/forms/package_form.go @@ -15,7 +15,7 @@ import ( type PackageCleanupRuleForm struct { ID int64 Enabled bool - Type string `binding:"Required;In(composer,conan,conda,container,generic,helm,maven,npm,nuget,pub,pypi,rubygems,vagrant)"` + Type string `binding:"Required;In(cargo,chef,composer,conan,conda,container,generic,helm,maven,npm,nuget,pub,pypi,rubygems,vagrant)"` KeepCount int `binding:"In(0,1,5,10,25,50,100)"` KeepPattern string `binding:"RegexPattern"` RemoveDays int `binding:"In(0,7,14,30,60,90,180)"` diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go index 7dee90352..126d2bf35 100644 --- a/services/mirror/mirror_pull.go +++ b/services/mirror/mirror_pull.go @@ -18,6 +18,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/process" + "code.gitea.io/gitea/modules/proxy" repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -215,6 +216,8 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo return nil, false } + envs := proxy.EnvWithProxy(remoteURL.URL) + stdoutBuilder := strings.Builder{} stderrBuilder := strings.Builder{} if err := cmd. @@ -222,6 +225,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo Run(&git.RunOpts{ Timeout: timeout, Dir: repoPath, + Env: envs, Stdout: &stdoutBuilder, Stderr: &stderrBuilder, }); err != nil { diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go new file mode 100644 index 000000000..e58a47281 --- /dev/null +++ b/services/packages/cargo/index.go @@ -0,0 +1,290 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package cargo + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "path" + "strconv" + "time" + + packages_model "code.gitea.io/gitea/models/packages" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + cargo_module "code.gitea.io/gitea/modules/packages/cargo" + repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" + files_service "code.gitea.io/gitea/services/repository/files" +) + +const ( + IndexRepositoryName = "_cargo-index" + ConfigFileName = "config.json" +) + +// https://doc.rust-lang.org/cargo/reference/registries.html#index-format + +func BuildPackagePath(name string) string { + switch len(name) { + case 0: + panic("Cargo package name can not be empty") + case 1: + return path.Join("1", name) + case 2: + return path.Join("2", name) + case 3: + return path.Join("3", string(name[0]), name) + default: + return path.Join(name[0:2], name[2:4], name) + } +} + +func InitializeIndexRepository(ctx context.Context, doer, owner *user_model.User) error { + repo, err := getOrCreateIndexRepository(ctx, doer, owner) + if err != nil { + return err + } + + if err := createOrUpdateConfigFile(ctx, repo, doer, owner); err != nil { + return fmt.Errorf("createOrUpdateConfigFile: %w", err) + } + + return nil +} + +func RebuildIndex(ctx context.Context, doer, owner *user_model.User) error { + repo, err := getOrCreateIndexRepository(ctx, doer, owner) + if err != nil { + return err + } + + ps, err := packages_model.GetPackagesByType(ctx, owner.ID, packages_model.TypeCargo) + if err != nil { + return fmt.Errorf("GetPackagesByType: %w", err) + } + + return alterRepositoryContent( + ctx, + doer, + repo, + "Rebuild Cargo Index", + func(t *files_service.TemporaryUploadRepository) error { + // Remove all existing content but the Cargo config + files, err := t.LsFiles() + if err != nil { + return err + } + for i, file := range files { + if file == ConfigFileName { + files[i] = files[len(files)-1] + files = files[:len(files)-1] + break + } + } + if err := t.RemoveFilesFromIndex(files...); err != nil { + return err + } + + // Add all packages + for _, p := range ps { + if err := addOrUpdatePackageIndex(ctx, t, p); err != nil { + return err + } + } + + return nil + }, + ) +} + +func AddOrUpdatePackageIndex(ctx context.Context, doer, owner *user_model.User, packageID int64) error { + repo, err := getOrCreateIndexRepository(ctx, doer, owner) + if err != nil { + return err + } + + p, err := packages_model.GetPackageByID(ctx, packageID) + if err != nil { + return fmt.Errorf("GetPackageByID[%d]: %w", packageID, err) + } + + return alterRepositoryContent( + ctx, + doer, + repo, + "Update "+p.Name, + func(t *files_service.TemporaryUploadRepository) error { + return addOrUpdatePackageIndex(ctx, t, p) + }, + ) +} + +type IndexVersionEntry struct { + Name string `json:"name"` + Version string `json:"vers"` + Dependencies []*cargo_module.Dependency `json:"deps"` + FileChecksum string `json:"cksum"` + Features map[string][]string `json:"features"` + Yanked bool `json:"yanked"` + Links string `json:"links,omitempty"` +} + +func addOrUpdatePackageIndex(ctx context.Context, t *files_service.TemporaryUploadRepository, p *packages_model.Package) error { + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + Sort: 
packages_model.SortVersionAsc, + }) + if err != nil { + return fmt.Errorf("SearchVersions[%s]: %w", p.Name, err) + } + if len(pvs) == 0 { + return nil + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + return fmt.Errorf("GetPackageDescriptors[%s]: %w", p.Name, err) + } + + var b bytes.Buffer + for _, pd := range pds { + metadata := pd.Metadata.(*cargo_module.Metadata) + + dependencies := metadata.Dependencies + if dependencies == nil { + dependencies = make([]*cargo_module.Dependency, 0) + } + + features := metadata.Features + if features == nil { + features = make(map[string][]string) + } + + yanked, _ := strconv.ParseBool(pd.VersionProperties.GetByName(cargo_module.PropertyYanked)) + entry, err := json.Marshal(&IndexVersionEntry{ + Name: pd.Package.Name, + Version: pd.Version.Version, + Dependencies: dependencies, + FileChecksum: pd.Files[0].Blob.HashSHA256, + Features: features, + Yanked: yanked, + Links: metadata.Links, + }) + if err != nil { + return err + } + + b.Write(entry) + b.WriteString("\n") + } + + return writeObjectToIndex(t, BuildPackagePath(pds[0].Package.LowerName), &b) +} + +func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.User) (*repo_model.Repository, error) { + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner.Name, IndexRepositoryName) + if err != nil { + if errors.Is(err, util.ErrNotExist) { + repo, err = repo_module.CreateRepository(doer, owner, repo_module.CreateRepoOptions{ + Name: IndexRepositoryName, + }) + if err != nil { + return nil, fmt.Errorf("CreateRepository: %w", err) + } + } else { + return nil, fmt.Errorf("GetRepositoryByOwnerAndName: %w", err) + } + } + + return repo, nil +} + +type Config struct { + DownloadURL string `json:"dl"` + APIURL string `json:"api"` +} + +func createOrUpdateConfigFile(ctx context.Context, repo *repo_model.Repository, doer, owner *user_model.User) error { + return alterRepositoryContent( + ctx, + doer, + repo, + "Initialize Cargo Config", + func(t *files_service.TemporaryUploadRepository) error { + var b bytes.Buffer + err := json.NewEncoder(&b).Encode(Config{ + DownloadURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo/api/v1/crates", + APIURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo", + }) + if err != nil { + return err + } + + return writeObjectToIndex(t, ConfigFileName, &b) + }, + ) +} + +// This is a shorter version of CreateOrUpdateRepoFile which allows to perform multiple actions on a git repository +func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commitMessage string, fn func(*files_service.TemporaryUploadRepository) error) error { + t, err := files_service.NewTemporaryUploadRepository(ctx, repo) + if err != nil { + return err + } + defer t.Close() + + var lastCommitID string + if err := t.Clone(repo.DefaultBranch); err != nil { + if !git.IsErrBranchNotExist(err) || !repo.IsEmpty { + return err + } + if err := t.Init(); err != nil { + return err + } + } else { + if err := t.SetDefaultIndex(); err != nil { + return err + } + + commit, err := t.GetBranchCommit(repo.DefaultBranch) + if err != nil { + return err + } + + lastCommitID = commit.ID.String() + } + + if err := fn(t); err != nil { + return err + } + + treeHash, err := t.WriteTree() + if err != nil { + return err + } + + now := time.Now() + commitHash, err := t.CommitTreeWithDate(lastCommitID, doer, doer, treeHash, commitMessage, false, now, now) + if err != nil { + return err + } + + return t.Push(doer, 
commitHash, repo.DefaultBranch) +} + +func writeObjectToIndex(t *files_service.TemporaryUploadRepository, path string, r io.Reader) error { + hash, err := t.HashObject(r) + if err != nil { + return err + } + + return t.AddObjectToIndex("100644", hash, path) +} diff --git a/services/packages/cleanup/cleanup.go b/services/packages/cleanup/cleanup.go new file mode 100644 index 000000000..2d62a028a --- /dev/null +++ b/services/packages/cleanup/cleanup.go @@ -0,0 +1,154 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package container + +import ( + "context" + "fmt" + "time" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + "code.gitea.io/gitea/modules/util" + packages_service "code.gitea.io/gitea/services/packages" + cargo_service "code.gitea.io/gitea/services/packages/cargo" + container_service "code.gitea.io/gitea/services/packages/container" +) + +// Cleanup removes expired package data +func Cleanup(taskCtx context.Context, olderThan time.Duration) error { + ctx, committer, err := db.TxContext(taskCtx) + if err != nil { + return err + } + defer committer.Close() + + err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { + select { + case <-taskCtx.Done(): + return db.ErrCancelledf("While processing package cleanup rules") + default: + } + + if err := pcr.CompiledPattern(); err != nil { + return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) + } + + olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) + + packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) + if err != nil { + return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) + } + + for _, p := range packages { + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + IsInternal: util.OptionalBoolFalse, + Sort: packages_model.SortCreatedDesc, + Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200), + }) + if err != nil { + return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) + } + versionDeleted := false + for _, pv := range pvs { + if pcr.Type == packages_model.TypeContainer { + if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { + return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) + } else if skip { + log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) + continue + } + } + + toMatch := pv.LowerVersion + if pcr.MatchFullName { + toMatch = p.LowerName + "/" + pv.LowerVersion + } + + if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) + continue + } + if pv.CreatedUnix.AsLocalTime().After(olderThan) { + log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version) + continue + } + if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) + continue + } + + log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) + + if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { + return fmt.Errorf("CleanupRule [%d]: 
DeletePackageVersionAndReferences failed: %w", pcr.ID, err) + } + + versionDeleted = true + } + + if versionDeleted { + if pcr.Type == packages_model.TypeCargo { + owner, err := user_model.GetUserByID(ctx, pcr.OwnerID) + if err != nil { + return fmt.Errorf("GetUserByID failed: %w", err) + } + if err := cargo_service.AddOrUpdatePackageIndex(ctx, owner, owner, p.ID); err != nil { + return fmt.Errorf("CleanupRule [%d]: cargo.AddOrUpdatePackageIndex failed: %w", pcr.ID, err) + } + } + } + } + return nil + }) + if err != nil { + return err + } + + if err := container_service.Cleanup(ctx, olderThan); err != nil { + return err + } + + ps, err := packages_model.FindUnreferencedPackages(ctx) + if err != nil { + return err + } + for _, p := range ps { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil { + return err + } + if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil { + return err + } + } + + pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) + if err != nil { + return err + } + + for _, pb := range pbs { + if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { + return err + } + } + + if err := committer.Commit(); err != nil { + return err + } + + contentStore := packages_module.NewContentStore() + for _, pb := range pbs { + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob [%v]: %v", pb.ID, err) + } + } + + return nil +} diff --git a/services/packages/container/cleanup.go b/services/packages/container/cleanup.go index d6d4d152c..1a9ef2639 100644 --- a/services/packages/container/cleanup.go +++ b/services/packages/container/cleanup.go @@ -10,8 +10,9 @@ import ( packages_model "code.gitea.io/gitea/models/packages" container_model "code.gitea.io/gitea/models/packages/container" container_module "code.gitea.io/gitea/modules/packages/container" - "code.gitea.io/gitea/modules/packages/container/oci" "code.gitea.io/gitea/modules/util" + + digest "github.com/opencontainers/go-digest" ) // Cleanup removes expired container data @@ -87,7 +88,7 @@ func ShouldBeSkipped(ctx context.Context, pcr *packages_model.PackageCleanupRule } // Check if the version is a digest (or untagged) - if oci.Digest(pv.LowerVersion).Validate() { + if digest.Digest(pv.LowerVersion).Validate() == nil { // Check if there is another manifest referencing this version has, err := packages_model.ExistVersion(ctx, &packages_model.PackageSearchOptions{ PackageID: p.ID, diff --git a/services/packages/packages.go b/services/packages/packages.go index 9e52cb145..3abca7337 100644 --- a/services/packages/packages.go +++ b/services/packages/packages.go @@ -10,7 +10,6 @@ import ( "fmt" "io" "strings" - "time" "code.gitea.io/gitea/models/db" packages_model "code.gitea.io/gitea/models/packages" @@ -22,7 +21,6 @@ import ( packages_module "code.gitea.io/gitea/modules/packages" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - container_service "code.gitea.io/gitea/services/packages/container" ) var ( @@ -335,6 +333,10 @@ func CheckSizeQuotaExceeded(ctx context.Context, doer, owner *user_model.User, p var typeSpecificSize int64 switch packageType { + case packages_model.TypeCargo: + typeSpecificSize = setting.Packages.LimitSizeCargo + case packages_model.TypeChef: + typeSpecificSize = setting.Packages.LimitSizeChef case packages_model.TypeComposer: typeSpecificSize = setting.Packages.LimitSizeComposer case packages_model.TypeConan: @@ 
-448,123 +450,6 @@ func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) erro return packages_model.DeleteFileByID(ctx, pf.ID) } -// Cleanup removes expired package data -func Cleanup(taskCtx context.Context, olderThan time.Duration) error { - ctx, committer, err := db.TxContext(taskCtx) - if err != nil { - return err - } - defer committer.Close() - - err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { - select { - case <-taskCtx.Done(): - return db.ErrCancelledf("While processing package cleanup rules") - default: - } - - if err := pcr.CompiledPattern(); err != nil { - return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) - } - - olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) - - packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) - } - - for _, p := range packages { - pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ - PackageID: p.ID, - IsInternal: util.OptionalBoolFalse, - Sort: packages_model.SortCreatedDesc, - Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200), - }) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) - } - for _, pv := range pvs { - if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) - } else if skip { - log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) - continue - } - - toMatch := pv.LowerVersion - if pcr.MatchFullName { - toMatch = p.LowerName + "/" + pv.LowerVersion - } - - if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) - continue - } - if pv.CreatedUnix.AsLocalTime().After(olderThan) { - log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version) - continue - } - if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) - continue - } - - log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) - - if err := DeletePackageVersionAndReferences(ctx, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err) - } - } - } - return nil - }) - if err != nil { - return err - } - - if err := container_service.Cleanup(ctx, olderThan); err != nil { - return err - } - - ps, err := packages_model.FindUnreferencedPackages(ctx) - if err != nil { - return err - } - for _, p := range ps { - if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil { - return err - } - if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil { - return err - } - } - - pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) - if err != nil { - return err - } - - for _, pb := range pbs { - if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { - return err - } - } - - if err := committer.Commit(); err != nil { - return err - } - - contentStore := packages_module.NewContentStore() - for _, pb := range pbs { - if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { - log.Error("Error deleting 
package blob [%v]: %v", pb.ID, err) - } - } - - return nil -} - // GetFileStreamByPackageNameAndVersion returns the content of the specific package file func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *packages_model.PackageFile, error) { log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey) diff --git a/services/pull/merge.go b/services/pull/merge.go index edd5b601d..a3d69df8d 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -98,6 +98,9 @@ func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr } for _, ref := range refs { if ref.RefAction == references.XRefActionCloses { + if err := ref.LoadIssue(ctx); err != nil { + return "", "", err + } closeIssueIndexes = append(closeIssueIndexes, fmt.Sprintf("%s %s%d", closeWord, issueReference, ref.Issue.Index)) } } diff --git a/services/pull/pull.go b/services/pull/pull.go index 317875d21..0d260c93b 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -263,6 +263,24 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string, return } + for _, pr := range prs { + log.Trace("Updating PR[%d]: composing new test task", pr.ID) + if pr.Flow == issues_model.PullRequestFlowGithub { + if err := PushToBaseRepo(ctx, pr); err != nil { + log.Error("PushToBaseRepo: %v", err) + continue + } + } else { + continue + } + + AddToTaskQueue(pr) + comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID) + if err == nil && comment != nil { + notification.NotifyPullRequestPushCommits(ctx, doer, pr, comment) + } + } + if isSync { requests := issues_model.PullRequestList(prs) if err = requests.LoadAttributes(); err != nil { @@ -303,24 +321,6 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string, } } - for _, pr := range prs { - log.Trace("Updating PR[%d]: composing new test task", pr.ID) - if pr.Flow == issues_model.PullRequestFlowGithub { - if err := PushToBaseRepo(ctx, pr); err != nil { - log.Error("PushToBaseRepo: %v", err) - continue - } - } else { - continue - } - - AddToTaskQueue(pr) - comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID) - if err == nil && comment != nil { - notification.NotifyPullRequestPushCommits(ctx, doer, pr, comment) - } - } - log.Trace("AddTestPullRequestTask [base_repo_id: %d, base_branch: %s]: finding pull requests", repoID, branch) prs, err = issues_model.GetUnmergedPullRequestsByBaseInfo(repoID, branch) if err != nil { diff --git a/services/pull/update.go b/services/pull/update.go index ede89bcdf..b9525cf0c 100644 --- a/services/pull/update.go +++ b/services/pull/update.go @@ -106,7 +106,7 @@ func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest, BaseBranch: pull.HeadBranch, } - pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch) + pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch) if err != nil { return false, false, err } |
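The LDAP-only SyncLdapGroupsToTeams and getMappedMemberships helpers are replaced by the shared services/auth/source/source_group_sync.go, fed by auth_module.UnmarshalGroupTeamMapping and a container.Set[string] of group DNs. A hedged sketch of how a caller wires these together, based on the call sites in source_authenticate.go and source_sync.go; the wrapper function, its name, and its parameters are illustrative and not part of the diff:

```go
// Sketch of driving the new shared group-to-team sync from an auth source.
// Assumes the Gitea module context; only the two imported service calls are
// taken directly from this diff.
package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
	auth_module "code.gitea.io/gitea/modules/auth"
	"code.gitea.io/gitea/modules/container"
	source_service "code.gitea.io/gitea/services/auth/source"
)

// syncUserGroups applies a JSON group-to-team mapping for one authenticated user.
// groupTeamMapJSON has the shape {"cn=devs,ou=groups,dc=example,dc=org": {"myorg": ["developers"]}}.
func syncUserGroups(ctx context.Context, user *user_model.User, groupTeamMapJSON string, userGroups container.Set[string]) error {
	mapping, err := auth_module.UnmarshalGroupTeamMapping(groupTeamMapJSON)
	if err != nil {
		return err
	}
	// performRemoval=true also revokes memberships mapped from groups the user is not in.
	return source_service.SyncGroupsToTeams(ctx, user, userGroups, mapping, true)
}
```

Passing performRemoval as true mirrors source.GroupTeamMapRemoval: memberships mapped from groups the user no longer belongs to are revoked, while a plain add pass leaves other memberships untouched.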
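The new Cargo registry support keeps its index in a _cargo-index Git repository laid out like the crates.io index. The path scheme of BuildPackagePath in services/packages/cargo/index.go is easiest to see with a few inputs; this standalone copy mirrors the added function, and the main wrapper is illustrative only:

```go
// Sketch of the crates.io-style index path layout added in cargo/index.go.
package main

import (
	"fmt"
	"path"
)

func buildPackagePath(name string) string {
	switch len(name) {
	case 0:
		panic("Cargo package name can not be empty")
	case 1:
		return path.Join("1", name)
	case 2:
		return path.Join("2", name)
	case 3:
		return path.Join("3", string(name[0]), name)
	default:
		// Bucket longer names by their first four characters.
		return path.Join(name[0:2], name[2:4], name)
	}
}

func main() {
	fmt.Println(buildPackagePath("a"))     // 1/a
	fmt.Println(buildPackagePath("ab"))    // 2/ab
	fmt.Println(buildPackagePath("abc"))   // 3/a/abc
	fmt.Println(buildPackagePath("serde")) // se/rd/serde
}
```

Names of length one to three get dedicated 1/, 2/, and 3/<first letter>/ directories; longer names are bucketed by their first four characters, which keeps directory fan-out bounded.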